1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-config.h"
34 #include "expr.h"
35 #include "optabs.h"
36 #include "libfuncs.h"
37 #include "recog.h"
38 #include "output.h"
39 #include "typeclass.h"
40 #include "toplev.h"
41 #include "predict.h"
42 #include "tm_p.h"
43 #include "target.h"
44
45 #define CALLED_AS_BUILT_IN(NODE) \
46 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
47
48 /* Register mappings for target machines without register windows. */
49 #ifndef INCOMING_REGNO
50 #define INCOMING_REGNO(OUT) (OUT)
51 #endif
52 #ifndef OUTGOING_REGNO
53 #define OUTGOING_REGNO(IN) (IN)
54 #endif
55
56 #ifndef PAD_VARARGS_DOWN
57 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
58 #endif
59
60 /* Define the names of the builtin function types and codes. */
61 const char *const built_in_class_names[4]
62 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
63
64 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA) STRINGX(X),
65 const char *const built_in_names[(int) END_BUILTINS] =
66 {
67 #include "builtins.def"
68 };
69 #undef DEF_BUILTIN
70
71 /* Set up an array of _DECL trees; make sure each element is
72 initialized to NULL_TREE. */
73 tree built_in_decls[(int) END_BUILTINS];
74
75 tree (*lang_type_promotes_to) PARAMS ((tree));
76
77 static int get_pointer_alignment PARAMS ((tree, unsigned int));
78 static tree c_strlen PARAMS ((tree));
79 static const char *c_getstr PARAMS ((tree));
80 static rtx c_readstr PARAMS ((const char *,
81 enum machine_mode));
82 static int target_char_cast PARAMS ((tree, char *));
83 static rtx get_memory_rtx PARAMS ((tree));
84 static int apply_args_size PARAMS ((void));
85 static int apply_result_size PARAMS ((void));
86 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
87 static rtx result_vector PARAMS ((int, rtx));
88 #endif
89 static rtx expand_builtin_setjmp PARAMS ((tree, rtx));
90 static void expand_builtin_prefetch PARAMS ((tree));
91 static rtx expand_builtin_apply_args PARAMS ((void));
92 static rtx expand_builtin_apply_args_1 PARAMS ((void));
93 static rtx expand_builtin_apply PARAMS ((rtx, rtx, rtx));
94 static void expand_builtin_return PARAMS ((rtx));
95 static enum type_class type_to_class PARAMS ((tree));
96 static rtx expand_builtin_classify_type PARAMS ((tree));
97 static rtx expand_builtin_mathfn PARAMS ((tree, rtx, rtx));
98 static rtx expand_builtin_constant_p PARAMS ((tree));
99 static rtx expand_builtin_args_info PARAMS ((tree));
100 static rtx expand_builtin_next_arg PARAMS ((tree));
101 static rtx expand_builtin_va_start PARAMS ((int, tree));
102 static rtx expand_builtin_va_end PARAMS ((tree));
103 static rtx expand_builtin_va_copy PARAMS ((tree));
104 static rtx expand_builtin_memcmp PARAMS ((tree, tree, rtx,
105 enum machine_mode));
106 static rtx expand_builtin_strcmp PARAMS ((tree, rtx,
107 enum machine_mode));
108 static rtx expand_builtin_strncmp PARAMS ((tree, rtx,
109 enum machine_mode));
110 static rtx builtin_memcpy_read_str PARAMS ((PTR, HOST_WIDE_INT,
111 enum machine_mode));
112 static rtx expand_builtin_strcat PARAMS ((tree, rtx,
113 enum machine_mode));
114 static rtx expand_builtin_strncat PARAMS ((tree, rtx,
115 enum machine_mode));
116 static rtx expand_builtin_strspn PARAMS ((tree, rtx,
117 enum machine_mode));
118 static rtx expand_builtin_strcspn PARAMS ((tree, rtx,
119 enum machine_mode));
120 static rtx expand_builtin_memcpy PARAMS ((tree, rtx,
121 enum machine_mode));
122 static rtx expand_builtin_strcpy PARAMS ((tree, rtx,
123 enum machine_mode));
124 static rtx builtin_strncpy_read_str PARAMS ((PTR, HOST_WIDE_INT,
125 enum machine_mode));
126 static rtx expand_builtin_strncpy PARAMS ((tree, rtx,
127 enum machine_mode));
128 static rtx builtin_memset_read_str PARAMS ((PTR, HOST_WIDE_INT,
129 enum machine_mode));
130 static rtx expand_builtin_memset PARAMS ((tree, rtx,
131 enum machine_mode));
132 static rtx expand_builtin_bzero PARAMS ((tree));
133 static rtx expand_builtin_strlen PARAMS ((tree, rtx));
134 static rtx expand_builtin_strstr PARAMS ((tree, rtx,
135 enum machine_mode));
136 static rtx expand_builtin_strpbrk PARAMS ((tree, rtx,
137 enum machine_mode));
138 static rtx expand_builtin_strchr PARAMS ((tree, rtx,
139 enum machine_mode));
140 static rtx expand_builtin_strrchr PARAMS ((tree, rtx,
141 enum machine_mode));
142 static rtx expand_builtin_alloca PARAMS ((tree, rtx));
143 static rtx expand_builtin_ffs PARAMS ((tree, rtx, rtx));
144 static rtx expand_builtin_frame_address PARAMS ((tree));
145 static rtx expand_builtin_fputs PARAMS ((tree, int, int));
146 static tree stabilize_va_list PARAMS ((tree, int));
147 static rtx expand_builtin_expect PARAMS ((tree, rtx));
148 static tree fold_builtin_constant_p PARAMS ((tree));
149 static tree fold_builtin_classify_type PARAMS ((tree));
150 static tree build_function_call_expr PARAMS ((tree, tree));
151 static int validate_arglist PARAMS ((tree, ...));
152
153 /* Return the alignment in bits of EXP, a pointer valued expression.
154 But don't return more than MAX_ALIGN no matter what.
155 The alignment returned is, by default, the alignment of the thing that
156 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
157
158 Otherwise, look at the expression to see if we can do better, i.e., if the
159 expression is actually pointing at an object whose alignment is tighter. */
160
161 static int
162 get_pointer_alignment (exp, max_align)
163 tree exp;
164 unsigned int max_align;
165 {
166 unsigned int align, inner;
167
168 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
169 return 0;
170
171 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
172 align = MIN (align, max_align);
173
174 while (1)
175 {
176 switch (TREE_CODE (exp))
177 {
178 case NOP_EXPR:
179 case CONVERT_EXPR:
180 case NON_LVALUE_EXPR:
181 exp = TREE_OPERAND (exp, 0);
182 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
183 return align;
184
185 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
186 align = MIN (inner, max_align);
187 break;
188
189 case PLUS_EXPR:
190 /* If sum of pointer + int, restrict our maximum alignment to that
191 imposed by the integer. If not, we can't do any better than
192 ALIGN. */
193 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
194 return align;
195
196 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
197 & (max_align / BITS_PER_UNIT - 1))
198 != 0)
199 max_align >>= 1;
200
201 exp = TREE_OPERAND (exp, 0);
202 break;
203
204 case ADDR_EXPR:
205 /* See what we are pointing at and look at its alignment. */
206 exp = TREE_OPERAND (exp, 0);
207 if (TREE_CODE (exp) == FUNCTION_DECL)
208 align = FUNCTION_BOUNDARY;
209 else if (DECL_P (exp))
210 align = DECL_ALIGN (exp);
211 #ifdef CONSTANT_ALIGNMENT
212 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
213 align = CONSTANT_ALIGNMENT (exp, align);
214 #endif
215 return MIN (align, max_align);
216
217 default:
218 return align;
219 }
220 }
221 }
222
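/* Illustrative sketch, not part of builtins.c: the kind of deduction the
   routine above makes, shown at the source level.  The declarations are
   hypothetical and the alignments in the comments assume a target where
   doubles are aligned to 64 bits.  */
#if 0
double d;                               /* DECL_ALIGN is 64 bits.          */
const double *p = &d;                   /* ADDR_EXPR of a decl: 64 bits.   */
const char *q = (const char *) &d + 4;  /* PLUS_EXPR with offset 4: the
                                           offset limits the result to 32
                                           bits even though &d itself is
                                           64-bit aligned.  */
#endif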
223 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
224 way, because the string could contain a zero byte in the middle.
225 TREE_STRING_LENGTH is the size of the character array, not the string.
226
227 The value returned is of type `ssizetype'.
228
229 Unfortunately, string_constant can't access the values of const char
230 arrays with initializers, so neither can we here. */
231
232 static tree
233 c_strlen (src)
234 tree src;
235 {
236 tree offset_node;
237 HOST_WIDE_INT offset;
238 int max;
239 const char *ptr;
240
241 src = string_constant (src, &offset_node);
242 if (src == 0)
243 return 0;
244
245 max = TREE_STRING_LENGTH (src) - 1;
246 ptr = TREE_STRING_POINTER (src);
247
248 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
249 {
250 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
251 compute the offset to the following null if we don't know where to
252 start searching for it. */
253 int i;
254
255 for (i = 0; i < max; i++)
256 if (ptr[i] == 0)
257 return 0;
258
259 /* We don't know the starting offset, but we do know that the string
260 has no internal zero bytes. We can assume that the offset falls
261 within the bounds of the string; otherwise, the programmer deserves
262 what he gets. Subtract the offset from the length of the string,
263 and return that. This would perhaps not be valid if we were dealing
264 with named arrays in addition to literal string constants. */
265
266 return size_diffop (size_int (max), offset_node);
267 }
268
269 /* We have a known offset into the string. Start searching there for
270 a null character if we can represent it as a single HOST_WIDE_INT. */
271 if (offset_node == 0)
272 offset = 0;
273 else if (! host_integerp (offset_node, 0))
274 offset = -1;
275 else
276 offset = tree_low_cst (offset_node, 0);
277
278 /* If the offset is known to be out of bounds, warn, and call strlen at
279 runtime. */
280 if (offset < 0 || offset > max)
281 {
282 warning ("offset outside bounds of constant string");
283 return 0;
284 }
285
286 /* Use strlen to search for the first zero byte. Since any strings
287 constructed with build_string will have nulls appended, we win even
288 if we get handed something like (char[4])"abcd".
289
290 Since OFFSET is our starting index into the string, no further
291 calculation is needed. */
292 return ssize_int (strlen (ptr + offset));
293 }
294
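/* Worked example, not part of builtins.c: what the routine above computes
   for a few constant string arguments a caller might hand to a string
   builtin.

     c_strlen on the tree for "hello"      -> ssize_int (5)
     c_strlen on the tree for "hello" + 2  -> ssize_int (3)  (known offset)
     c_strlen on the tree for "hello" + 6  -> 0, after warning that the
                                              offset is outside the bounds
                                              of the constant string.  */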
295 /* Return a char pointer for a C string if it is a string constant
296 or sum of string constant and integer constant. */
297
298 static const char *
299 c_getstr (src)
300 tree src;
301 {
302 tree offset_node;
303
304 src = string_constant (src, &offset_node);
305 if (src == 0)
306 return 0;
307
308 if (offset_node == 0)
309 return TREE_STRING_POINTER (src);
310 else if (!host_integerp (offset_node, 1)
311 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
312 return 0;
313
314 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
315 }
316
317 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
318 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
319
320 static rtx
321 c_readstr (str, mode)
322 const char *str;
323 enum machine_mode mode;
324 {
325 HOST_WIDE_INT c[2];
326 HOST_WIDE_INT ch;
327 unsigned int i, j;
328
329 if (GET_MODE_CLASS (mode) != MODE_INT)
330 abort ();
331 c[0] = 0;
332 c[1] = 0;
333 ch = 1;
334 for (i = 0; i < GET_MODE_SIZE (mode); i++)
335 {
336 j = i;
337 if (WORDS_BIG_ENDIAN)
338 j = GET_MODE_SIZE (mode) - i - 1;
339 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
340 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
341 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
342 j *= BITS_PER_UNIT;
343 if (j > 2 * HOST_BITS_PER_WIDE_INT)
344 abort ();
345 if (ch)
346 ch = (unsigned char) str[i];
347 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
348 }
349 return immed_double_const (c[0], c[1], mode);
350 }
351
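/* Worked example, not part of builtins.c: reading the string "abcd" in a
   32-bit integer mode with the loop above (bytes 0x61 0x62 0x63 0x64).

     Little-endian target: the constant is 0x64636261 ('a' in the low byte).
     Big-endian target:    the constant is 0x61626364 ('a' in the high byte).

   Either way the constant matches what the target would see when loading
   those four bytes from memory.  */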
352 /* Cast a target constant CST to target CHAR. If that value fits into the
353 host char type, return zero and put it into the variable pointed to by
354 P. */
355
356 static int
357 target_char_cast (cst, p)
358 tree cst;
359 char *p;
360 {
361 unsigned HOST_WIDE_INT val, hostval;
362
363 if (!host_integerp (cst, 1)
364 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
365 return 1;
366
367 val = tree_low_cst (cst, 1);
368 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
369 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
370
371 hostval = val;
372 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
373 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
374
375 if (val != hostval)
376 return 1;
377
378 *p = hostval;
379 return 0;
380 }
381
382 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
383 times to get the address of either a higher stack frame, or a return
384 address located within it (depending on FNDECL_CODE). */
385
386 rtx
387 expand_builtin_return_addr (fndecl_code, count, tem)
388 enum built_in_function fndecl_code;
389 int count;
390 rtx tem;
391 {
392 int i;
393
394 /* Some machines need special handling before we can access
395 arbitrary frames. For example, on the sparc, we must first flush
396 all register windows to the stack. */
397 #ifdef SETUP_FRAME_ADDRESSES
398 if (count > 0)
399 SETUP_FRAME_ADDRESSES ();
400 #endif
401
402 /* On the sparc, the return address is not in the frame, it is in a
403 register. There is no way to access it off of the current frame
404 pointer, but it can be accessed off the previous frame pointer by
405 reading the value from the register window save area. */
406 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
407 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
408 count--;
409 #endif
410
411 /* Scan back COUNT frames to the specified frame. */
412 for (i = 0; i < count; i++)
413 {
414 /* Assume the dynamic chain pointer is in the word that the
415 frame address points to, unless otherwise specified. */
416 #ifdef DYNAMIC_CHAIN_ADDRESS
417 tem = DYNAMIC_CHAIN_ADDRESS (tem);
418 #endif
419 tem = memory_address (Pmode, tem);
420 tem = gen_rtx_MEM (Pmode, tem);
421 set_mem_alias_set (tem, get_frame_alias_set ());
422 tem = copy_to_reg (tem);
423 }
424
425 /* For __builtin_frame_address, return what we've got. */
426 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
427 return tem;
428
429 /* For __builtin_return_address, get the return address from that
430 frame. */
431 #ifdef RETURN_ADDR_RTX
432 tem = RETURN_ADDR_RTX (count, tem);
433 #else
434 tem = memory_address (Pmode,
435 plus_constant (tem, GET_MODE_SIZE (Pmode)));
436 tem = gen_rtx_MEM (Pmode, tem);
437 set_mem_alias_set (tem, get_frame_alias_set ());
438 #endif
439 return tem;
440 }
441
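/* Illustrative sketch, not part of builtins.c: the source-level builtins
   that are expanded through the routine above.  The function name is
   hypothetical.  */
#if 0
void *
callers_return_address (void)
{
  /* A COUNT of 0 asks about the current frame; larger counts walk the
     dynamic chain as in the loop above.  */
  void *frame = __builtin_frame_address (0);
  void *ret = __builtin_return_address (0);
  return ret ? ret : frame;
}
#endif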
442 /* Alias set used for setjmp buffer. */
443 static HOST_WIDE_INT setjmp_alias_set = -1;
444
445 /* Construct the leading half of a __builtin_setjmp call. Control will
446 return to RECEIVER_LABEL. This is used directly by sjlj exception
447 handling code. */
448
449 void
450 expand_builtin_setjmp_setup (buf_addr, receiver_label)
451 rtx buf_addr;
452 rtx receiver_label;
453 {
454 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
455 rtx stack_save;
456 rtx mem;
457
458 if (setjmp_alias_set == -1)
459 setjmp_alias_set = new_alias_set ();
460
461 #ifdef POINTERS_EXTEND_UNSIGNED
462 if (GET_MODE (buf_addr) != Pmode)
463 buf_addr = convert_memory_address (Pmode, buf_addr);
464 #endif
465
466 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
467
468 emit_queue ();
469
470 /* We store the frame pointer and the address of receiver_label in
471 the buffer and use the rest of it for the stack save area, which
472 is machine-dependent. */
473
474 #ifndef BUILTIN_SETJMP_FRAME_VALUE
475 #define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
476 #endif
477
478 mem = gen_rtx_MEM (Pmode, buf_addr);
479 set_mem_alias_set (mem, setjmp_alias_set);
480 emit_move_insn (mem, BUILTIN_SETJMP_FRAME_VALUE);
481
482 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
483 set_mem_alias_set (mem, setjmp_alias_set);
484
485 emit_move_insn (validize_mem (mem),
486 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
487
488 stack_save = gen_rtx_MEM (sa_mode,
489 plus_constant (buf_addr,
490 2 * GET_MODE_SIZE (Pmode)));
491 set_mem_alias_set (stack_save, setjmp_alias_set);
492 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
493
494 /* If there is further processing to do, do it. */
495 #ifdef HAVE_builtin_setjmp_setup
496 if (HAVE_builtin_setjmp_setup)
497 emit_insn (gen_builtin_setjmp_setup (buf_addr));
498 #endif
499
500 /* Tell optimize_save_area_alloca that extra work will need to
501 be done during alloca. */
502 current_function_calls_setjmp = 1;
503
504 /* Set this so all the registers get saved in our frame; we need to be
505 able to copy the saved values for any registers from frames we unwind. */
506 current_function_has_nonlocal_label = 1;
507 }
508
509 /* Construct the trailing part of a __builtin_setjmp call.
510 This is used directly by sjlj exception handling code. */
511
512 void
513 expand_builtin_setjmp_receiver (receiver_label)
514 rtx receiver_label ATTRIBUTE_UNUSED;
515 {
516 /* Clobber the FP when we get here, so we have to make sure it's
517 marked as used by this function. */
518 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
519
520 /* Mark the static chain as clobbered here so life information
521 doesn't get messed up for it. */
522 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
523
524 /* Now put in the code to restore the frame pointer, and argument
525 pointer, if needed. The code below is from expand_end_bindings
526 in stmt.c; see detailed documentation there. */
527 #ifdef HAVE_nonlocal_goto
528 if (! HAVE_nonlocal_goto)
529 #endif
530 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
531
532 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
533 if (fixed_regs[ARG_POINTER_REGNUM])
534 {
535 #ifdef ELIMINABLE_REGS
536 size_t i;
537 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
538
539 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
540 if (elim_regs[i].from == ARG_POINTER_REGNUM
541 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
542 break;
543
544 if (i == ARRAY_SIZE (elim_regs))
545 #endif
546 {
547 /* Now restore our arg pointer from the address at which it
548 was saved in our stack frame. */
549 emit_move_insn (virtual_incoming_args_rtx,
550 copy_to_reg (get_arg_pointer_save_area (cfun)));
551 }
552 }
553 #endif
554
555 #ifdef HAVE_builtin_setjmp_receiver
556 if (HAVE_builtin_setjmp_receiver)
557 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
558 else
559 #endif
560 #ifdef HAVE_nonlocal_goto_receiver
561 if (HAVE_nonlocal_goto_receiver)
562 emit_insn (gen_nonlocal_goto_receiver ());
563 else
564 #endif
565 { /* Nothing */ }
566
567 /* @@@ This is a kludge. Not all machine descriptions define a blockage
568 insn, but we must not allow the code we just generated to be reordered
569 by scheduling. Specifically, the update of the frame pointer must
570 happen immediately, not later. So emit an ASM_INPUT to act as blockage
571 insn. */
572 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
573 }
574
575 /* __builtin_setjmp is passed a pointer to an array of five words (not
576 all will be used on all machines). It operates similarly to the C
577 library function of the same name, but is more efficient. Much of
578 the code below (and for longjmp) is copied from the handling of
579 non-local gotos.
580
581 NOTE: This is intended for use by GNAT and the exception handling
582 scheme in the compiler and will only work in the method used by
583 them. */
584
585 static rtx
586 expand_builtin_setjmp (arglist, target)
587 tree arglist;
588 rtx target;
589 {
590 rtx buf_addr, next_lab, cont_lab;
591
592 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
593 return NULL_RTX;
594
595 if (target == 0 || GET_CODE (target) != REG
596 || REGNO (target) < FIRST_PSEUDO_REGISTER)
597 target = gen_reg_rtx (TYPE_MODE (integer_type_node));
598
599 buf_addr = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
600
601 next_lab = gen_label_rtx ();
602 cont_lab = gen_label_rtx ();
603
604 expand_builtin_setjmp_setup (buf_addr, next_lab);
605
606 /* Set TARGET to zero and branch to the continue label. */
607 emit_move_insn (target, const0_rtx);
608 emit_jump_insn (gen_jump (cont_lab));
609 emit_barrier ();
610 emit_label (next_lab);
611
612 expand_builtin_setjmp_receiver (next_lab);
613
614 /* Set TARGET to one. */
615 emit_move_insn (target, const1_rtx);
616 emit_label (cont_lab);
617
618 /* Tell flow about the strange goings on. Putting `next_lab' on
619 `nonlocal_goto_handler_labels' indicates that function
620 calls may traverse the arc back to this label. */
621
622 current_function_has_nonlocal_label = 1;
623 nonlocal_goto_handler_labels
624 = gen_rtx_EXPR_LIST (VOIDmode, next_lab, nonlocal_goto_handler_labels);
625
626 return target;
627 }
628
629 /* __builtin_longjmp is passed a pointer to an array of five words (not
630 all will be used on all machines). It operates similarly to the C
631 library function of the same name, but is more efficient. Much of
632 the code below is copied from the handling of non-local gotos.
633
634 NOTE: This is intended for use by GNAT and the exception handling
635 scheme in the compiler and will only work in the method used by
636 them. */
637
638 void
639 expand_builtin_longjmp (buf_addr, value)
640 rtx buf_addr, value;
641 {
642 rtx fp, lab, stack, insn;
643 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
644
645 if (setjmp_alias_set == -1)
646 setjmp_alias_set = new_alias_set ();
647
648 #ifdef POINTERS_EXTEND_UNSIGNED
649 if (GET_MODE (buf_addr) != Pmode)
650 buf_addr = convert_memory_address (Pmode, buf_addr);
651 #endif
652
653 buf_addr = force_reg (Pmode, buf_addr);
654
655 /* We used to store value in static_chain_rtx, but that fails if pointers
656 are smaller than integers. We instead require that the user must pass
657 a second argument of 1, because that is what builtin_setjmp will
658 return. This also makes EH slightly more efficient, since we are no
659 longer copying around a value that we don't care about. */
660 if (value != const1_rtx)
661 abort ();
662
663 current_function_calls_longjmp = 1;
664
665 #ifdef HAVE_builtin_longjmp
666 if (HAVE_builtin_longjmp)
667 emit_insn (gen_builtin_longjmp (buf_addr));
668 else
669 #endif
670 {
671 fp = gen_rtx_MEM (Pmode, buf_addr);
672 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
673 GET_MODE_SIZE (Pmode)));
674
675 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
676 2 * GET_MODE_SIZE (Pmode)));
677 set_mem_alias_set (fp, setjmp_alias_set);
678 set_mem_alias_set (lab, setjmp_alias_set);
679 set_mem_alias_set (stack, setjmp_alias_set);
680
681 /* Pick up FP, label, and SP from the block and jump. This code is
682 from expand_goto in stmt.c; see there for detailed comments. */
683 #if HAVE_nonlocal_goto
684 if (HAVE_nonlocal_goto)
685 /* We have to pass a value to the nonlocal_goto pattern that will
686 get copied into the static_chain pointer, but it does not matter
687 what that value is, because builtin_setjmp does not use it. */
688 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
689 else
690 #endif
691 {
692 lab = copy_to_reg (lab);
693
694 emit_move_insn (hard_frame_pointer_rtx, fp);
695 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
696
697 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
698 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
699 emit_indirect_jump (lab);
700 }
701 }
702
703 /* Search backwards and mark the jump insn as a non-local goto.
704 Note that this precludes the use of __builtin_longjmp to a
705 __builtin_setjmp target in the same function. However, we've
706 already cautioned the user that these functions are for
707 internal exception handling use only. */
708 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
709 {
710 if (GET_CODE (insn) == JUMP_INSN)
711 {
712 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
713 REG_NOTES (insn));
714 break;
715 }
716 else if (GET_CODE (insn) == CALL_INSN)
717 break;
718 }
719 }
720
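/* Illustrative sketch, not part of builtins.c: the internal-use interface
   expanded above.  The buffer is the five-word array the comments describe,
   and the second __builtin_longjmp argument must be the constant 1, as the
   abort () above enforces.  All names here are hypothetical.  */
#if 0
static void *jump_buffer[5];

static void
raise_example (void)
{
  __builtin_longjmp (jump_buffer, 1);
}

static int
catch_example (void (*body) (void))
{
  if (__builtin_setjmp (jump_buffer))
    return 1;                   /* Reached via __builtin_longjmp.  */
  body ();
  return 0;
}
#endif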
721 /* Expand a call to __builtin_prefetch. For a target that does not support
722 data prefetch, evaluate the memory address argument in case it has side
723 effects. */
724
725 static void
726 expand_builtin_prefetch (arglist)
727 tree arglist;
728 {
729 tree arg0, arg1, arg2;
730 rtx op0, op1, op2;
731
732 if (!validate_arglist (arglist, POINTER_TYPE, 0))
733 return;
734
735 arg0 = TREE_VALUE (arglist);
736 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
737 zero (read) and argument 2 (locality) defaults to 3 (high degree of
738 locality). */
739 if (TREE_CHAIN (arglist))
740 {
741 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
742 if (TREE_CHAIN (TREE_CHAIN (arglist)))
743 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
744 else
745 arg2 = build_int_2 (3, 0);
746 }
747 else
748 {
749 arg1 = integer_zero_node;
750 arg2 = build_int_2 (3, 0);
751 }
752
753 /* Argument 0 is an address. */
754 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
755
756 /* Argument 1 (read/write flag) must be a compile-time constant int. */
757 if (TREE_CODE (arg1) != INTEGER_CST)
758 {
759 error ("second arg to `__builtin_prefetch' must be a constant");
760 arg1 = integer_zero_node;
761 }
762 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
763 /* Argument 1 must be either zero or one. */
764 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
765 {
766 warning ("invalid second arg to __builtin_prefetch; using zero");
767 op1 = const0_rtx;
768 }
769
770 /* Argument 2 (locality) must be a compile-time constant int. */
771 if (TREE_CODE (arg2) != INTEGER_CST)
772 {
773 error ("third arg to `__builtin_prefetch' must be a constant");
774 arg2 = integer_zero_node;
775 }
776 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
777 /* Argument 2 must be 0, 1, 2, or 3. */
778 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
779 {
780 warning ("invalid third arg to __builtin_prefetch; using zero");
781 op2 = const0_rtx;
782 }
783
784 #ifdef HAVE_prefetch
785 if (HAVE_prefetch)
786 {
787 if (! (*insn_data[(int)CODE_FOR_prefetch].operand[0].predicate)
788 (op0,
789 insn_data[(int)CODE_FOR_prefetch].operand[0].mode))
790 op0 = force_reg (Pmode, op0);
791 emit_insn (gen_prefetch (op0, op1, op2));
792 }
793 else
794 #endif
795 op0 = protect_from_queue (op0, 0);
796 /* Don't do anything with direct references to volatile memory, but
797 generate code to handle other side effects. */
798 if (GET_CODE (op0) != MEM && side_effects_p (op0))
799 emit_insn (op0);
800 }
801
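/* Illustrative sketch, not part of builtins.c: a source-level use of
   __builtin_prefetch matching the argument checks above.  The loop and
   names are hypothetical.  */
#if 0
static long
sum_with_prefetch (const long *p, int n)
{
  long s = 0;
  int i;

  for (i = 0; i < n; i++)
    {
      /* Second argument 0 = read, third argument 3 = high temporal
         locality; these are also the defaults when the arguments are
         omitted.  */
      __builtin_prefetch (&p[i + 16], 0, 3);
      s += p[i];
    }
  return s;
}
#endif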
802 /* Get a MEM rtx for expression EXP which is the address of an operand
803 to be used in a string instruction (cmpstrsi, movstrsi, ..). */
804
805 static rtx
806 get_memory_rtx (exp)
807 tree exp;
808 {
809 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_SUM);
810 rtx mem;
811
812 #ifdef POINTERS_EXTEND_UNSIGNED
813 if (GET_MODE (addr) != Pmode)
814 addr = convert_memory_address (Pmode, addr);
815 #endif
816
817 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
818
819 /* Get an expression we can use to find the attributes to assign to MEM.
820 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
821 we can. First remove any nops. */
822 while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
823 || TREE_CODE (exp) == NON_LVALUE_EXPR)
824 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
825 exp = TREE_OPERAND (exp, 0);
826
827 if (TREE_CODE (exp) == ADDR_EXPR)
828 {
829 exp = TREE_OPERAND (exp, 0);
830 set_mem_attributes (mem, exp, 0);
831 }
832 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
833 {
834 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
835 /* memcpy, memset and other builtin stringops can alias with anything. */
836 set_mem_alias_set (mem, 0);
837 }
838
839 return mem;
840 }
841 \f
842 /* Built-in functions to perform an untyped call and return. */
843
844 /* For each register that may be used for calling a function, this
845 gives a mode used to copy the register's value. VOIDmode indicates
846 the register is not used for calling a function. If the machine
847 has register windows, this gives only the outbound registers.
848 INCOMING_REGNO gives the corresponding inbound register. */
849 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
850
851 /* For each register that may be used for returning values, this gives
852 a mode used to copy the register's value. VOIDmode indicates the
853 register is not used for returning values. If the machine has
854 register windows, this gives only the outbound registers.
855 INCOMING_REGNO gives the corresponding inbound register. */
856 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
857
858 /* For each register that may be used for calling a function, this
859 gives the offset of that register into the block returned by
860 __builtin_apply_args. 0 indicates that the register is not
861 used for calling a function. */
862 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
863
864 /* Return the offset of register REGNO into the block returned by
865 __builtin_apply_args. This is not declared static, since it is
866 needed in objc-act.c. */
867
868 int
869 apply_args_register_offset (regno)
870 int regno;
871 {
872 apply_args_size ();
873
874 /* Arguments are always put in outgoing registers (in the argument
875 block) if doing so makes sense. */
876 #ifdef OUTGOING_REGNO
877 regno = OUTGOING_REGNO(regno);
878 #endif
879 return apply_args_reg_offset[regno];
880 }
881
882 /* Return the size required for the block returned by __builtin_apply_args,
883 and initialize apply_args_mode. */
884
885 static int
886 apply_args_size ()
887 {
888 static int size = -1;
889 int align;
890 unsigned int regno;
891 enum machine_mode mode;
892
893 /* The values computed by this function never change. */
894 if (size < 0)
895 {
896 /* The first value is the incoming arg-pointer. */
897 size = GET_MODE_SIZE (Pmode);
898
899 /* The second value is the structure value address unless this is
900 passed as an "invisible" first argument. */
901 if (struct_value_rtx)
902 size += GET_MODE_SIZE (Pmode);
903
904 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
905 if (FUNCTION_ARG_REGNO_P (regno))
906 {
907 /* Search for the proper mode for copying this register's
908 value. I'm not sure this is right, but it works so far. */
909 enum machine_mode best_mode = VOIDmode;
910
911 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
912 mode != VOIDmode;
913 mode = GET_MODE_WIDER_MODE (mode))
914 if (HARD_REGNO_MODE_OK (regno, mode)
915 && HARD_REGNO_NREGS (regno, mode) == 1)
916 best_mode = mode;
917
918 if (best_mode == VOIDmode)
919 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
920 mode != VOIDmode;
921 mode = GET_MODE_WIDER_MODE (mode))
922 if (HARD_REGNO_MODE_OK (regno, mode)
923 && have_insn_for (SET, mode))
924 best_mode = mode;
925
926 if (best_mode == VOIDmode)
927 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
928 mode != VOIDmode;
929 mode = GET_MODE_WIDER_MODE (mode))
930 if (HARD_REGNO_MODE_OK (regno, mode)
931 && have_insn_for (SET, mode))
932 best_mode = mode;
933
934 if (best_mode == VOIDmode)
935 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
936 mode != VOIDmode;
937 mode = GET_MODE_WIDER_MODE (mode))
938 if (HARD_REGNO_MODE_OK (regno, mode)
939 && have_insn_for (SET, mode))
940 best_mode = mode;
941
942 mode = best_mode;
943 if (mode == VOIDmode)
944 abort ();
945
946 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
947 if (size % align != 0)
948 size = CEIL (size, align) * align;
949 apply_args_reg_offset[regno] = size;
950 size += GET_MODE_SIZE (mode);
951 apply_args_mode[regno] = mode;
952 }
953 else
954 {
955 apply_args_mode[regno] = VOIDmode;
956 apply_args_reg_offset[regno] = 0;
957 }
958 }
959 return size;
960 }
961
962 /* Return the size required for the block returned by __builtin_apply,
963 and initialize apply_result_mode. */
964
965 static int
966 apply_result_size ()
967 {
968 static int size = -1;
969 int align, regno;
970 enum machine_mode mode;
971
972 /* The values computed by this function never change. */
973 if (size < 0)
974 {
975 size = 0;
976
977 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
978 if (FUNCTION_VALUE_REGNO_P (regno))
979 {
980 /* Search for the proper mode for copying this register's
981 value. I'm not sure this is right, but it works so far. */
982 enum machine_mode best_mode = VOIDmode;
983
984 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
985 mode != TImode;
986 mode = GET_MODE_WIDER_MODE (mode))
987 if (HARD_REGNO_MODE_OK (regno, mode))
988 best_mode = mode;
989
990 if (best_mode == VOIDmode)
991 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
992 mode != VOIDmode;
993 mode = GET_MODE_WIDER_MODE (mode))
994 if (HARD_REGNO_MODE_OK (regno, mode)
995 && have_insn_for (SET, mode))
996 best_mode = mode;
997
998 if (best_mode == VOIDmode)
999 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
1000 mode != VOIDmode;
1001 mode = GET_MODE_WIDER_MODE (mode))
1002 if (HARD_REGNO_MODE_OK (regno, mode)
1003 && have_insn_for (SET, mode))
1004 best_mode = mode;
1005
1006 if (best_mode == VOIDmode)
1007 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
1008 mode != VOIDmode;
1009 mode = GET_MODE_WIDER_MODE (mode))
1010 if (HARD_REGNO_MODE_OK (regno, mode)
1011 && have_insn_for (SET, mode))
1012 best_mode = mode;
1013
1014 mode = best_mode;
1015 if (mode == VOIDmode)
1016 abort ();
1017
1018 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1019 if (size % align != 0)
1020 size = CEIL (size, align) * align;
1021 size += GET_MODE_SIZE (mode);
1022 apply_result_mode[regno] = mode;
1023 }
1024 else
1025 apply_result_mode[regno] = VOIDmode;
1026
1027 /* Allow targets that use untyped_call and untyped_return to override
1028 the size so that machine-specific information can be stored here. */
1029 #ifdef APPLY_RESULT_SIZE
1030 size = APPLY_RESULT_SIZE;
1031 #endif
1032 }
1033 return size;
1034 }
1035
1036 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1037 /* Create a vector describing the result block RESULT. If SAVEP is true,
1038 the result block is used to save the values; otherwise it is used to
1039 restore the values. */
1040
1041 static rtx
1042 result_vector (savep, result)
1043 int savep;
1044 rtx result;
1045 {
1046 int regno, size, align, nelts;
1047 enum machine_mode mode;
1048 rtx reg, mem;
1049 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
1050
1051 size = nelts = 0;
1052 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1053 if ((mode = apply_result_mode[regno]) != VOIDmode)
1054 {
1055 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1056 if (size % align != 0)
1057 size = CEIL (size, align) * align;
1058 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1059 mem = adjust_address (result, mode, size);
1060 savevec[nelts++] = (savep
1061 ? gen_rtx_SET (VOIDmode, mem, reg)
1062 : gen_rtx_SET (VOIDmode, reg, mem));
1063 size += GET_MODE_SIZE (mode);
1064 }
1065 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1066 }
1067 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1068
1069 /* Save the state required to perform an untyped call with the same
1070 arguments as were passed to the current function. */
1071
1072 static rtx
1073 expand_builtin_apply_args_1 ()
1074 {
1075 rtx registers;
1076 int size, align, regno;
1077 enum machine_mode mode;
1078
1079 /* Create a block where the arg-pointer, structure value address,
1080 and argument registers can be saved. */
1081 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1082
1083 /* Walk past the arg-pointer and structure value address. */
1084 size = GET_MODE_SIZE (Pmode);
1085 if (struct_value_rtx)
1086 size += GET_MODE_SIZE (Pmode);
1087
1088 /* Save each register used in calling a function to the block. */
1089 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1090 if ((mode = apply_args_mode[regno]) != VOIDmode)
1091 {
1092 rtx tem;
1093
1094 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1095 if (size % align != 0)
1096 size = CEIL (size, align) * align;
1097
1098 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1099
1100 emit_move_insn (adjust_address (registers, mode, size), tem);
1101 size += GET_MODE_SIZE (mode);
1102 }
1103
1104 /* Save the arg pointer to the block. */
1105 emit_move_insn (adjust_address (registers, Pmode, 0),
1106 copy_to_reg (virtual_incoming_args_rtx));
1107 size = GET_MODE_SIZE (Pmode);
1108
1109 /* Save the structure value address unless this is passed as an
1110 "invisible" first argument. */
1111 if (struct_value_incoming_rtx)
1112 {
1113 emit_move_insn (adjust_address (registers, Pmode, size),
1114 copy_to_reg (struct_value_incoming_rtx));
1115 size += GET_MODE_SIZE (Pmode);
1116 }
1117
1118 /* Return the address of the block. */
1119 return copy_addr_to_reg (XEXP (registers, 0));
1120 }
1121
1122 /* __builtin_apply_args returns a block of memory allocated on
1123 the stack into which is stored the arg pointer, structure
1124 value address, static chain, and all the registers that might
1125 possibly be used in performing a function call. The code is
1126 moved to the start of the function so the incoming values are
1127 saved. */
1128
1129 static rtx
1130 expand_builtin_apply_args ()
1131 {
1132 /* Don't do __builtin_apply_args more than once in a function.
1133 Save the result of the first call and reuse it. */
1134 if (apply_args_value != 0)
1135 return apply_args_value;
1136 {
1137 /* When this function is called, it means that registers must be
1138 saved on entry to this function. So we migrate the
1139 call to the first insn of this function. */
1140 rtx temp;
1141 rtx seq;
1142
1143 start_sequence ();
1144 temp = expand_builtin_apply_args_1 ();
1145 seq = get_insns ();
1146 end_sequence ();
1147
1148 apply_args_value = temp;
1149
1150 /* Put the sequence after the NOTE that starts the function.
1151 If this is inside a SEQUENCE, make the outer-level insn
1152 chain current, so the code is placed at the start of the
1153 function. */
1154 push_topmost_sequence ();
1155 emit_insns_before (seq, NEXT_INSN (get_insns ()));
1156 pop_topmost_sequence ();
1157 return temp;
1158 }
1159 }
1160
1161 /* Perform an untyped call and save the state required to perform an
1162 untyped return of whatever value was returned by the given function. */
1163
1164 static rtx
1165 expand_builtin_apply (function, arguments, argsize)
1166 rtx function, arguments, argsize;
1167 {
1168 int size, align, regno;
1169 enum machine_mode mode;
1170 rtx incoming_args, result, reg, dest, src, call_insn;
1171 rtx old_stack_level = 0;
1172 rtx call_fusage = 0;
1173
1174 #ifdef POINTERS_EXTEND_UNSIGNED
1175 if (GET_MODE (arguments) != Pmode)
1176 arguments = convert_memory_address (Pmode, arguments);
1177 #endif
1178
1179 /* Create a block where the return registers can be saved. */
1180 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1181
1182 /* Fetch the arg pointer from the ARGUMENTS block. */
1183 incoming_args = gen_reg_rtx (Pmode);
1184 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1185 #ifndef STACK_GROWS_DOWNWARD
1186 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1187 incoming_args, 0, OPTAB_LIB_WIDEN);
1188 #endif
1189
1190 /* Perform postincrements before actually calling the function. */
1191 emit_queue ();
1192
1193 /* Push a new argument block and copy the arguments. Do not allow
1194 the (potential) memcpy call below to interfere with our stack
1195 manipulations. */
1196 do_pending_stack_adjust ();
1197 NO_DEFER_POP;
1198
1199 /* Save the stack with the nonlocal save mechanism, if available. */
1200 #ifdef HAVE_save_stack_nonlocal
1201 if (HAVE_save_stack_nonlocal)
1202 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1203 else
1204 #endif
1205 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1206
1207 /* Push a block of memory onto the stack to store the memory arguments.
1208 Save the address in a register, and copy the memory arguments. ??? I
1209 haven't figured out how the calling convention macros affect this,
1210 but it's likely that the source and/or destination addresses in
1211 the block copy will need updating in machine specific ways. */
1212 dest = allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1213 dest = gen_rtx_MEM (BLKmode, dest);
1214 set_mem_align (dest, PARM_BOUNDARY);
1215 src = gen_rtx_MEM (BLKmode, incoming_args);
1216 set_mem_align (src, PARM_BOUNDARY);
1217 emit_block_move (dest, src, argsize);
1218
1219 /* Refer to the argument block. */
1220 apply_args_size ();
1221 arguments = gen_rtx_MEM (BLKmode, arguments);
1222 set_mem_align (arguments, PARM_BOUNDARY);
1223
1224 /* Walk past the arg-pointer and structure value address. */
1225 size = GET_MODE_SIZE (Pmode);
1226 if (struct_value_rtx)
1227 size += GET_MODE_SIZE (Pmode);
1228
1229 /* Restore each of the registers previously saved. Make USE insns
1230 for each of these registers for use in making the call. */
1231 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1232 if ((mode = apply_args_mode[regno]) != VOIDmode)
1233 {
1234 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1235 if (size % align != 0)
1236 size = CEIL (size, align) * align;
1237 reg = gen_rtx_REG (mode, regno);
1238 emit_move_insn (reg, adjust_address (arguments, mode, size));
1239 use_reg (&call_fusage, reg);
1240 size += GET_MODE_SIZE (mode);
1241 }
1242
1243 /* Restore the structure value address unless this is passed as an
1244 "invisible" first argument. */
1245 size = GET_MODE_SIZE (Pmode);
1246 if (struct_value_rtx)
1247 {
1248 rtx value = gen_reg_rtx (Pmode);
1249 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1250 emit_move_insn (struct_value_rtx, value);
1251 if (GET_CODE (struct_value_rtx) == REG)
1252 use_reg (&call_fusage, struct_value_rtx);
1253 size += GET_MODE_SIZE (Pmode);
1254 }
1255
1256 /* All arguments and registers used for the call are set up by now! */
1257 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0, 0);
1258
1259 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1260 and we don't want to load it into a register as an optimization,
1261 because prepare_call_address already did it if it should be done. */
1262 if (GET_CODE (function) != SYMBOL_REF)
1263 function = memory_address (FUNCTION_MODE, function);
1264
1265 /* Generate the actual call instruction and save the return value. */
1266 #ifdef HAVE_untyped_call
1267 if (HAVE_untyped_call)
1268 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1269 result, result_vector (1, result)));
1270 else
1271 #endif
1272 #ifdef HAVE_call_value
1273 if (HAVE_call_value)
1274 {
1275 rtx valreg = 0;
1276
1277 /* Locate the unique return register. It is not possible to
1278 express a call that sets more than one return register using
1279 call_value; use untyped_call for that. In fact, untyped_call
1280 only needs to save the return registers in the given block. */
1281 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1282 if ((mode = apply_result_mode[regno]) != VOIDmode)
1283 {
1284 if (valreg)
1285 abort (); /* HAVE_untyped_call required. */
1286 valreg = gen_rtx_REG (mode, regno);
1287 }
1288
1289 emit_call_insn (GEN_CALL_VALUE (valreg,
1290 gen_rtx_MEM (FUNCTION_MODE, function),
1291 const0_rtx, NULL_RTX, const0_rtx));
1292
1293 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1294 }
1295 else
1296 #endif
1297 abort ();
1298
1299 /* Find the CALL insn we just emitted. */
1300 for (call_insn = get_last_insn ();
1301 call_insn && GET_CODE (call_insn) != CALL_INSN;
1302 call_insn = PREV_INSN (call_insn))
1303 ;
1304
1305 if (! call_insn)
1306 abort ();
1307
1308 /* Put the register usage information on the CALL. If there is already
1309 some usage information, put ours at the end. */
1310 if (CALL_INSN_FUNCTION_USAGE (call_insn))
1311 {
1312 rtx link;
1313
1314 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
1315 link = XEXP (link, 1))
1316 ;
1317
1318 XEXP (link, 1) = call_fusage;
1319 }
1320 else
1321 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
1322
1323 /* Restore the stack. */
1324 #ifdef HAVE_save_stack_nonlocal
1325 if (HAVE_save_stack_nonlocal)
1326 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1327 else
1328 #endif
1329 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1330
1331 OK_DEFER_POP;
1332
1333 /* Return the address of the result block. */
1334 return copy_addr_to_reg (XEXP (result, 0));
1335 }
1336
1337 /* Perform an untyped return. */
1338
1339 static void
1340 expand_builtin_return (result)
1341 rtx result;
1342 {
1343 int size, align, regno;
1344 enum machine_mode mode;
1345 rtx reg;
1346 rtx call_fusage = 0;
1347
1348 #ifdef POINTERS_EXTEND_UNSIGNED
1349 if (GET_MODE (result) != Pmode)
1350 result = convert_memory_address (Pmode, result);
1351 #endif
1352
1353 apply_result_size ();
1354 result = gen_rtx_MEM (BLKmode, result);
1355
1356 #ifdef HAVE_untyped_return
1357 if (HAVE_untyped_return)
1358 {
1359 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1360 emit_barrier ();
1361 return;
1362 }
1363 #endif
1364
1365 /* Restore the return value and note that each value is used. */
1366 size = 0;
1367 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1368 if ((mode = apply_result_mode[regno]) != VOIDmode)
1369 {
1370 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1371 if (size % align != 0)
1372 size = CEIL (size, align) * align;
1373 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1374 emit_move_insn (reg, adjust_address (result, mode, size));
1375
1376 push_to_sequence (call_fusage);
1377 emit_insn (gen_rtx_USE (VOIDmode, reg));
1378 call_fusage = get_insns ();
1379 end_sequence ();
1380 size += GET_MODE_SIZE (mode);
1381 }
1382
1383 /* Put the USE insns before the return. */
1384 emit_insns (call_fusage);
1385
1386 /* Return whatever values were restored by jumping directly to the end
1387 of the function. */
1388 expand_null_return ();
1389 }
1390
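/* Illustrative sketch, not part of builtins.c: the untyped call and return
   builtins implemented above, used to forward a call without knowing its
   signature.  The names and the 64-byte argument-block size are
   hypothetical.  */
#if 0
extern double target_function ();

double
forward_call ()
{
  void *args = __builtin_apply_args ();
  void *result = __builtin_apply ((void (*) ()) target_function, args, 64);
  __builtin_return (result);
}
#endif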
1391 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1392
1393 static enum type_class
1394 type_to_class (type)
1395 tree type;
1396 {
1397 switch (TREE_CODE (type))
1398 {
1399 case VOID_TYPE: return void_type_class;
1400 case INTEGER_TYPE: return integer_type_class;
1401 case CHAR_TYPE: return char_type_class;
1402 case ENUMERAL_TYPE: return enumeral_type_class;
1403 case BOOLEAN_TYPE: return boolean_type_class;
1404 case POINTER_TYPE: return pointer_type_class;
1405 case REFERENCE_TYPE: return reference_type_class;
1406 case OFFSET_TYPE: return offset_type_class;
1407 case REAL_TYPE: return real_type_class;
1408 case COMPLEX_TYPE: return complex_type_class;
1409 case FUNCTION_TYPE: return function_type_class;
1410 case METHOD_TYPE: return method_type_class;
1411 case RECORD_TYPE: return record_type_class;
1412 case UNION_TYPE:
1413 case QUAL_UNION_TYPE: return union_type_class;
1414 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1415 ? string_type_class : array_type_class);
1416 case SET_TYPE: return set_type_class;
1417 case FILE_TYPE: return file_type_class;
1418 case LANG_TYPE: return lang_type_class;
1419 default: return no_type_class;
1420 }
1421 }
1422
1423 /* Expand a call to __builtin_classify_type with arguments found in
1424 ARGLIST. */
1425
1426 static rtx
1427 expand_builtin_classify_type (arglist)
1428 tree arglist;
1429 {
1430 if (arglist != 0)
1431 return GEN_INT (type_to_class (TREE_TYPE (TREE_VALUE (arglist))));
1432 return GEN_INT (no_type_class);
1433 }
1434
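/* Illustrative sketch, not part of builtins.c: __builtin_classify_type maps
   the type of its argument onto the enumeration used above.  */
#if 0
int ic = __builtin_classify_type (0);           /* integer_type_class  */
int pc = __builtin_classify_type ((void *) 0);  /* pointer_type_class  */
int rc = __builtin_classify_type (0.0);         /* real_type_class     */
#endif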
1435 /* Expand expression EXP, which is a call to __builtin_constant_p. */
1436
1437 static rtx
1438 expand_builtin_constant_p (exp)
1439 tree exp;
1440 {
1441 tree arglist = TREE_OPERAND (exp, 1);
1442 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
1443 rtx tmp;
1444
1445 if (arglist == 0)
1446 return const0_rtx;
1447 arglist = TREE_VALUE (arglist);
1448
1449 /* We have taken care of the easy cases during constant folding. This
1450 case is not obvious, so emit (constant_p_rtx (ARGLIST)) and let CSE get a
1451 chance to see if it can deduce whether ARGLIST is constant. */
1452
1453 tmp = expand_expr (arglist, NULL_RTX, VOIDmode, 0);
1454 tmp = gen_rtx_CONSTANT_P_RTX (value_mode, tmp);
1455 return tmp;
1456 }
1457
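/* Illustrative sketch, not part of builtins.c: a common use of
   __builtin_constant_p, picking a cheap compile-time path when the argument
   folds to a constant and a runtime helper otherwise.  The helper and macro
   names are hypothetical.  */
#if 0
static int
is_pow2_runtime (unsigned long x)
{
  return x != 0 && (x & (x - 1)) == 0;
}

#define IS_POW2(x)                                      \
  (__builtin_constant_p (x)                             \
   ? ((x) != 0 && ((x) & ((x) - 1)) == 0)               \
   : is_pow2_runtime (x))
#endif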
1458 /* Expand a call to one of the builtin math functions (sin, cos, or sqrt).
1459 Return 0 if a normal call should be emitted rather than expanding the
1460 function in-line. EXP is the expression that is a call to the builtin
1461 function; if convenient, the result should be placed in TARGET.
1462 SUBTARGET may be used as the target for computing one of EXP's operands. */
1463
1464 static rtx
1465 expand_builtin_mathfn (exp, target, subtarget)
1466 tree exp;
1467 rtx target, subtarget;
1468 {
1469 optab builtin_optab;
1470 rtx op0, insns;
1471 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1472 tree arglist = TREE_OPERAND (exp, 1);
1473 enum machine_mode argmode;
1474
1475 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
1476 return 0;
1477
1478 /* Stabilize and compute the argument. */
1479 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
1480 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
1481 {
1482 exp = copy_node (exp);
1483 TREE_OPERAND (exp, 1) = arglist;
1484 /* Wrap the computation of the argument in a SAVE_EXPR. That
1485 way, if we need to expand the argument again (as in the
1486 flag_errno_math case below where we cannot directly set
1487 errno), we will not perform side-effects more than once.
1488 Note that here we're mutating the original EXP as well as the
1489 copy; that's the right thing to do in case the original EXP
1490 is expanded later. */
1491 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
1492 arglist = copy_node (arglist);
1493 }
1494 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
1495
1496 /* Make a suitable register to place result in. */
1497 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
1498
1499 emit_queue ();
1500 start_sequence ();
1501
1502 switch (DECL_FUNCTION_CODE (fndecl))
1503 {
1504 case BUILT_IN_SIN:
1505 case BUILT_IN_SINF:
1506 case BUILT_IN_SINL:
1507 builtin_optab = sin_optab; break;
1508 case BUILT_IN_COS:
1509 case BUILT_IN_COSF:
1510 case BUILT_IN_COSL:
1511 builtin_optab = cos_optab; break;
1512 case BUILT_IN_SQRT:
1513 case BUILT_IN_SQRTF:
1514 case BUILT_IN_SQRTL:
1515 builtin_optab = sqrt_optab; break;
1516 default:
1517 abort ();
1518 }
1519
1520 /* Compute into TARGET.
1521 Set TARGET to wherever the result comes back. */
1522 argmode = TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist)));
1523 target = expand_unop (argmode, builtin_optab, op0, target, 0);
1524
1525 /* If we were unable to expand via the builtin, stop the
1526 sequence (without outputting the insns) and return 0, causing
1527 a call to the library function. */
1528 if (target == 0)
1529 {
1530 end_sequence ();
1531 return 0;
1532 }
1533
1534 /* If errno must be maintained, we must set it to EDOM for NaN results. */
1535
1536 if (flag_errno_math && HONOR_NANS (argmode))
1537 {
1538 rtx lab1;
1539
1540 lab1 = gen_label_rtx ();
1541
1542 /* Test the result; if it is NaN, set errno=EDOM because
1543 the argument was not in the domain. */
1544 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1545 0, lab1);
1546
1547 #ifdef TARGET_EDOM
1548 {
1549 #ifdef GEN_ERRNO_RTX
1550 rtx errno_rtx = GEN_ERRNO_RTX;
1551 #else
1552 rtx errno_rtx
1553 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1554 #endif
1555
1556 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1557 }
1558 #else
1559 /* We can't set errno=EDOM directly; let the library call do it.
1560 Pop the arguments right away in case the call gets deleted. */
1561 NO_DEFER_POP;
1562 expand_call (exp, target, 0);
1563 OK_DEFER_POP;
1564 #endif
1565
1566 emit_label (lab1);
1567 }
1568
1569 /* Output the entire sequence. */
1570 insns = get_insns ();
1571 end_sequence ();
1572 emit_insns (insns);
1573
1574 return target;
1575 }
1576
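/* Illustrative sketch, not part of builtins.c: the errno behaviour the code
   above preserves when flag_errno_math is in effect (the default without
   -ffast-math).  Whether EDOM is actually observed also depends on the
   target's libm reporting errors via errno.  */
#if 0
#include <errno.h>
#include <math.h>

static int
sqrt_domain_error_sets_edom (void)
{
  double r;

  errno = 0;
  r = sqrt (-1.0);              /* Argument outside the domain: NaN.  */
  (void) r;
  return errno == EDOM;
}
#endif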
1577 /* Expand expression EXP which is a call to the strlen builtin. Return 0
1578 if we failed; the caller should emit a normal call. Otherwise
1579 try to get the result in TARGET, if convenient. */
1580
1581 static rtx
1582 expand_builtin_strlen (exp, target)
1583 tree exp;
1584 rtx target;
1585 {
1586 tree arglist = TREE_OPERAND (exp, 1);
1587 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
1588
1589 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
1590 return 0;
1591 else
1592 {
1593 rtx pat;
1594 tree src = TREE_VALUE (arglist);
1595
1596 int align
1597 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
1598
1599 rtx result, src_reg, char_rtx, before_strlen;
1600 enum machine_mode insn_mode = value_mode, char_mode;
1601 enum insn_code icode = CODE_FOR_nothing;
1602
1603 /* If SRC is not a pointer type, don't do this operation inline. */
1604 if (align == 0)
1605 return 0;
1606
1607 /* Bail out if we can't compute strlen in the right mode. */
1608 while (insn_mode != VOIDmode)
1609 {
1610 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
1611 if (icode != CODE_FOR_nothing)
1612 break;
1613
1614 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
1615 }
1616 if (insn_mode == VOIDmode)
1617 return 0;
1618
1619 /* Make a place to write the result of the instruction. */
1620 result = target;
1621 if (! (result != 0
1622 && GET_CODE (result) == REG
1623 && GET_MODE (result) == insn_mode
1624 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
1625 result = gen_reg_rtx (insn_mode);
1626
1627 /* Make a place to hold the source address. We will not expand
1628 the actual source until we are sure that the expansion will
1629 not fail -- there are trees that cannot be expanded twice. */
1630 src_reg = gen_reg_rtx (Pmode);
1631
1632 /* Mark the beginning of the strlen sequence so we can emit the
1633 source operand later. */
1634 before_strlen = get_last_insn();
1635
1636 char_rtx = const0_rtx;
1637 char_mode = insn_data[(int) icode].operand[2].mode;
1638 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
1639 char_mode))
1640 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
1641
1642 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
1643 char_rtx, GEN_INT (align));
1644 if (! pat)
1645 return 0;
1646 emit_insn (pat);
1647
1648 /* Now that we are assured of success, expand the source. */
1649 start_sequence ();
1650 pat = memory_address (BLKmode,
1651 expand_expr (src, src_reg, ptr_mode, EXPAND_SUM));
1652 if (pat != src_reg)
1653 emit_move_insn (src_reg, pat);
1654 pat = gen_sequence ();
1655 end_sequence ();
1656
1657 if (before_strlen)
1658 emit_insn_after (pat, before_strlen);
1659 else
1660 emit_insn_before (pat, get_insns ());
1661
1662 /* Return the value in the proper mode for this function. */
1663 if (GET_MODE (result) == value_mode)
1664 target = result;
1665 else if (target != 0)
1666 convert_move (target, result, 0);
1667 else
1668 target = convert_to_mode (value_mode, result, 0);
1669
1670 return target;
1671 }
1672 }
1673
1674 /* Expand a call to the strstr builtin. Return 0 if we failed; the
1675 caller should emit a normal call. Otherwise try to get the result
1676 in TARGET, if convenient (and in mode MODE if that's convenient). */
1677
1678 static rtx
1679 expand_builtin_strstr (arglist, target, mode)
1680 tree arglist;
1681 rtx target;
1682 enum machine_mode mode;
1683 {
1684 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1685 return 0;
1686 else
1687 {
1688 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
1689 tree fn;
1690 const char *p1, *p2;
1691
1692 p2 = c_getstr (s2);
1693 if (p2 == NULL)
1694 return 0;
1695
1696 p1 = c_getstr (s1);
1697 if (p1 != NULL)
1698 {
1699 const char *r = strstr (p1, p2);
1700
1701 if (r == NULL)
1702 return const0_rtx;
1703
1704 /* Return an offset into the constant string argument. */
1705 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
1706 s1, ssize_int (r - p1))),
1707 target, mode, EXPAND_NORMAL);
1708 }
1709
1710 if (p2[0] == '\0')
1711 return expand_expr (s1, target, mode, EXPAND_NORMAL);
1712
1713 if (p2[1] != '\0')
1714 return 0;
1715
1716 fn = built_in_decls[BUILT_IN_STRCHR];
1717 if (!fn)
1718 return 0;
1719
1720 /* New argument list transforming strstr(s1, s2) to
1721 strchr(s1, s2[0]). */
1722 arglist =
1723 build_tree_list (NULL_TREE, build_int_2 (p2[0], 0));
1724 arglist = tree_cons (NULL_TREE, s1, arglist);
1725 return expand_expr (build_function_call_expr (fn, arglist),
1726 target, mode, EXPAND_NORMAL);
1727 }
1728 }
1729
1730 /* Expand a call to the strchr builtin. Return 0 if we failed; the
1731 caller should emit a normal call. Otherwise try to get the result
1732 in TARGET, if convenient (and in mode MODE if that's convenient). */
1733
1734 static rtx
1735 expand_builtin_strchr (arglist, target, mode)
1736 tree arglist;
1737 rtx target;
1738 enum machine_mode mode;
1739 {
1740 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
1741 return 0;
1742 else
1743 {
1744 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
1745 const char *p1;
1746
1747 if (TREE_CODE (s2) != INTEGER_CST)
1748 return 0;
1749
1750 p1 = c_getstr (s1);
1751 if (p1 != NULL)
1752 {
1753 char c;
1754 const char *r;
1755
1756 if (target_char_cast (s2, &c))
1757 return 0;
1758
1759 r = strchr (p1, c);
1760
1761 if (r == NULL)
1762 return const0_rtx;
1763
1764 /* Return an offset into the constant string argument. */
1765 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
1766 s1, ssize_int (r - p1))),
1767 target, mode, EXPAND_NORMAL);
1768 }
1769
1770 /* FIXME: Should use the strchrM optab here so that ports can optimize
1771 this. */
1772 return 0;
1773 }
1774 }
1775
1776 /* Expand a call to the strrchr builtin. Return 0 if we failed; the
1777 caller should emit a normal call. Otherwise try to get the result
1778 in TARGET, if convenient (and in mode MODE if that's convenient). */
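/* Informally: when both arguments are constant the result is folded to
   an offset into the string (or to 0 if the character does not occur),
   and otherwise only

       strrchr (s, '\0')  =>  strchr (s, '\0')

   is handled here, since both return a pointer to the terminating NUL;
   everything else becomes a real call to strrchr.  */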
1779
1780 static rtx
1781 expand_builtin_strrchr (arglist, target, mode)
1782 tree arglist;
1783 rtx target;
1784 enum machine_mode mode;
1785 {
1786 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
1787 return 0;
1788 else
1789 {
1790 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
1791 tree fn;
1792 const char *p1;
1793
1794 if (TREE_CODE (s2) != INTEGER_CST)
1795 return 0;
1796
1797 p1 = c_getstr (s1);
1798 if (p1 != NULL)
1799 {
1800 char c;
1801 const char *r;
1802
1803 if (target_char_cast (s2, &c))
1804 return 0;
1805
1806 r = strrchr (p1, c);
1807
1808 if (r == NULL)
1809 return const0_rtx;
1810
1811 /* Return an offset into the constant string argument. */
1812 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
1813 s1, ssize_int (r - p1))),
1814 target, mode, EXPAND_NORMAL);
1815 }
1816
1817 if (! integer_zerop (s2))
1818 return 0;
1819
1820 fn = built_in_decls[BUILT_IN_STRCHR];
1821 if (!fn)
1822 return 0;
1823
1824 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
1825 return expand_expr (build_function_call_expr (fn, arglist),
1826 target, mode, EXPAND_NORMAL);
1827 }
1828 }
1829
1830 /* Expand a call to the strpbrk builtin. Return 0 if we failed; the
1831 caller should emit a normal call. Otherwise try to get the result
1832 in TARGET, if convenient (and in mode MODE if that's convenient). */
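/* In effect this performs the source-level rewrites (informal sketch):

       strpbrk (s, "")      =>  (char *) 0     (s still evaluated for side effects)
       strpbrk (s, "c")     =>  strchr (s, 'c')
       strpbrk ("ab", "b")  =>  "ab" + 1       (both arguments constant)

   Anything else falls back to a real call to strpbrk.  */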
1833
1834 static rtx
1835 expand_builtin_strpbrk (arglist, target, mode)
1836 tree arglist;
1837 rtx target;
1838 enum machine_mode mode;
1839 {
1840 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1841 return 0;
1842 else
1843 {
1844 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
1845 tree fn;
1846 const char *p1, *p2;
1847
1848 p2 = c_getstr (s2);
1849 if (p2 == NULL)
1850 return 0;
1851
1852 p1 = c_getstr (s1);
1853 if (p1 != NULL)
1854 {
1855 const char *r = strpbrk (p1, p2);
1856
1857 if (r == NULL)
1858 return const0_rtx;
1859
1860 /* Return an offset into the constant string argument. */
1861 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
1862 s1, ssize_int (r - p1))),
1863 target, mode, EXPAND_NORMAL);
1864 }
1865
1866 if (p2[0] == '\0')
1867 {
1868 /* strpbrk(x, "") == NULL.
1869 Evaluate and ignore s1 in case it has
1870 side-effects. */
1871 expand_expr (s1, const0_rtx, VOIDmode, EXPAND_NORMAL);
1872 return const0_rtx;
1873 }
1874
1875 if (p2[1] != '\0')
1876 return 0; /* Really call strpbrk. */
1877
1878 fn = built_in_decls[BUILT_IN_STRCHR];
1879 if (!fn)
1880 return 0;
1881
1882 /* New argument list transforming strpbrk(s1, s2) to
1883 strchr(s1, s2[0]). */
1884 arglist =
1885 build_tree_list (NULL_TREE, build_int_2 (p2[0], 0));
1886 arglist = tree_cons (NULL_TREE, s1, arglist);
1887 return expand_expr (build_function_call_expr (fn, arglist),
1888 target, mode, EXPAND_NORMAL);
1889 }
1890 }
1891
1892 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
1893 bytes from constant string DATA + OFFSET and return it as target
1894 constant. */
1895
1896 static rtx
1897 builtin_memcpy_read_str (data, offset, mode)
1898 PTR data;
1899 HOST_WIDE_INT offset;
1900 enum machine_mode mode;
1901 {
1902 const char *str = (const char *) data;
1903
1904 if (offset < 0
1905 || ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
1906 > strlen (str) + 1))
1907 abort (); /* Attempt to read past the end of constant string. */
1908
1909 return c_readstr (str + offset, mode);
1910 }
1911
1912 /* Expand a call to the memcpy builtin, with arguments in ARGLIST.
1913 Return 0 if we failed; the caller should emit a normal call. Otherwise
1914 try to get the result in TARGET, if convenient (and in mode MODE if
1915 that's convenient). */
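/* Informal sketch of the constant-source case handled below: for a call
   such as

       memcpy (buf, "abc", 4);

   the four bytes (including the terminating NUL) can be emitted as
   immediate stores by store_by_pieces, provided the target allows it,
   so the string constant is never read from memory at run time.  Other
   cases use a block move or fall back to the library routine.  */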
1916
1917 static rtx
1918 expand_builtin_memcpy (arglist, target, mode)
1919 tree arglist;
1920 rtx target;
1921 enum machine_mode mode;
1922 {
1923 if (!validate_arglist (arglist,
1924 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
1925 return 0;
1926 else
1927 {
1928 tree dest = TREE_VALUE (arglist);
1929 tree src = TREE_VALUE (TREE_CHAIN (arglist));
1930 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
1931 const char *src_str;
1932
1933 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
1934 unsigned int dest_align
1935 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
1936 rtx dest_mem, src_mem, dest_addr, len_rtx;
1937
1938 /* If DEST is not a pointer type, call the normal function. */
1939 if (dest_align == 0)
1940 return 0;
1941
1942 /* If the LEN parameter is zero, return DEST. */
1943 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
1944 {
1945 /* Evaluate and ignore SRC in case it has side-effects. */
1946 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
1947 return expand_expr (dest, target, mode, EXPAND_NORMAL);
1948 }
1949
1950 /* If SRC is not a pointer type, don't do this
1951 operation in-line. */
1952 if (src_align == 0)
1953 return 0;
1954
1955 dest_mem = get_memory_rtx (dest);
1956 set_mem_align (dest_mem, dest_align);
1957 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
1958 src_str = c_getstr (src);
1959
1960 /* If SRC is a string constant and block move would be done
1961 by pieces, we can avoid loading the string from memory
1962 and only store the computed constants. */
1963 if (src_str
1964 && GET_CODE (len_rtx) == CONST_INT
1965 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
1966 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
1967 (PTR) src_str, dest_align))
1968 {
1969 store_by_pieces (dest_mem, INTVAL (len_rtx),
1970 builtin_memcpy_read_str,
1971 (PTR) src_str, dest_align);
1972 return force_operand (XEXP (dest_mem, 0), NULL_RTX);
1973 }
1974
1975 src_mem = get_memory_rtx (src);
1976 set_mem_align (src_mem, src_align);
1977
1978 /* Copy the block in the most efficient way available. */
1979 dest_addr = emit_block_move (dest_mem, src_mem, len_rtx);
1980
1981 if (dest_addr == 0)
1982 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
1983
1984 return dest_addr;
1985 }
1986 }
1987
1988 /* Expand expression EXP, which is a call to the strcpy builtin. Return 0
1989 if we failed; the caller should emit a normal call. Otherwise try to get
1990 the result in TARGET, if convenient (and in mode MODE if that's
1991 convenient). */
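/* Informally, the expansion below is the rewrite

       strcpy (d, s)  =>  memcpy (d, s, strlen (s) + 1)

   which is only done when strlen (s) is known at compile time, so that
   the added length argument is a constant.  */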
1992
1993 static rtx
1994 expand_builtin_strcpy (exp, target, mode)
1995 tree exp;
1996 rtx target;
1997 enum machine_mode mode;
1998 {
1999 tree arglist = TREE_OPERAND (exp, 1);
2000 tree fn, len;
2001
2002 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2003 return 0;
2004
2005 fn = built_in_decls[BUILT_IN_MEMCPY];
2006 if (!fn)
2007 return 0;
2008
2009 len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
2010 if (len == 0)
2011 return 0;
2012
2013 len = size_binop (PLUS_EXPR, len, ssize_int (1));
2014 chainon (arglist, build_tree_list (NULL_TREE, len));
2015 return expand_expr (build_function_call_expr (fn, arglist),
2016 target, mode, EXPAND_NORMAL);
2017 }
2018
2019 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
2020 bytes from constant string DATA + OFFSET and return it as target
2021 constant. */
2022
2023 static rtx
2024 builtin_strncpy_read_str (data, offset, mode)
2025 PTR data;
2026 HOST_WIDE_INT offset;
2027 enum machine_mode mode;
2028 {
2029 const char *str = (const char *) data;
2030
2031 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
2032 return const0_rtx;
2033
2034 return c_readstr (str + offset, mode);
2035 }
2036
2037 /* Expand expression EXP, which is a call to the strncpy builtin. Return 0
2038 if we failed; the caller should emit a normal call. */
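/* Informally, for a constant length N and a constant source string:

       strncpy (d, s, N)  =>  memcpy (d, s, N)       if N <= strlen (s) + 1
       strncpy (d, s, N)  =>  stores done by pieces  if N >  strlen (s) + 1

   where the second form writes the string bytes followed by the
   required zero padding directly, provided the target can store the
   block by pieces; otherwise a normal call is emitted.  */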
2039
2040 static rtx
2041 expand_builtin_strncpy (arglist, target, mode)
2042 tree arglist;
2043 rtx target;
2044 enum machine_mode mode;
2045 {
2046 if (!validate_arglist (arglist,
2047 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2048 return 0;
2049 else
2050 {
2051 tree slen = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
2052 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2053 tree fn;
2054
2055 /* We must be passed a constant len parameter. */
2056 if (TREE_CODE (len) != INTEGER_CST)
2057 return 0;
2058
2059 /* If the len parameter is zero, return the dst parameter. */
2060 if (integer_zerop (len))
2061 {
2062 /* Evaluate and ignore the src argument in case it has
2063 side-effects. */
2064 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), const0_rtx,
2065 VOIDmode, EXPAND_NORMAL);
2066 /* Return the dst parameter. */
2067 return expand_expr (TREE_VALUE (arglist), target, mode,
2068 EXPAND_NORMAL);
2069 }
2070
2071 /* Now, we must be passed a constant src ptr parameter. */
2072 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
2073 return 0;
2074
2075 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
2076
2077 /* We're required to pad with trailing zeros if the requested
2078 len is greater than strlen(s2)+1. In that case try to
2079 use store_by_pieces; if that fails, punt. */
2080 if (tree_int_cst_lt (slen, len))
2081 {
2082 tree dest = TREE_VALUE (arglist);
2083 unsigned int dest_align
2084 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2085 const char *p = c_getstr (TREE_VALUE (TREE_CHAIN (arglist)));
2086 rtx dest_mem;
2087
2088 if (!p || dest_align == 0 || !host_integerp (len, 1)
2089 || !can_store_by_pieces (tree_low_cst (len, 1),
2090 builtin_strncpy_read_str,
2091 (PTR) p, dest_align))
2092 return 0;
2093
2094 dest_mem = get_memory_rtx (dest);
2095 store_by_pieces (dest_mem, tree_low_cst (len, 1),
2096 builtin_strncpy_read_str,
2097 (PTR) p, dest_align);
2098 return force_operand (XEXP (dest_mem, 0), NULL_RTX);
2099 }
2100
2101 /* OK, transform into builtin memcpy. */
2102 fn = built_in_decls[BUILT_IN_MEMCPY];
2103 if (!fn)
2104 return 0;
2105 return expand_expr (build_function_call_expr (fn, arglist),
2106 target, mode, EXPAND_NORMAL);
2107 }
2108 }
2109
2110 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
2111 bytes from constant string DATA + OFFSET and return it as target
2112 constant. */
2113
2114 static rtx
2115 builtin_memset_read_str (data, offset, mode)
2116 PTR data;
2117 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2118 enum machine_mode mode;
2119 {
2120 const char *c = (const char *) data;
2121 char *p = alloca (GET_MODE_SIZE (mode));
2122
2123 memset (p, *c, GET_MODE_SIZE (mode));
2124
2125 return c_readstr (p, mode);
2126 }
2127
2128 /* Expand expression EXP, which is a call to the memset builtin. Return 0
2129 if we failed; the caller should emit a normal call. Otherwise try to get
2130 the result in TARGET, if convenient (and in mode MODE if that's
2131 convenient). */
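/* Sketch of the cases handled inline below, assuming the destination
   alignment is known:

       memset (p, 0, n)  =>  clear_storage of the destination block
       memset (p, c, N)  =>  store_by_pieces of a block filled with c
                             (N must be a compile-time constant)

   Everything else is left to the library function.  */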
2132
2133 static rtx
2134 expand_builtin_memset (exp, target, mode)
2135 tree exp;
2136 rtx target;
2137 enum machine_mode mode;
2138 {
2139 tree arglist = TREE_OPERAND (exp, 1);
2140
2141 if (!validate_arglist (arglist,
2142 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
2143 return 0;
2144 else
2145 {
2146 tree dest = TREE_VALUE (arglist);
2147 tree val = TREE_VALUE (TREE_CHAIN (arglist));
2148 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2149 char c;
2150
2151 unsigned int dest_align
2152 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2153 rtx dest_mem, dest_addr, len_rtx;
2154
2155 /* If DEST is not a pointer type, don't do this
2156 operation in-line. */
2157 if (dest_align == 0)
2158 return 0;
2159
2160 /* If the LEN parameter is zero, return DEST. */
2161 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2162 {
2163 /* Evaluate and ignore VAL in case it has side-effects. */
2164 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
2165 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2166 }
2167
2168 if (TREE_CODE (val) != INTEGER_CST)
2169 return 0;
2170
2171 if (target_char_cast (val, &c))
2172 return 0;
2173
2174 if (c)
2175 {
2176 if (!host_integerp (len, 1))
2177 return 0;
2178 if (!can_store_by_pieces (tree_low_cst (len, 1),
2179 builtin_memset_read_str, (PTR) &c,
2180 dest_align))
2181 return 0;
2182
2183 dest_mem = get_memory_rtx (dest);
2184 store_by_pieces (dest_mem, tree_low_cst (len, 1),
2185 builtin_memset_read_str,
2186 (PTR) &c, dest_align);
2187 return force_operand (XEXP (dest_mem, 0), NULL_RTX);
2188 }
2189
2190 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2191
2192 dest_mem = get_memory_rtx (dest);
2193 set_mem_align (dest_mem, dest_align);
2194 dest_addr = clear_storage (dest_mem, len_rtx);
2195
2196 if (dest_addr == 0)
2197 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2198
2199 return dest_addr;
2200 }
2201 }
2202
2203 /* Expand expression EXP, which is a call to the bzero builtin. Return 0
2204 if we failed; the caller should emit a normal call. */
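/* Informally:  bzero (x, y)  =>  memset (x, 0, (size_t) y).
   The original argument list is restored afterwards so that, if the
   memset expansion punts, the call that gets emitted is still bzero.  */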
2205
2206 static rtx
2207 expand_builtin_bzero (exp)
2208 tree exp;
2209 {
2210 tree arglist = TREE_OPERAND (exp, 1);
2211 tree dest, size, newarglist;
2212 rtx result;
2213
2214 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2215 return NULL_RTX;
2216
2217 dest = TREE_VALUE (arglist);
2218 size = TREE_VALUE (TREE_CHAIN (arglist));
2219
2220 /* New argument list transforming bzero(ptr x, int y) to
2221 memset(ptr x, int 0, size_t y). This is done this way
2222 so that if it isn't expanded inline, we fall back to
2223 calling bzero instead of memset. */
2224
2225 newarglist = build_tree_list (NULL_TREE, convert (sizetype, size));
2226 newarglist = tree_cons (NULL_TREE, integer_zero_node, newarglist);
2227 newarglist = tree_cons (NULL_TREE, dest, newarglist);
2228
2229 TREE_OPERAND (exp, 1) = newarglist;
2230 result = expand_builtin_memset (exp, const0_rtx, VOIDmode);
2231
2232 /* Always restore the original arguments. */
2233 TREE_OPERAND (exp, 1) = arglist;
2234
2235 return result;
2236 }
2237
2238 /* Expand expression EXP, which is a call to the memcmp or the strcmp builtin.
2239 ARGLIST is the argument list for this call. Return 0 if we failed and the
2240 caller should emit a normal call, otherwise try to get the result in
2241 TARGET, if convenient (and in mode MODE, if that's convenient). */
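/* Sketch of the shortcuts taken below:

       memcmp (p, q, 0)        =>  0    (arguments still evaluated)
       memcmp ("ab", "ac", 2)  =>  -1   (all arguments constant, folded here)
       memcmp (p, q, 1)        =>  *(const unsigned char *) p
                                   - *(const unsigned char *) q

   Longer non-constant compares use the cmpstrsi pattern if the target
   provides one, and otherwise a library call.  */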
2242
2243 static rtx
2244 expand_builtin_memcmp (exp, arglist, target, mode)
2245 tree exp ATTRIBUTE_UNUSED;
2246 tree arglist;
2247 rtx target;
2248 enum machine_mode mode;
2249 {
2250 tree arg1, arg2, len;
2251 const char *p1, *p2;
2252
2253 if (!validate_arglist (arglist,
2254 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2255 return 0;
2256
2257 arg1 = TREE_VALUE (arglist);
2258 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
2259 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2260
2261 /* If the len parameter is zero, return zero. */
2262 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2263 {
2264 /* Evaluate and ignore arg1 and arg2 in case they have
2265 side-effects. */
2266 expand_expr (arg1, const0_rtx, VOIDmode, EXPAND_NORMAL);
2267 expand_expr (arg2, const0_rtx, VOIDmode, EXPAND_NORMAL);
2268 return const0_rtx;
2269 }
2270
2271 p1 = c_getstr (arg1);
2272 p2 = c_getstr (arg2);
2273
2274 /* If all arguments are constant, and the value of len is not greater
2275 than the lengths of arg1 and arg2, evaluate at compile-time. */
2276 if (host_integerp (len, 1) && p1 && p2
2277 && compare_tree_int (len, strlen (p1) + 1) <= 0
2278 && compare_tree_int (len, strlen (p2) + 1) <= 0)
2279 {
2280 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
2281
2282 return (r < 0 ? constm1_rtx : (r > 0 ? const1_rtx : const0_rtx));
2283 }
2284
2285 /* If the len parameter is one, return an expression corresponding to
2286 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
2287 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
2288 {
2289 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2290 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
2291 tree ind1 =
2292 fold (build1 (CONVERT_EXPR, integer_type_node,
2293 build1 (INDIRECT_REF, cst_uchar_node,
2294 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
2295 tree ind2 =
2296 fold (build1 (CONVERT_EXPR, integer_type_node,
2297 build1 (INDIRECT_REF, cst_uchar_node,
2298 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
2299 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
2300 return expand_expr (result, target, mode, EXPAND_NORMAL);
2301 }
2302
2303 #ifdef HAVE_cmpstrsi
2304 {
2305 rtx arg1_rtx, arg2_rtx, arg3_rtx;
2306 rtx result;
2307 rtx insn;
2308
2309 int arg1_align
2310 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2311 int arg2_align
2312 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2313 enum machine_mode insn_mode
2314 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
2315
2316 /* If either argument is not a pointer type, call the function. */
2317 if (arg1_align == 0 || arg2_align == 0)
2318 return 0;
2319
2320 /* Make a place to write the result of the instruction. */
2321 result = target;
2322 if (! (result != 0
2323 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
2324 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
2325 result = gen_reg_rtx (insn_mode);
2326
2327 arg1_rtx = get_memory_rtx (arg1);
2328 arg2_rtx = get_memory_rtx (arg2);
2329 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2330 if (!HAVE_cmpstrsi)
2331 insn = NULL_RTX;
2332 else
2333 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
2334 GEN_INT (MIN (arg1_align, arg2_align)));
2335
2336 if (insn)
2337 emit_insn (insn);
2338 else
2339 emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
2340 TYPE_MODE (integer_type_node), 3,
2341 XEXP (arg1_rtx, 0), Pmode,
2342 XEXP (arg2_rtx, 0), Pmode,
2343 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
2344 TREE_UNSIGNED (sizetype)),
2345 TYPE_MODE (sizetype));
2346
2347 /* Return the value in the proper mode for this function. */
2348 mode = TYPE_MODE (TREE_TYPE (exp));
2349 if (GET_MODE (result) == mode)
2350 return result;
2351 else if (target != 0)
2352 {
2353 convert_move (target, result, 0);
2354 return target;
2355 }
2356 else
2357 return convert_to_mode (mode, result, 0);
2358 }
2359 #endif
2360
2361 return 0;
2362 }
2363
2364 /* Expand expression EXP, which is a call to the strcmp builtin. Return 0
2365 if we failed; the caller should emit a normal call. Otherwise try to get
2366 the result in TARGET, if convenient. */
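/* Informal sketch: two constant strings are folded to -1, 0 or 1
   outright, and a constant "" argument reduces to a single byte
   comparison.  Otherwise, when the length of either string is known at
   compile time, the call is rewritten roughly as

       strcmp (s1, s2)  =>  memcmp (s1, s2, known_length + 1)

   using the smaller length when both are known.  */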
2367
2368 static rtx
2369 expand_builtin_strcmp (exp, target, mode)
2370 tree exp;
2371 rtx target;
2372 enum machine_mode mode;
2373 {
2374 tree arglist = TREE_OPERAND (exp, 1);
2375 tree arg1, arg2, len, len2, fn;
2376 const char *p1, *p2;
2377
2378 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2379 return 0;
2380
2381 arg1 = TREE_VALUE (arglist);
2382 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
2383
2384 p1 = c_getstr (arg1);
2385 p2 = c_getstr (arg2);
2386
2387 if (p1 && p2)
2388 {
2389 const int i = strcmp (p1, p2);
2390 return (i < 0 ? constm1_rtx : (i > 0 ? const1_rtx : const0_rtx));
2391 }
2392
2393 /* If either arg is "", return an expression corresponding to
2394 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
2395 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
2396 {
2397 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2398 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
2399 tree ind1 =
2400 fold (build1 (CONVERT_EXPR, integer_type_node,
2401 build1 (INDIRECT_REF, cst_uchar_node,
2402 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
2403 tree ind2 =
2404 fold (build1 (CONVERT_EXPR, integer_type_node,
2405 build1 (INDIRECT_REF, cst_uchar_node,
2406 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
2407 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
2408 return expand_expr (result, target, mode, EXPAND_NORMAL);
2409 }
2410
2411 len = c_strlen (arg1);
2412 len2 = c_strlen (arg2);
2413
2414 if (len)
2415 len = size_binop (PLUS_EXPR, ssize_int (1), len);
2416
2417 if (len2)
2418 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
2419
2420 /* If we don't have a constant length for the first, use the length
2421 of the second, if we know it. We don't require a constant for
2422 this case; some cost analysis could be done if both are available
2423 but neither is constant. For now, assume they're equally cheap
2424 unless one has side effects.
2425
2426 If both strings have constant lengths, use the smaller. This
2427 could arise if optimization results in strcpy being called with
2428 two fixed strings, or if the code was machine-generated. We should
2429 add some code to the `memcmp' handler below to deal with such
2430 situations, someday. */
2431
2432 if (!len || TREE_CODE (len) != INTEGER_CST)
2433 {
2434 if (len2 && !TREE_SIDE_EFFECTS (len2))
2435 len = len2;
2436 else if (len == 0)
2437 return 0;
2438 }
2439 else if (len2 && TREE_CODE (len2) == INTEGER_CST
2440 && tree_int_cst_lt (len2, len))
2441 len = len2;
2442
2443 /* If both arguments have side effects, we cannot optimize. */
2444 if (TREE_SIDE_EFFECTS (len))
2445 return 0;
2446
2447 fn = built_in_decls[BUILT_IN_MEMCMP];
2448 if (!fn)
2449 return 0;
2450
2451 chainon (arglist, build_tree_list (NULL_TREE, len));
2452 return expand_expr (build_function_call_expr (fn, arglist),
2453 target, mode, EXPAND_NORMAL);
2454 }
2455
2456 /* Expand expression EXP, which is a call to the strncmp builtin. Return 0
2457 if we failed; the caller should emit a normal call. Otherwise try to get
2458 the result in TARGET, if convenient. */
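/* Informal sketch: constant arguments are folded to -1, 0 or 1; a
   length of 1, or a "" argument with a positive length, reduces to a
   single byte comparison; otherwise, when strlen of either string is
   known, the call becomes roughly

       strncmp (s1, s2, n)  =>  memcmp (s1, s2, MIN (known_length + 1, n))

   and anything else is left as a real strncmp call.  */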
2459
2460 static rtx
2461 expand_builtin_strncmp (exp, target, mode)
2462 tree exp;
2463 rtx target;
2464 enum machine_mode mode;
2465 {
2466 tree arglist = TREE_OPERAND (exp, 1);
2467 tree fn, newarglist, len = 0;
2468 tree arg1, arg2, arg3;
2469 const char *p1, *p2;
2470
2471 if (!validate_arglist (arglist,
2472 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2473 return 0;
2474
2475 arg1 = TREE_VALUE (arglist);
2476 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
2477 arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2478
2479 /* If the len parameter is zero, return zero. */
2480 if (host_integerp (arg3, 1) && tree_low_cst (arg3, 1) == 0)
2481 {
2482 /* Evaluate and ignore arg1 and arg2 in case they have
2483 side-effects. */
2484 expand_expr (arg1, const0_rtx, VOIDmode, EXPAND_NORMAL);
2485 expand_expr (arg2, const0_rtx, VOIDmode, EXPAND_NORMAL);
2486 return const0_rtx;
2487 }
2488
2489 p1 = c_getstr (arg1);
2490 p2 = c_getstr (arg2);
2491
2492 /* If all arguments are constant, evaluate at compile-time. */
2493 if (host_integerp (arg3, 1) && p1 && p2)
2494 {
2495 const int r = strncmp (p1, p2, tree_low_cst (arg3, 1));
2496 return (r < 0 ? constm1_rtx : (r > 0 ? const1_rtx : const0_rtx));
2497 }
2498
2499 /* If len == 1 or (either string parameter is "" and (len >= 1)),
2500 return (*(const u_char*)arg1 - *(const u_char*)arg2). */
2501 if (host_integerp (arg3, 1)
2502 && (tree_low_cst (arg3, 1) == 1
2503 || (tree_low_cst (arg3, 1) > 1
2504 && ((p1 && *p1 == '\0') || (p2 && *p2 == '\0')))))
2505 {
2506 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2507 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
2508 tree ind1 =
2509 fold (build1 (CONVERT_EXPR, integer_type_node,
2510 build1 (INDIRECT_REF, cst_uchar_node,
2511 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
2512 tree ind2 =
2513 fold (build1 (CONVERT_EXPR, integer_type_node,
2514 build1 (INDIRECT_REF, cst_uchar_node,
2515 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
2516 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
2517 return expand_expr (result, target, mode, EXPAND_NORMAL);
2518 }
2519
2520 /* If c_strlen can determine an expression for one of the string
2521 lengths, and it doesn't have side effects, then call
2522 expand_builtin_memcmp() using length MIN(strlen(string)+1, arg3). */
2523
2524 /* Perhaps one of the strings is really constant; if so, prefer
2525 that constant length over the other string's length. */
2526 if (p1)
2527 len = c_strlen (arg1);
2528 else if (p2)
2529 len = c_strlen (arg2);
2530
2531 /* If we still don't have a len, try either string arg, as long
2532 as it doesn't have side effects. */
2533 if (!len && !TREE_SIDE_EFFECTS (arg1))
2534 len = c_strlen (arg1);
2535 if (!len && !TREE_SIDE_EFFECTS (arg2))
2536 len = c_strlen (arg2);
2537 /* If we still don't have a length, punt. */
2538 if (!len)
2539 return 0;
2540
2541 fn = built_in_decls[BUILT_IN_MEMCMP];
2542 if (!fn)
2543 return 0;
2544
2545 /* Add one to the string length. */
2546 len = fold (size_binop (PLUS_EXPR, len, ssize_int (1)));
2547
2548 /* The actual new length parameter is MIN(len,arg3). */
2549 len = fold (build (MIN_EXPR, TREE_TYPE (len), len, arg3));
2550
2551 newarglist = build_tree_list (NULL_TREE, len);
2552 newarglist = tree_cons (NULL_TREE, arg2, newarglist);
2553 newarglist = tree_cons (NULL_TREE, arg1, newarglist);
2554 return expand_expr (build_function_call_expr (fn, newarglist),
2555 target, mode, EXPAND_NORMAL);
2556 }
2557
2558 /* Expand expression EXP, which is a call to the strcat builtin.
2559 Return 0 if we failed; the caller should emit a normal call.
2560 Otherwise try to get the result in TARGET, if convenient. */
2561
2562 static rtx
2563 expand_builtin_strcat (arglist, target, mode)
2564 tree arglist;
2565 rtx target;
2566 enum machine_mode mode;
2567 {
2568 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2569 return 0;
2570 else
2571 {
2572 tree dst = TREE_VALUE (arglist),
2573 src = TREE_VALUE (TREE_CHAIN (arglist));
2574 const char *p = c_getstr (src);
2575
2576 /* If the string length is zero, return the dst parameter. */
2577 if (p && *p == '\0')
2578 return expand_expr (dst, target, mode, EXPAND_NORMAL);
2579
2580 return 0;
2581 }
2582 }
2583
2584 /* Expand expression EXP, which is a call to the strncat builtin.
2585 Return 0 if we failed; the caller should emit a normal call.
2586 Otherwise try to get the result in TARGET, if convenient. */
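/* Informally:

       strncat (d, s, 0)   =>  d
       strncat (d, "", n)  =>  d
       strncat (d, s, n)   =>  strcat (d, s)   when s is a constant string
                                               and n >= strlen (s)

   (the discarded operands are still evaluated for side effects).
   Other forms are expanded as ordinary calls.  */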
2587
2588 static rtx
2589 expand_builtin_strncat (arglist, target, mode)
2590 tree arglist;
2591 rtx target;
2592 enum machine_mode mode;
2593 {
2594 if (!validate_arglist (arglist,
2595 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2596 return 0;
2597 else
2598 {
2599 tree dst = TREE_VALUE (arglist),
2600 src = TREE_VALUE (TREE_CHAIN (arglist)),
2601 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2602 const char *p = c_getstr (src);
2603
2604 /* If the requested length is zero, or the src parameter string
2605 length is zero, return the dst parameter. */
2606 if (integer_zerop (len) || (p && *p == '\0'))
2607 {
2608 /* Evaluate and ignore the src and len parameters in case
2609 they have side-effects. */
2610 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2611 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
2612 return expand_expr (dst, target, mode, EXPAND_NORMAL);
2613 }
2614
2615 /* If the requested len is greater than or equal to the string
2616 length, call strcat. */
2617 if (TREE_CODE (len) == INTEGER_CST && p
2618 && compare_tree_int (len, strlen (p)) >= 0)
2619 {
2620 tree newarglist
2621 = tree_cons (NULL_TREE, dst, build_tree_list (NULL_TREE, src));
2622 tree fn = built_in_decls[BUILT_IN_STRCAT];
2623
2624 /* If the replacement _DECL isn't initialized, don't do the
2625 transformation. */
2626 if (!fn)
2627 return 0;
2628
2629 return expand_expr (build_function_call_expr (fn, newarglist),
2630 target, mode, EXPAND_NORMAL);
2631 }
2632 return 0;
2633 }
2634 }
2635
2636 /* Expand expression EXP, which is a call to the strspn builtin.
2637 Return 0 if we failed; the caller should emit a normal call.
2638 Otherwise try to get the result in TARGET, if convenient. */
2639
2640 static rtx
2641 expand_builtin_strspn (arglist, target, mode)
2642 tree arglist;
2643 rtx target;
2644 enum machine_mode mode;
2645 {
2646 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2647 return 0;
2648 else
2649 {
2650 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2651 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
2652
2653 /* If both arguments are constants, evaluate at compile-time. */
2654 if (p1 && p2)
2655 {
2656 const size_t r = strspn (p1, p2);
2657 return expand_expr (size_int (r), target, mode, EXPAND_NORMAL);
2658 }
2659
2660 /* If either argument is "", return 0. */
2661 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
2662 {
2663 /* Evaluate and ignore both arguments in case either one has
2664 side-effects. */
2665 expand_expr (s1, const0_rtx, VOIDmode, EXPAND_NORMAL);
2666 expand_expr (s2, const0_rtx, VOIDmode, EXPAND_NORMAL);
2667 return const0_rtx;
2668 }
2669 return 0;
2670 }
2671 }
2672
2673 /* Expand expression EXP, which is a call to the strcspn builtin.
2674 Return 0 if we failed; the caller should emit a normal call.
2675 Otherwise try to get the result in TARGET, if convenient. */
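/* Informally:

       strcspn ("ab", "b")  =>  1            (both arguments constant)
       strcspn ("", s2)     =>  0            (s2 still evaluated)
       strcspn (s1, "")     =>  strlen (s1)

   Anything else becomes a normal library call.  */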
2676
2677 static rtx
2678 expand_builtin_strcspn (arglist, target, mode)
2679 tree arglist;
2680 rtx target;
2681 enum machine_mode mode;
2682 {
2683 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2684 return 0;
2685 else
2686 {
2687 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2688 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
2689
2690 /* If both arguments are constants, evaluate at compile-time. */
2691 if (p1 && p2)
2692 {
2693 const size_t r = strcspn (p1, p2);
2694 return expand_expr (size_int (r), target, mode, EXPAND_NORMAL);
2695 }
2696
2697 /* If the first argument is "", return 0. */
2698 if (p1 && *p1 == '\0')
2699 {
2700 /* Evaluate and ignore argument s2 in case it has
2701 side-effects. */
2702 expand_expr (s2, const0_rtx, VOIDmode, EXPAND_NORMAL);
2703 return const0_rtx;
2704 }
2705
2706 /* If the second argument is "", return __builtin_strlen(s1). */
2707 if (p2 && *p2 == '\0')
2708 {
2709 tree newarglist = build_tree_list (NULL_TREE, s1),
2710 fn = built_in_decls[BUILT_IN_STRLEN];
2711
2712 /* If the replacement _DECL isn't initialized, don't do the
2713 transformation. */
2714 if (!fn)
2715 return 0;
2716
2717 return expand_expr (build_function_call_expr (fn, newarglist),
2718 target, mode, EXPAND_NORMAL);
2719 }
2720 return 0;
2721 }
2722 }
2723
2724 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
2725 if that's convenient. */
2726
2727 rtx
2728 expand_builtin_saveregs ()
2729 {
2730 rtx val, seq;
2731
2732 /* Don't do __builtin_saveregs more than once in a function.
2733 Save the result of the first call and reuse it. */
2734 if (saveregs_value != 0)
2735 return saveregs_value;
2736
2737 /* When this function is called, it means that registers must be
2738 saved on entry to this function. So we migrate the call to the
2739 first insn of this function. */
2740
2741 start_sequence ();
2742
2743 #ifdef EXPAND_BUILTIN_SAVEREGS
2744 /* Do whatever the machine needs done in this case. */
2745 val = EXPAND_BUILTIN_SAVEREGS ();
2746 #else
2747 /* ??? We used to try and build up a call to the out of line function,
2748 guessing about what registers needed saving etc. This became much
2749 harder with __builtin_va_start, since we don't have a tree for a
2750 call to __builtin_saveregs to fall back on. There was exactly one
2751 port (i860) that used this code, and I'm unconvinced it could actually
2752 handle the general case. So we no longer try to handle anything
2753 weird and make the backend absorb the evil. */
2754
2755 error ("__builtin_saveregs not supported by this target");
2756 val = const0_rtx;
2757 #endif
2758
2759 seq = get_insns ();
2760 end_sequence ();
2761
2762 saveregs_value = val;
2763
2764 /* Put the sequence after the NOTE that starts the function. If this
2765 is inside a SEQUENCE, make the outer-level insn chain current, so
2766 the code is placed at the start of the function. */
2767 push_topmost_sequence ();
2768 emit_insns_after (seq, get_insns ());
2769 pop_topmost_sequence ();
2770
2771 return val;
2772 }
2773
2774 /* __builtin_args_info (N) returns word N of the arg space info
2775 for the current function. The number and meanings of words
2776 are controlled by the definition of CUMULATIVE_ARGS. */
2777
2778 static rtx
2779 expand_builtin_args_info (exp)
2780 tree exp;
2781 {
2782 tree arglist = TREE_OPERAND (exp, 1);
2783 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
2784 int *word_ptr = (int *) &current_function_args_info;
2785 #if 0
2786 /* These are used by the code below that is #if 0'ed away. */
2787 int i;
2788 tree type, elts, result;
2789 #endif
2790
2791 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
2792 abort ();
2793
2794 if (arglist != 0)
2795 {
2796 if (!host_integerp (TREE_VALUE (arglist), 0))
2797 error ("argument of `__builtin_args_info' must be constant");
2798 else
2799 {
2800 HOST_WIDE_INT wordnum = tree_low_cst (TREE_VALUE (arglist), 0);
2801
2802 if (wordnum < 0 || wordnum >= nwords)
2803 error ("argument of `__builtin_args_info' out of range");
2804 else
2805 return GEN_INT (word_ptr[wordnum]);
2806 }
2807 }
2808 else
2809 error ("missing argument in `__builtin_args_info'");
2810
2811 return const0_rtx;
2812
2813 #if 0
2814 for (i = 0; i < nwords; i++)
2815 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
2816
2817 type = build_array_type (integer_type_node,
2818 build_index_type (build_int_2 (nwords, 0)));
2819 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
2820 TREE_CONSTANT (result) = 1;
2821 TREE_STATIC (result) = 1;
2822 result = build1 (INDIRECT_REF, build_pointer_type (type), result);
2823 TREE_CONSTANT (result) = 1;
2824 return expand_expr (result, NULL_RTX, VOIDmode, 0);
2825 #endif
2826 }
2827
2828 /* Expand ARGLIST, from a call to __builtin_next_arg. */
2829
2830 static rtx
2831 expand_builtin_next_arg (arglist)
2832 tree arglist;
2833 {
2834 tree fntype = TREE_TYPE (current_function_decl);
2835
2836 if ((TYPE_ARG_TYPES (fntype) == 0
2837 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
2838 == void_type_node))
2839 && ! current_function_varargs)
2840 {
2841 error ("`va_start' used in function with fixed args");
2842 return const0_rtx;
2843 }
2844
2845 if (arglist)
2846 {
2847 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
2848 tree arg = TREE_VALUE (arglist);
2849
2850 /* Strip off all nops for the sake of the comparison. This
2851 is not quite the same as STRIP_NOPS. It does more.
2852 We must also strip off INDIRECT_REF for C++ reference
2853 parameters. */
2854 while (TREE_CODE (arg) == NOP_EXPR
2855 || TREE_CODE (arg) == CONVERT_EXPR
2856 || TREE_CODE (arg) == NON_LVALUE_EXPR
2857 || TREE_CODE (arg) == INDIRECT_REF)
2858 arg = TREE_OPERAND (arg, 0);
2859 if (arg != last_parm)
2860 warning ("second parameter of `va_start' not last named argument");
2861 }
2862 else if (! current_function_varargs)
2863 /* Evidently an out of date version of <stdarg.h>; can't validate
2864 va_start's second argument, but can still work as intended. */
2865 warning ("`__builtin_next_arg' called without an argument");
2866
2867 return expand_binop (Pmode, add_optab,
2868 current_function_internal_arg_pointer,
2869 current_function_arg_offset_rtx,
2870 NULL_RTX, 0, OPTAB_LIB_WIDEN);
2871 }
2872
2873 /* Make it easier for the backends by protecting the valist argument
2874 from multiple evaluations. */
2875
2876 static tree
2877 stabilize_va_list (valist, needs_lvalue)
2878 tree valist;
2879 int needs_lvalue;
2880 {
2881 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
2882 {
2883 if (TREE_SIDE_EFFECTS (valist))
2884 valist = save_expr (valist);
2885
2886 /* For this case, the backends will be expecting a pointer to
2887 TREE_TYPE (va_list_type_node), but it's possible we've
2888 actually been given an array (an actual va_list_type_node).
2889 So fix it. */
2890 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
2891 {
2892 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
2893 tree p2 = build_pointer_type (va_list_type_node);
2894
2895 valist = build1 (ADDR_EXPR, p2, valist);
2896 valist = fold (build1 (NOP_EXPR, p1, valist));
2897 }
2898 }
2899 else
2900 {
2901 tree pt;
2902
2903 if (! needs_lvalue)
2904 {
2905 if (! TREE_SIDE_EFFECTS (valist))
2906 return valist;
2907
2908 pt = build_pointer_type (va_list_type_node);
2909 valist = fold (build1 (ADDR_EXPR, pt, valist));
2910 TREE_SIDE_EFFECTS (valist) = 1;
2911 }
2912
2913 if (TREE_SIDE_EFFECTS (valist))
2914 valist = save_expr (valist);
2915 valist = fold (build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)),
2916 valist));
2917 }
2918
2919 return valist;
2920 }
2921
2922 /* The "standard" implementation of va_start: just assign `nextarg' to
2923 the variable. */
2924
2925 void
2926 std_expand_builtin_va_start (stdarg_p, valist, nextarg)
2927 int stdarg_p;
2928 tree valist;
2929 rtx nextarg;
2930 {
2931 tree t;
2932
2933 if (! stdarg_p)
2934 {
2935 /* The dummy named parameter is declared as a 'word' sized
2936 object, but if a 'word' is smaller than an 'int', it would
2937 have been promoted to int when it was added to the arglist. */
2938 int align = PARM_BOUNDARY / BITS_PER_UNIT;
2939 int size = MAX (UNITS_PER_WORD,
2940 GET_MODE_SIZE (TYPE_MODE (integer_type_node)));
2941 int offset = ((size + align - 1) / align) * align;
2942 nextarg = plus_constant (nextarg, -offset);
2943 }
2944
2945 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
2946 make_tree (ptr_type_node, nextarg));
2947 TREE_SIDE_EFFECTS (t) = 1;
2948
2949 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2950 }
2951
2952 /* Expand ARGLIST, which is from a call to __builtin_stdarg_va_start or
2953 __builtin_varargs_va_start, depending on STDARG_P. */
2954
2955 static rtx
2956 expand_builtin_va_start (stdarg_p, arglist)
2957 int stdarg_p;
2958 tree arglist;
2959 {
2960 rtx nextarg;
2961 tree chain = arglist, valist;
2962
2963 if (stdarg_p)
2964 nextarg = expand_builtin_next_arg (chain = TREE_CHAIN (arglist));
2965 else
2966 nextarg = expand_builtin_next_arg (NULL_TREE);
2967
2968 if (TREE_CHAIN (chain))
2969 error ("too many arguments to function `va_start'");
2970
2971 valist = stabilize_va_list (TREE_VALUE (arglist), 1);
2972
2973 #ifdef EXPAND_BUILTIN_VA_START
2974 EXPAND_BUILTIN_VA_START (stdarg_p, valist, nextarg);
2975 #else
2976 std_expand_builtin_va_start (stdarg_p, valist, nextarg);
2977 #endif
2978
2979 return const0_rtx;
2980 }
2981
2982 /* The "standard" implementation of va_arg: read the value from the
2983 current (padded) address and increment by the (padded) size. */
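/* A sketch of the address arithmetic built below, with ALIGN equal to
   PARM_BOUNDARY / BITS_PER_UNIT and SIZE the size of TYPE in bytes:

       rounded_size = ((SIZE + ALIGN - 1) / ALIGN) * ALIGN;
       addr = ap;
       if (PAD_VARARGS_DOWN && rounded_size <= ALIGN)
         addr += rounded_size - SIZE;   -- small args sit at the top of their slot
       ap += rounded_size;

   The value returned is ADDR; an erroneous or incomplete TYPE makes
   rounded_size zero, in which case AP is left unchanged.  */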
2984
2985 rtx
2986 std_expand_builtin_va_arg (valist, type)
2987 tree valist, type;
2988 {
2989 tree addr_tree, t, type_size = NULL;
2990 tree align, alignm1;
2991 tree rounded_size;
2992 rtx addr;
2993
2994 /* Compute the rounded size of the type. */
2995 align = size_int (PARM_BOUNDARY / BITS_PER_UNIT);
2996 alignm1 = size_int (PARM_BOUNDARY / BITS_PER_UNIT - 1);
2997 if (type == error_mark_node
2998 || (type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type))) == NULL
2999 || TREE_OVERFLOW (type_size))
3000 rounded_size = size_zero_node;
3001 else
3002 rounded_size = fold (build (MULT_EXPR, sizetype,
3003 fold (build (TRUNC_DIV_EXPR, sizetype,
3004 fold (build (PLUS_EXPR, sizetype,
3005 type_size, alignm1)),
3006 align)),
3007 align));
3008
3009 /* Get AP. */
3010 addr_tree = valist;
3011 if (PAD_VARARGS_DOWN && ! integer_zerop (rounded_size))
3012 {
3013 /* Small args are padded downward. */
3014 addr_tree = fold (build (PLUS_EXPR, TREE_TYPE (addr_tree), addr_tree,
3015 fold (build (COND_EXPR, sizetype,
3016 fold (build (GT_EXPR, sizetype,
3017 rounded_size,
3018 align)),
3019 size_zero_node,
3020 fold (build (MINUS_EXPR, sizetype,
3021 rounded_size,
3022 type_size))))));
3023 }
3024
3025 addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
3026 addr = copy_to_reg (addr);
3027
3028 /* Compute new value for AP. */
3029 if (! integer_zerop (rounded_size))
3030 {
3031 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
3032 build (PLUS_EXPR, TREE_TYPE (valist), valist,
3033 rounded_size));
3034 TREE_SIDE_EFFECTS (t) = 1;
3035 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3036 }
3037
3038 return addr;
3039 }
3040
3041 /* Expand __builtin_va_arg, which is not really a builtin function, but
3042 a very special sort of operator. */
3043
3044 rtx
3045 expand_builtin_va_arg (valist, type)
3046 tree valist, type;
3047 {
3048 rtx addr, result;
3049 tree promoted_type, want_va_type, have_va_type;
3050
3051 /* Verify that valist is of the proper type. */
3052
3053 want_va_type = va_list_type_node;
3054 have_va_type = TREE_TYPE (valist);
3055 if (TREE_CODE (want_va_type) == ARRAY_TYPE)
3056 {
3057 /* If va_list is an array type, the argument may have decayed
3058 to a pointer type, e.g. by being passed to another function.
3059 In that case, unwrap both types so that we can compare the
3060 underlying records. */
3061 if (TREE_CODE (have_va_type) == ARRAY_TYPE
3062 || TREE_CODE (have_va_type) == POINTER_TYPE)
3063 {
3064 want_va_type = TREE_TYPE (want_va_type);
3065 have_va_type = TREE_TYPE (have_va_type);
3066 }
3067 }
3068 if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
3069 {
3070 error ("first argument to `va_arg' not of type `va_list'");
3071 addr = const0_rtx;
3072 }
3073
3074 /* Generate a diagnostic for requesting data of a type that cannot
3075 be passed through `...' due to type promotion at the call site. */
3076 else if ((promoted_type = (*lang_type_promotes_to) (type)) != NULL_TREE)
3077 {
3078 const char *name = "<anonymous type>", *pname = 0;
3079 static int gave_help;
3080
3081 if (TYPE_NAME (type))
3082 {
3083 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE)
3084 name = IDENTIFIER_POINTER (TYPE_NAME (type));
3085 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
3086 && DECL_NAME (TYPE_NAME (type)))
3087 name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
3088 }
3089 if (TYPE_NAME (promoted_type))
3090 {
3091 if (TREE_CODE (TYPE_NAME (promoted_type)) == IDENTIFIER_NODE)
3092 pname = IDENTIFIER_POINTER (TYPE_NAME (promoted_type));
3093 else if (TREE_CODE (TYPE_NAME (promoted_type)) == TYPE_DECL
3094 && DECL_NAME (TYPE_NAME (promoted_type)))
3095 pname = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (promoted_type)));
3096 }
3097
3098 error ("`%s' is promoted to `%s' when passed through `...'", name, pname);
3099 if (! gave_help)
3100 {
3101 gave_help = 1;
3102 error ("(so you should pass `%s' not `%s' to `va_arg')", pname, name);
3103 }
3104
3105 addr = const0_rtx;
3106 }
3107 else
3108 {
3109 /* Make it easier for the backends by protecting the valist argument
3110 from multiple evaluations. */
3111 valist = stabilize_va_list (valist, 0);
3112
3113 #ifdef EXPAND_BUILTIN_VA_ARG
3114 addr = EXPAND_BUILTIN_VA_ARG (valist, type);
3115 #else
3116 addr = std_expand_builtin_va_arg (valist, type);
3117 #endif
3118 }
3119
3120 #ifdef POINTERS_EXTEND_UNSIGNED
3121 if (GET_MODE (addr) != Pmode)
3122 addr = convert_memory_address (Pmode, addr);
3123 #endif
3124
3125 result = gen_rtx_MEM (TYPE_MODE (type), addr);
3126 set_mem_alias_set (result, get_varargs_alias_set ());
3127
3128 return result;
3129 }
3130
3131 /* Expand ARGLIST, from a call to __builtin_va_end. */
3132
3133 static rtx
3134 expand_builtin_va_end (arglist)
3135 tree arglist;
3136 {
3137 tree valist = TREE_VALUE (arglist);
3138
3139 #ifdef EXPAND_BUILTIN_VA_END
3140 valist = stabilize_va_list (valist, 0);
3141 EXPAND_BUILTIN_VA_END (arglist);
3142 #else
3143 /* Evaluate for side effects, if needed. I hate macros that don't
3144 do that. */
3145 if (TREE_SIDE_EFFECTS (valist))
3146 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
3147 #endif
3148
3149 return const0_rtx;
3150 }
3151
3152 /* Expand ARGLIST, from a call to __builtin_va_copy. We do this as a
3153 builtin rather than just as an assignment in stdarg.h because of the
3154 nastiness of array-type va_list types. */
3155
3156 static rtx
3157 expand_builtin_va_copy (arglist)
3158 tree arglist;
3159 {
3160 tree dst, src, t;
3161
3162 dst = TREE_VALUE (arglist);
3163 src = TREE_VALUE (TREE_CHAIN (arglist));
3164
3165 dst = stabilize_va_list (dst, 1);
3166 src = stabilize_va_list (src, 0);
3167
3168 if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
3169 {
3170 t = build (MODIFY_EXPR, va_list_type_node, dst, src);
3171 TREE_SIDE_EFFECTS (t) = 1;
3172 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3173 }
3174 else
3175 {
3176 rtx dstb, srcb, size;
3177
3178 /* Evaluate to pointers. */
3179 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
3180 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
3181 size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
3182 VOIDmode, EXPAND_NORMAL);
3183
3184 #ifdef POINTERS_EXTEND_UNSIGNED
3185 if (GET_MODE (dstb) != Pmode)
3186 dstb = convert_memory_address (Pmode, dstb);
3187
3188 if (GET_MODE (srcb) != Pmode)
3189 srcb = convert_memory_address (Pmode, srcb);
3190 #endif
3191
3192 /* "Dereference" to BLKmode memories. */
3193 dstb = gen_rtx_MEM (BLKmode, dstb);
3194 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
3195 set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
3196 srcb = gen_rtx_MEM (BLKmode, srcb);
3197 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
3198 set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));
3199
3200 /* Copy. */
3201 emit_block_move (dstb, srcb, size);
3202 }
3203
3204 return const0_rtx;
3205 }
3206
3207 /* Expand a call to one of the builtin functions __builtin_frame_address or
3208 __builtin_return_address. */
3209
3210 static rtx
3211 expand_builtin_frame_address (exp)
3212 tree exp;
3213 {
3214 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3215 tree arglist = TREE_OPERAND (exp, 1);
3216
3217 /* The argument must be a nonnegative integer constant.
3218 It counts the number of frames to scan up the stack.
3219 The value is the frame address or return address saved in that frame. */
3220 if (arglist == 0)
3221 /* Warning about missing arg was already issued. */
3222 return const0_rtx;
3223 else if (! host_integerp (TREE_VALUE (arglist), 1))
3224 {
3225 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
3226 error ("invalid arg to `__builtin_frame_address'");
3227 else
3228 error ("invalid arg to `__builtin_return_address'");
3229 return const0_rtx;
3230 }
3231 else
3232 {
3233 rtx tem
3234 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
3235 tree_low_cst (TREE_VALUE (arglist), 1),
3236 hard_frame_pointer_rtx);
3237
3238 /* Some ports cannot access arbitrary stack frames. */
3239 if (tem == NULL)
3240 {
3241 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
3242 warning ("unsupported arg to `__builtin_frame_address'");
3243 else
3244 warning ("unsupported arg to `__builtin_return_address'");
3245 return const0_rtx;
3246 }
3247
3248 /* For __builtin_frame_address, return what we've got. */
3249 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
3250 return tem;
3251
3252 if (GET_CODE (tem) != REG
3253 && ! CONSTANT_P (tem))
3254 tem = copy_to_mode_reg (Pmode, tem);
3255 return tem;
3256 }
3257 }
3258
3259 /* Expand a call to the alloca builtin, with arguments ARGLIST. Return 0 if
3260 we failed and the caller should emit a normal call, otherwise try to get
3261 the result in TARGET, if convenient. */
3262
3263 static rtx
3264 expand_builtin_alloca (arglist, target)
3265 tree arglist;
3266 rtx target;
3267 {
3268 rtx op0;
3269 rtx result;
3270
3271 if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
3272 return 0;
3273
3274 /* Compute the argument. */
3275 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
3276
3277 /* Allocate the desired space. */
3278 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
3279
3280 #ifdef POINTERS_EXTEND_UNSIGNED
3281 if (GET_MODE (result) != ptr_mode)
3282 result = convert_memory_address (ptr_mode, result);
3283 #endif
3284
3285 return result;
3286 }
3287
3288 /* Expand a call to the ffs builtin. The arguments are in ARGLIST.
3289 Return 0 if a normal call should be emitted rather than expanding the
3290 function in-line. If convenient, the result should be placed in TARGET.
3291 SUBTARGET may be used as the target for computing one of EXP's operands. */
3292
3293 static rtx
3294 expand_builtin_ffs (arglist, target, subtarget)
3295 tree arglist;
3296 rtx target, subtarget;
3297 {
3298 rtx op0;
3299 if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
3300 return 0;
3301
3302 /* Compute the argument. */
3303 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
3304 /* Compute ffs, into TARGET if possible.
3305 Set TARGET to wherever the result comes back. */
3306 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
3307 ffs_optab, op0, target, 1);
3308 if (target == 0)
3309 abort ();
3310 return target;
3311 }
3312
3313 /* If the string passed to fputs is a compile-time constant, transform
3314 the call into __builtin_fputc() or __builtin_fwrite() as appropriate. */
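/* Informal sketch of the rewrites performed, for a known constant
   string and an unused return value:

       fputs ("", f)     =>  (nothing; f is still evaluated)
       fputs ("c", f)    =>  fputc ('c', f)
       fputs ("abc", f)  =>  fwrite ("abc", 1, 3, f)

   With UNLOCKED set, fputc_unlocked and fwrite_unlocked are used in
   the same way.  */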
3315
3316 static rtx
3317 expand_builtin_fputs (arglist, ignore, unlocked)
3318 tree arglist;
3319 int ignore;
3320 int unlocked;
3321 {
3322 tree len, fn;
3323 tree fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
3324 : built_in_decls[BUILT_IN_FPUTC];
3325 tree fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
3326 : built_in_decls[BUILT_IN_FWRITE];
3327
3328 /* If the return value is used, or the replacement _DECL isn't
3329 initialized, don't do the transformation. */
3330 if (!ignore || !fn_fputc || !fn_fwrite)
3331 return 0;
3332
3333 /* Verify the arguments in the original call. */
3334 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3335 return 0;
3336
3337 /* Get the length of the string passed to fputs. If the length
3338 can't be determined, punt. */
3339 if (!(len = c_strlen (TREE_VALUE (arglist)))
3340 || TREE_CODE (len) != INTEGER_CST)
3341 return 0;
3342
3343 switch (compare_tree_int (len, 1))
3344 {
3345 case -1: /* length is 0, delete the call entirely. */
3346 {
3347 /* Evaluate and ignore the argument in case it has
3348 side-effects. */
3349 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), const0_rtx,
3350 VOIDmode, EXPAND_NORMAL);
3351 return const0_rtx;
3352 }
3353 case 0: /* length is 1, call fputc. */
3354 {
3355 const char *p = c_getstr (TREE_VALUE (arglist));
3356
3357 if (p != NULL)
3358 {
3359 /* New argument list transforming fputs(string, stream) to
3360 fputc(string[0], stream). */
3361 arglist =
3362 build_tree_list (NULL_TREE, TREE_VALUE (TREE_CHAIN (arglist)));
3363 arglist =
3364 tree_cons (NULL_TREE, build_int_2 (p[0], 0), arglist);
3365 fn = fn_fputc;
3366 break;
3367 }
3368 }
3369 /* FALLTHROUGH */
3370 case 1: /* length is greater than 1, call fwrite. */
3371 {
3372 tree string_arg = TREE_VALUE (arglist);
3373
3374 /* New argument list transforming fputs(string, stream) to
3375 fwrite(string, 1, len, stream). */
3376 arglist = build_tree_list (NULL_TREE, TREE_VALUE (TREE_CHAIN (arglist)));
3377 arglist = tree_cons (NULL_TREE, len, arglist);
3378 arglist = tree_cons (NULL_TREE, size_one_node, arglist);
3379 arglist = tree_cons (NULL_TREE, string_arg, arglist);
3380 fn = fn_fwrite;
3381 break;
3382 }
3383 default:
3384 abort ();
3385 }
3386
3387 return expand_expr (build_function_call_expr (fn, arglist),
3388 (ignore ? const0_rtx : NULL_RTX),
3389 VOIDmode, EXPAND_NORMAL);
3390 }
3391
3392 /* Expand a call to __builtin_expect. We return our argument and emit a
3393 NOTE_INSN_EXPECTED_VALUE note. This is the expansion of __builtin_expect in
3394 a non-jump context. */
3395
3396 static rtx
3397 expand_builtin_expect (arglist, target)
3398 tree arglist;
3399 rtx target;
3400 {
3401 tree exp, c;
3402 rtx note, rtx_c;
3403
3404 if (arglist == NULL_TREE
3405 || TREE_CHAIN (arglist) == NULL_TREE)
3406 return const0_rtx;
3407 exp = TREE_VALUE (arglist);
3408 c = TREE_VALUE (TREE_CHAIN (arglist));
3409
3410 if (TREE_CODE (c) != INTEGER_CST)
3411 {
3412 error ("second arg to `__builtin_expect' must be a constant");
3413 c = integer_zero_node;
3414 }
3415
3416 target = expand_expr (exp, target, VOIDmode, EXPAND_NORMAL);
3417
3418 /* Don't bother with expected value notes for integral constants. */
3419 if (GET_CODE (target) != CONST_INT)
3420 {
3421 /* We do need to force this into a register so that we can be
3422 moderately sure to be able to correctly interpret the branch
3423 condition later. */
3424 target = force_reg (GET_MODE (target), target);
3425
3426 rtx_c = expand_expr (c, NULL_RTX, GET_MODE (target), EXPAND_NORMAL);
3427
3428 note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
3429 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, target, rtx_c);
3430 }
3431
3432 return target;
3433 }
3434
3435 /* Like expand_builtin_expect, except do this in a jump context. This is
3436 called from do_jump if the conditional is a __builtin_expect. Return either
3437 a SEQUENCE of insns to emit the jump or NULL if we cannot optimize
3438 __builtin_expect. We need to optimize this at jump time so that machines
3439 like the PowerPC don't turn the test into a SCC operation, and then jump
3440 based on the test being 0/1. */
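/* For instance, in

       if (__builtin_expect (x > 0, 1))
         ...

   each conditional jump produced for "x > 0" gets a PRED_BUILTIN_EXPECT
   prediction marking the expected branch; an expected value of 0
   inverts the prediction, as does a jump that targets the false
   label.  */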
3441
3442 rtx
3443 expand_builtin_expect_jump (exp, if_false_label, if_true_label)
3444 tree exp;
3445 rtx if_false_label;
3446 rtx if_true_label;
3447 {
3448 tree arglist = TREE_OPERAND (exp, 1);
3449 tree arg0 = TREE_VALUE (arglist);
3450 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3451 rtx ret = NULL_RTX;
3452
3453 /* Only handle __builtin_expect (test, 0) and
3454 __builtin_expect (test, 1). */
3455 if (TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
3456 && (integer_zerop (arg1) || integer_onep (arg1)))
3457 {
3458 int j;
3459 int num_jumps = 0;
3460
3461 /* If we fail to locate an appropriate conditional jump, we'll
3462 fall back to normal evaluation. Ensure that the expression
3463 can be re-evaluated. */
3464 switch (unsafe_for_reeval (arg0))
3465 {
3466 case 0: /* Safe. */
3467 break;
3468
3469 case 1: /* Mildly unsafe. */
3470 arg0 = unsave_expr (arg0);
3471 break;
3472
3473 case 2: /* Wildly unsafe. */
3474 return NULL_RTX;
3475 }
3476
3477 /* Expand the jump insns. */
3478 start_sequence ();
3479 do_jump (arg0, if_false_label, if_true_label);
3480 ret = gen_sequence ();
3481 end_sequence ();
3482
3483 /* Now that the __builtin_expect has been validated, go through and add
3484 the expected outcomes to each of the conditional jumps. If we run into an
3485 error, just give up and generate the 'safe' code of doing a SCC
3486 operation and then doing a branch on that. */
3487 for (j = 0; j < XVECLEN (ret, 0); j++)
3488 {
3489 rtx insn = XVECEXP (ret, 0, j);
3490 rtx pattern;
3491
3492 if (GET_CODE (insn) == JUMP_INSN && any_condjump_p (insn)
3493 && (pattern = pc_set (insn)) != NULL_RTX)
3494 {
3495 rtx ifelse = SET_SRC (pattern);
3496 rtx label;
3497 int taken;
3498
3499 if (GET_CODE (ifelse) != IF_THEN_ELSE)
3500 continue;
3501
3502 if (GET_CODE (XEXP (ifelse, 1)) == LABEL_REF)
3503 {
3504 taken = 1;
3505 label = XEXP (XEXP (ifelse, 1), 0);
3506 }
3507 /* An inverted jump reverses the probabilities. */
3508 else if (GET_CODE (XEXP (ifelse, 2)) == LABEL_REF)
3509 {
3510 taken = 0;
3511 label = XEXP (XEXP (ifelse, 2), 0);
3512 }
3513 /* We shouldn't have to worry about conditional returns during
3514 the expansion stage, but handle them gracefully anyway. */
3515 else if (GET_CODE (XEXP (ifelse, 1)) == RETURN)
3516 {
3517 taken = 1;
3518 label = NULL_RTX;
3519 }
3520 /* An inverted return reverses the probabilities. */
3521 else if (GET_CODE (XEXP (ifelse, 2)) == RETURN)
3522 {
3523 taken = 0;
3524 label = NULL_RTX;
3525 }
3526 else
3527 continue;
3528
3529 /* If the test is expected to fail, reverse the
3530 probabilities. */
3531 if (integer_zerop (arg1))
3532 taken = 1 - taken;
3533
3534 /* If we are jumping to the false label, reverse the
3535 probabilities. */
3536 if (label == NULL_RTX)
3537 ; /* conditional return */
3538 else if (label == if_false_label)
3539 taken = 1 - taken;
3540 else if (label != if_true_label)
3541 continue;
3542
3543 num_jumps++;
3544 predict_insn_def (insn, PRED_BUILTIN_EXPECT, taken);
3545 }
3546 }
3547
3548 /* If no jumps were modified, fail and do __builtin_expect the normal
3549 way. */
3550 if (num_jumps == 0)
3551 ret = NULL_RTX;
3552 }
3553
3554 return ret;
3555 }
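
/* Illustrative sketch of the expected caller (an assumption about
   do_jump's shape, not code from this file):

	rtx seq = expand_builtin_expect_jump (exp, if_false_label,
					      if_true_label);
	if (seq != NULL_RTX)
	  emit_insn (seq);
	else
	  ... expand the call normally and branch on its 0/1 result ...

   Emitting the returned SEQUENCE keeps the PRED_BUILTIN_EXPECT notes
   attached above instead of materializing the test as an SCC value.  */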
3556 \f
3557 /* Expand an expression EXP that calls a built-in function,
3558 with result going to TARGET if that's convenient
3559 (and in mode MODE if that's convenient).
3560 SUBTARGET may be used as the target for computing one of EXP's operands.
3561 IGNORE is nonzero if the value is to be ignored. */
3562
3563 rtx
3564 expand_builtin (exp, target, subtarget, mode, ignore)
3565 tree exp;
3566 rtx target;
3567 rtx subtarget;
3568 enum machine_mode mode;
3569 int ignore;
3570 {
3571 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3572 tree arglist = TREE_OPERAND (exp, 1);
3573 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
3574
3575 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
3576 return (*targetm.expand_builtin) (exp, target, subtarget, mode, ignore);
3577
3578 /* When not optimizing, generate calls to library functions for a certain
3579 set of builtins. */
3580 if (!optimize && !CALLED_AS_BUILT_IN (fndecl))
3581 switch (fcode)
3582 {
3583 case BUILT_IN_SIN:
3584 case BUILT_IN_COS:
3585 case BUILT_IN_SQRT:
3586 case BUILT_IN_SQRTF:
3587 case BUILT_IN_SQRTL:
3588 case BUILT_IN_MEMSET:
3589 case BUILT_IN_MEMCPY:
3590 case BUILT_IN_MEMCMP:
3591 case BUILT_IN_BCMP:
3592 case BUILT_IN_BZERO:
3593 case BUILT_IN_INDEX:
3594 case BUILT_IN_RINDEX:
3595 case BUILT_IN_STRCHR:
3596 case BUILT_IN_STRRCHR:
3597 case BUILT_IN_STRLEN:
3598 case BUILT_IN_STRCPY:
3599 case BUILT_IN_STRNCPY:
3600 case BUILT_IN_STRNCMP:
3601 case BUILT_IN_STRSTR:
3602 case BUILT_IN_STRPBRK:
3603 case BUILT_IN_STRCAT:
3604 case BUILT_IN_STRNCAT:
3605 case BUILT_IN_STRSPN:
3606 case BUILT_IN_STRCSPN:
3607 case BUILT_IN_STRCMP:
3608 case BUILT_IN_FFS:
3609 case BUILT_IN_PUTCHAR:
3610 case BUILT_IN_PUTS:
3611 case BUILT_IN_PRINTF:
3612 case BUILT_IN_FPUTC:
3613 case BUILT_IN_FPUTS:
3614 case BUILT_IN_FWRITE:
3615 case BUILT_IN_PUTCHAR_UNLOCKED:
3616 case BUILT_IN_PUTS_UNLOCKED:
3617 case BUILT_IN_PRINTF_UNLOCKED:
3618 case BUILT_IN_FPUTC_UNLOCKED:
3619 case BUILT_IN_FPUTS_UNLOCKED:
3620 case BUILT_IN_FWRITE_UNLOCKED:
3621 return expand_call (exp, target, ignore);
3622
3623 default:
3624 break;
3625 }
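
/* Illustrative example of the distinction made above (hypothetical user
   code, not part of this file).  At -O0,

	size_t n1 = strlen (s);

   is recognized as BUILT_IN_STRLEN but, not being CALLED_AS_BUILT_IN,
   is emitted as an ordinary call to the strlen library function, while

	size_t n2 = __builtin_strlen (s);

   still reaches the expanders in the switch below.  */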
3626
3627 switch (fcode)
3628 {
3629 case BUILT_IN_ABS:
3630 case BUILT_IN_LABS:
3631 case BUILT_IN_LLABS:
3632 case BUILT_IN_IMAXABS:
3633 case BUILT_IN_FABS:
3634 case BUILT_IN_FABSF:
3635 case BUILT_IN_FABSL:
3636 /* build_function_call changes these into ABS_EXPR. */
3637 abort ();
3638
3639 case BUILT_IN_CONJ:
3640 case BUILT_IN_CONJF:
3641 case BUILT_IN_CONJL:
3642 case BUILT_IN_CREAL:
3643 case BUILT_IN_CREALF:
3644 case BUILT_IN_CREALL:
3645 case BUILT_IN_CIMAG:
3646 case BUILT_IN_CIMAGF:
3647 case BUILT_IN_CIMAGL:
3648 /* expand_tree_builtin changes these into CONJ_EXPR, REALPART_EXPR
3649 and IMAGPART_EXPR. */
3650 abort ();
3651
3652 case BUILT_IN_SIN:
3653 case BUILT_IN_SINF:
3654 case BUILT_IN_SINL:
3655 case BUILT_IN_COS:
3656 case BUILT_IN_COSF:
3657 case BUILT_IN_COSL:
3658 /* Treat these like sqrt only if unsafe math optimizations are allowed,
3659 because of possible accuracy problems. */
3660 if (! flag_unsafe_math_optimizations)
3661 break;
3662 case BUILT_IN_SQRT:
3663 case BUILT_IN_SQRTF:
3664 case BUILT_IN_SQRTL:
3665 target = expand_builtin_mathfn (exp, target, subtarget);
3666 if (target)
3667 return target;
3668 break;
3669
3670 case BUILT_IN_FMOD:
3671 break;
3672
3673 case BUILT_IN_APPLY_ARGS:
3674 return expand_builtin_apply_args ();
3675
3676 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
3677 FUNCTION with a copy of the parameters described by
3678 ARGUMENTS, and ARGSIZE. It returns a block of memory
3679 allocated on the stack into which are stored all the registers
3680 that might possibly be used for returning the result of a
3681 function. ARGUMENTS is the value returned by
3682 __builtin_apply_args. ARGSIZE is the number of bytes of
3683 arguments that must be copied. ??? How should this value be
3684 computed? We'll also need a safe worst case value for varargs
3685 functions. */
3686 case BUILT_IN_APPLY:
3687 if (!validate_arglist (arglist, POINTER_TYPE,
3688 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3689 && !validate_arglist (arglist, REFERENCE_TYPE,
3690 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3691 return const0_rtx;
3692 else
3693 {
3694 int i;
3695 tree t;
3696 rtx ops[3];
3697
3698 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
3699 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
3700
3701 return expand_builtin_apply (ops[0], ops[1], ops[2]);
3702 }
3703
3704 /* __builtin_return (RESULT) causes the function to return the
3705 value described by RESULT. RESULT is address of the block of
3706 memory returned by __builtin_apply. */
3707 case BUILT_IN_RETURN:
3708 if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
3709 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
3710 NULL_RTX, VOIDmode, 0));
3711 return const0_rtx;
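
/* Illustrative sketch of how these builtins cooperate in user code
   (hypothetical wrapper; `wrapped' and ARG_BYTES are assumptions made
   for the example, not names from this file):

	void
	forward (void)
	{
	  void *args = __builtin_apply_args ();
	  void *result = __builtin_apply ((void (*) ()) wrapped,
					  args, ARG_BYTES);
	  __builtin_return (result);
	}

   __builtin_apply_args captures the incoming argument registers and
   stack pointer, __builtin_apply re-invokes `wrapped' with that state
   plus ARG_BYTES bytes of stack arguments, and __builtin_return returns
   whatever register block the callee left behind.  */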
3712
3713 case BUILT_IN_SAVEREGS:
3714 return expand_builtin_saveregs ();
3715
3716 case BUILT_IN_ARGS_INFO:
3717 return expand_builtin_args_info (exp);
3718
3719 /* Return the address of the first anonymous stack arg. */
3720 case BUILT_IN_NEXT_ARG:
3721 return expand_builtin_next_arg (arglist);
3722
3723 case BUILT_IN_CLASSIFY_TYPE:
3724 return expand_builtin_classify_type (arglist);
3725
3726 case BUILT_IN_CONSTANT_P:
3727 return expand_builtin_constant_p (exp);
3728
3729 case BUILT_IN_FRAME_ADDRESS:
3730 case BUILT_IN_RETURN_ADDRESS:
3731 return expand_builtin_frame_address (exp);
3732
3733 /* Returns the address of the area where the structure is returned,
3734 or 0 otherwise. */
3735 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
3736 if (arglist != 0
3737 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
3738 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
3739 return const0_rtx;
3740 else
3741 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
3742
3743 case BUILT_IN_ALLOCA:
3744 target = expand_builtin_alloca (arglist, target);
3745 if (target)
3746 return target;
3747 break;
3748
3749 case BUILT_IN_FFS:
3750 target = expand_builtin_ffs (arglist, target, subtarget);
3751 if (target)
3752 return target;
3753 break;
3754
3755 case BUILT_IN_STRLEN:
3756 target = expand_builtin_strlen (exp, target);
3757 if (target)
3758 return target;
3759 break;
3760
3761 case BUILT_IN_STRCPY:
3762 target = expand_builtin_strcpy (exp, target, mode);
3763 if (target)
3764 return target;
3765 break;
3766
3767 case BUILT_IN_STRNCPY:
3768 target = expand_builtin_strncpy (arglist, target, mode);
3769 if (target)
3770 return target;
3771 break;
3772
3773 case BUILT_IN_STRCAT:
3774 target = expand_builtin_strcat (arglist, target, mode);
3775 if (target)
3776 return target;
3777 break;
3778
3779 case BUILT_IN_STRNCAT:
3780 target = expand_builtin_strncat (arglist, target, mode);
3781 if (target)
3782 return target;
3783 break;
3784
3785 case BUILT_IN_STRSPN:
3786 target = expand_builtin_strspn (arglist, target, mode);
3787 if (target)
3788 return target;
3789 break;
3790
3791 case BUILT_IN_STRCSPN:
3792 target = expand_builtin_strcspn (arglist, target, mode);
3793 if (target)
3794 return target;
3795 break;
3796
3797 case BUILT_IN_STRSTR:
3798 target = expand_builtin_strstr (arglist, target, mode);
3799 if (target)
3800 return target;
3801 break;
3802
3803 case BUILT_IN_STRPBRK:
3804 target = expand_builtin_strpbrk (arglist, target, mode);
3805 if (target)
3806 return target;
3807 break;
3808
3809 case BUILT_IN_INDEX:
3810 case BUILT_IN_STRCHR:
3811 target = expand_builtin_strchr (arglist, target, mode);
3812 if (target)
3813 return target;
3814 break;
3815
3816 case BUILT_IN_RINDEX:
3817 case BUILT_IN_STRRCHR:
3818 target = expand_builtin_strrchr (arglist, target, mode);
3819 if (target)
3820 return target;
3821 break;
3822
3823 case BUILT_IN_MEMCPY:
3824 target = expand_builtin_memcpy (arglist, target, mode);
3825 if (target)
3826 return target;
3827 break;
3828
3829 case BUILT_IN_MEMSET:
3830 target = expand_builtin_memset (exp, target, mode);
3831 if (target)
3832 return target;
3833 break;
3834
3835 case BUILT_IN_BZERO:
3836 target = expand_builtin_bzero (exp);
3837 if (target)
3838 return target;
3839 break;
3840
3841 case BUILT_IN_STRCMP:
3842 target = expand_builtin_strcmp (exp, target, mode);
3843 if (target)
3844 return target;
3845 break;
3846
3847 case BUILT_IN_STRNCMP:
3848 target = expand_builtin_strncmp (exp, target, mode);
3849 if (target)
3850 return target;
3851 break;
3852
3853 case BUILT_IN_BCMP:
3854 case BUILT_IN_MEMCMP:
3855 target = expand_builtin_memcmp (exp, arglist, target, mode);
3856 if (target)
3857 return target;
3858 break;
3859
3860 case BUILT_IN_SETJMP:
3861 target = expand_builtin_setjmp (arglist, target);
3862 if (target)
3863 return target;
3864 break;
3865
3866 /* __builtin_longjmp is passed a pointer to an array of five words.
3867 It's similar to the C library longjmp function but works with
3868 __builtin_setjmp above. */
3869 case BUILT_IN_LONGJMP:
3870 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3871 break;
3872 else
3873 {
3874 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
3875 VOIDmode, 0);
3876 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
3877 NULL_RTX, VOIDmode, 0);
3878
3879 if (value != const1_rtx)
3880 {
3881 error ("__builtin_longjmp second argument must be 1");
3882 return const0_rtx;
3883 }
3884
3885 expand_builtin_longjmp (buf_addr, value);
3886 return const0_rtx;
3887 }
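
/* Illustrative pairing of the two builtins (hypothetical user code, not
   part of this file).  The buffer is an array of five words, and the
   second argument to __builtin_longjmp must be the literal 1, as
   enforced above:

	void *jump_buffer[5];

	int
	with_escape (void)
	{
	  if (__builtin_setjmp (jump_buffer))
	    return 1;
	  do_work ();
	  return 0;
	}

	void
	escape (void)
	{
	  __builtin_longjmp (jump_buffer, 1);
	}

   where do_work is assumed to call escape on failure.  */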
3888
3889 case BUILT_IN_TRAP:
3890 #ifdef HAVE_trap
3891 if (HAVE_trap)
3892 emit_insn (gen_trap ());
3893 else
3894 #endif
3895 error ("__builtin_trap not supported by this target");
3896 emit_barrier ();
3897 return const0_rtx;
3898
3899 case BUILT_IN_PUTCHAR:
3900 case BUILT_IN_PUTS:
3901 case BUILT_IN_FPUTC:
3902 case BUILT_IN_FWRITE:
3903 case BUILT_IN_PUTCHAR_UNLOCKED:
3904 case BUILT_IN_PUTS_UNLOCKED:
3905 case BUILT_IN_FPUTC_UNLOCKED:
3906 case BUILT_IN_FWRITE_UNLOCKED:
3907 break;
3908 case BUILT_IN_FPUTS:
3909 target = expand_builtin_fputs (arglist, ignore, /*unlocked=*/ 0);
3910 if (target)
3911 return target;
3912 break;
3913 case BUILT_IN_FPUTS_UNLOCKED:
3914 target = expand_builtin_fputs (arglist, ignore, /*unlocked=*/ 1);
3915 if (target)
3916 return target;
3917 break;
3918
3919 /* Various hooks for the DWARF 2 __throw routine. */
3920 case BUILT_IN_UNWIND_INIT:
3921 expand_builtin_unwind_init ();
3922 return const0_rtx;
3923 case BUILT_IN_DWARF_CFA:
3924 return virtual_cfa_rtx;
3925 #ifdef DWARF2_UNWIND_INFO
3926 case BUILT_IN_DWARF_FP_REGNUM:
3927 return expand_builtin_dwarf_fp_regnum ();
3928 case BUILT_IN_INIT_DWARF_REG_SIZES:
3929 expand_builtin_init_dwarf_reg_sizes (TREE_VALUE (arglist));
3930 return const0_rtx;
3931 #endif
3932 case BUILT_IN_FROB_RETURN_ADDR:
3933 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
3934 case BUILT_IN_EXTRACT_RETURN_ADDR:
3935 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
3936 case BUILT_IN_EH_RETURN:
3937 expand_builtin_eh_return (TREE_VALUE (arglist),
3938 TREE_VALUE (TREE_CHAIN (arglist)));
3939 return const0_rtx;
3940 #ifdef EH_RETURN_DATA_REGNO
3941 case BUILT_IN_EH_RETURN_DATA_REGNO:
3942 return expand_builtin_eh_return_data_regno (arglist);
3943 #endif
3944 case BUILT_IN_VARARGS_START:
3945 return expand_builtin_va_start (0, arglist);
3946 case BUILT_IN_STDARG_START:
3947 return expand_builtin_va_start (1, arglist);
3948 case BUILT_IN_VA_END:
3949 return expand_builtin_va_end (arglist);
3950 case BUILT_IN_VA_COPY:
3951 return expand_builtin_va_copy (arglist);
3952 case BUILT_IN_EXPECT:
3953 return expand_builtin_expect (arglist, target);
3954 case BUILT_IN_PREFETCH:
3955 expand_builtin_prefetch (arglist);
3956 return const0_rtx;
3957
3958
3959 default: /* just do library call, if unknown builtin */
3960 error ("built-in function `%s' not currently supported",
3961 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
3962 }
3963
3964 /* The switch statement above can drop through to cause the function
3965 to be called normally. */
3966 return expand_call (exp, target, ignore);
3967 }
3968
3969 /* Fold a call to __builtin_constant_p, if we know it will evaluate to a
3970 constant. ARGLIST is the argument list of the call. */
3971
3972 static tree
3973 fold_builtin_constant_p (arglist)
3974 tree arglist;
3975 {
3976 if (arglist == 0)
3977 return 0;
3978
3979 arglist = TREE_VALUE (arglist);
3980
3981 /* We return 1 for a numeric type that's known to be a constant
3982 value at compile-time or for an aggregate type that's a
3983 literal constant. */
3984 STRIP_NOPS (arglist);
3985
3986 /* If we know this is a constant, return the constant one. */
3987 if (TREE_CODE_CLASS (TREE_CODE (arglist)) == 'c'
3988 || (TREE_CODE (arglist) == CONSTRUCTOR
3989 && TREE_CONSTANT (arglist))
3990 || (TREE_CODE (arglist) == ADDR_EXPR
3991 && TREE_CODE (TREE_OPERAND (arglist, 0)) == STRING_CST))
3992 return integer_one_node;
3993
3994 /* If we aren't going to be running CSE or this expression
3995 has side effects, show we don't know it to be a constant.
3996 Likewise if it's a pointer or aggregate type since in those
3997 cases we only want literals, since those are only optimized
3998 when generating RTL, not later.
3999 And finally, if we are compiling an initializer, not code, we
4000 need to return a definite result now; there's not going to be any
4001 more optimization done. */
4002 if (TREE_SIDE_EFFECTS (arglist) || cse_not_expected
4003 || AGGREGATE_TYPE_P (TREE_TYPE (arglist))
4004 || POINTER_TYPE_P (TREE_TYPE (arglist))
4005 || cfun == 0)
4006 return integer_zero_node;
4007
4008 return 0;
4009 }
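
/* Illustrative folding behaviour of the routine above (hypothetical
   examples, not part of this file):

	__builtin_constant_p (42)	folds to 1
	__builtin_constant_p ("abc")	folds to 1 (ADDR_EXPR of a STRING_CST)
	__builtin_constant_p (ext_var)	folds to 0 only when no later
					optimization could still prove it
					constant; otherwise left unfolded

   Returning 0 (NULL_TREE) from the folder means "not folded yet", so the
   final decision is deferred to later RTL optimization of the expanded
   builtin.  */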
4010
4011 /* Fold a call to __builtin_classify_type. */
4012
4013 static tree
4014 fold_builtin_classify_type (arglist)
4015 tree arglist;
4016 {
4017 if (arglist == 0)
4018 return build_int_2 (no_type_class, 0);
4019
4020 return build_int_2 (type_to_class (TREE_TYPE (TREE_VALUE (arglist))), 0);
4021 }
4022
4023 /* Used by constant folding to eliminate some builtin calls early. EXP is
4024 the CALL_EXPR of a call to a builtin function. */
4025
4026 tree
4027 fold_builtin (exp)
4028 tree exp;
4029 {
4030 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4031 tree arglist = TREE_OPERAND (exp, 1);
4032 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4033
4034 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
4035 return 0;
4036
4037 switch (fcode)
4038 {
4039 case BUILT_IN_CONSTANT_P:
4040 return fold_builtin_constant_p (arglist);
4041
4042 case BUILT_IN_CLASSIFY_TYPE:
4043 return fold_builtin_classify_type (arglist);
4044
4045 case BUILT_IN_STRLEN:
4046 if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
4047 {
4048 tree len = c_strlen (TREE_VALUE (arglist));
4049 if (len != 0)
4050 return len;
4051 }
4052 break;
4053
4054 default:
4055 break;
4056 }
4057
4058 return 0;
4059 }
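
/* Illustrative effect of the early folding above (hypothetical user
   code, not part of this file):

	size_t n = __builtin_strlen ("hello");

   folds to the constant 5 via c_strlen without expanding a call, and
   __builtin_constant_p / __builtin_classify_type are likewise resolved
   at fold time when their answers are already known.  */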
4060
4061 static tree
4062 build_function_call_expr (fn, arglist)
4063 tree fn, arglist;
4064 {
4065 tree call_expr;
4066
4067 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
4068 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
4069 call_expr, arglist);
4070 TREE_SIDE_EFFECTS (call_expr) = 1;
4071 return fold (call_expr);
4072 }
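
/* Illustrative use of the helper above (an assumed caller, not code from
   this file; ARGLIST is a hypothetical TREE_LIST of arguments):

	tree fn = built_in_decls[(int) BUILT_IN_PUTS];
	tree call = build_function_call_expr (fn, arglist);

   The result is a CALL_EXPR whose operand 0 is an ADDR_EXPR of FN and
   whose operand 1 is ARGLIST, marked TREE_SIDE_EFFECTS and then passed
   through fold.  */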
4073
4074 /* This function validates the types of a function call argument list
4075 represented as a tree chain of parameters against a specified list
4076 of tree_codes. If the last specifier is a 0, that represents an
4077 ellipsis; otherwise the last specifier must be a VOID_TYPE. */
4078
4079 static int
4080 validate_arglist VPARAMS ((tree arglist, ...))
4081 {
4082 enum tree_code code;
4083 int res = 0;
4084
4085 VA_OPEN (ap, arglist);
4086 VA_FIXEDARG (ap, tree, arglist);
4087
4088 do {
4089 code = va_arg (ap, enum tree_code);
4090 switch (code)
4091 {
4092 case 0:
4093 /* This signifies an ellipsis; any further arguments are all OK. */
4094 res = 1;
4095 goto end;
4096 case VOID_TYPE:
4097 /* This signifies an endlink; if no arguments remain, return
4098 true, otherwise return false. */
4099 res = arglist == 0;
4100 goto end;
4101 default:
4102 /* If no parameters remain or the parameter's code does not
4103 match the specified code, return false. Otherwise continue
4104 checking any remaining arguments. */
4105 if (arglist == 0 || code != TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))))
4106 goto end;
4107 break;
4108 }
4109 arglist = TREE_CHAIN (arglist);
4110 } while (1);
4111
4112 /* We need gotos here since we can only have one VA_CLOSE in a
4113 function. */
4114 end: ;
4115 VA_CLOSE (ap);
4116
4117 return res;
4118 }
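
/* Illustrative calls to the checker above.  The first form appears for
   __builtin_longjmp earlier in this file; the second is a hypothetical
   variadic form allowed by the 0 terminator:

	validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)

   accepts exactly a pointer followed by an integer, while

	validate_arglist (arglist, POINTER_TYPE, 0)

   accepts a pointer followed by any further arguments, since the
   trailing 0 stands for an ellipsis.  */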
4119
4120 /* Default version of target-specific builtin setup that does nothing. */
4121
4122 void
4123 default_init_builtins ()
4124 {
4125 }
4126
4127 /* Default target-specific builtin expander that does nothing. */
4128
4129 rtx
4130 default_expand_builtin (exp, target, subtarget, mode, ignore)
4131 tree exp ATTRIBUTE_UNUSED;
4132 rtx target ATTRIBUTE_UNUSED;
4133 rtx subtarget ATTRIBUTE_UNUSED;
4134 enum machine_mode mode ATTRIBUTE_UNUSED;
4135 int ignore ATTRIBUTE_UNUSED;
4136 {
4137 return NULL_RTX;
4138 }