/* Pass computing data for optimizing stdarg functions.
   Copyright (C) 2004-2015 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "tree.h"
#include "fold-const.h"
#include "hard-reg-set.h"
#include "function.h"
#include "langhooks.h"
#include "gimple-pretty-print.h"
#include "target.h"
#include "predict.h"
#include "dominance.h"
#include "cfg.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "sbitmap.h"
#include "tree-cfg.h"
#include "tree-pass.h"
#include "tree-stdarg.h"
/* A simple pass that attempts to optimize stdarg functions on architectures
   that need to save register arguments to stack on entry to stdarg functions.
   If the function doesn't use any va_start macros, no registers need to
   be saved.  If va_start macros are used and the va_list variables don't
   escape the function, it is only necessary to save registers that will be
   used in va_arg macros.  E.g. if va_arg is only used with integral types
   in the function, floating point registers don't need to be saved, etc.  */
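
/* As an illustration (the function below is an example, not part of this
   pass), in

     int
     sum (int count, ...)
     {
       va_list ap;
       int i, total = 0;

       va_start (ap, count);
       for (i = 0; i < count; i++)
	 total += va_arg (ap, int);
       va_end (ap);
       return total;
     }

   va_arg is only used with an integral type and ap never escapes, so on
   targets that spill argument registers at function entry only the GPRs,
   and only as many of them as are actually read, have to be saved.  */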
/* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
   is executed at most as many times as VA_START_BB.  */
static bool
reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
{
  vec<edge> stack = vNULL;
  edge e;
  edge_iterator ei;
  sbitmap visited;
  bool ret;

  if (va_arg_bb == va_start_bb)
    return true;

  if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
    return false;

  visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
  bitmap_clear (visited);
  ret = true;

  FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
    stack.safe_push (e);

  while (! stack.is_empty ())
    {
      basic_block src;

      e = stack.pop ();
      src = e->src;

      if (e->flags & EDGE_COMPLEX)
	{
	  ret = false;
	  break;
	}

      if (src == va_start_bb)
	continue;

      /* va_arg_bb can be executed more times than va_start_bb.  */
      if (src == va_arg_bb)
	{
	  ret = false;
	  break;
	}

      gcc_assert (src != ENTRY_BLOCK_PTR_FOR_FN (cfun));

      if (! bitmap_bit_p (visited, src->index))
	{
	  bitmap_set_bit (visited, src->index);
	  FOR_EACH_EDGE (e, ei, src->preds)
	    stack.safe_push (e);
	}
    }

  stack.release ();
  sbitmap_free (visited);
  return ret;
}
/* For the statement COUNTER = RHS, if RHS is COUNTER + constant,
   return the constant, otherwise return HOST_WIDE_INT_M1U.
   GPR_P is true if this is the GPR counter.  */
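
/* Illustrative example (the SSA names and the bump value are hypothetical):
   for a struct va_list target the counter update may look like

     counter.1 = ap.gp_offset;
     counter.2 = counter.1 + 8;
     ap.gp_offset = counter.2;

   and calling this function with COUNTER being ap.gp_offset and RHS being
   counter.2 would return 8.  */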
static unsigned HOST_WIDE_INT
va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
		      bool gpr_p)
{
  tree lhs, orig_lhs;
  gimple stmt;
  unsigned HOST_WIDE_INT ret = 0, val, counter_val;
  unsigned int max_size;

  if (si->offsets == NULL)
    {
      unsigned int i;

      si->offsets = XNEWVEC (int, num_ssa_names);
      for (i = 0; i < num_ssa_names; ++i)
	si->offsets[i] = -1;
    }

  counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
  max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
  orig_lhs = lhs = rhs;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	{
	  if (counter_val >= max_size)
	    {
	      ret = max_size;
	      break;
	    }

	  ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
	  break;
	}

      stmt = SSA_NAME_DEF_STMT (lhs);

      if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs)
	return HOST_WIDE_INT_M1U;

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
	{
	  ret += tree_to_uhwi (gimple_assign_rhs2 (stmt));
	  lhs = rhs1;
	  continue;
	}

      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
	{
	  ret += tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
	return HOST_WIDE_INT_M1U;

      rhs = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (counter) != TREE_CODE (rhs))
	return HOST_WIDE_INT_M1U;

      if (TREE_CODE (counter) == COMPONENT_REF)
	{
	  if (get_base_address (counter) != get_base_address (rhs)
	      || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
	      || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
	    return HOST_WIDE_INT_M1U;
	}
      else if (counter != rhs)
	return HOST_WIDE_INT_M1U;

      lhs = NULL;
    }

  lhs = orig_lhs;
  val = ret + counter_val;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	break;

      if (val >= max_size)
	si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
      else
	si->offsets[SSA_NAME_VERSION (lhs)] = val;

      stmt = SSA_NAME_DEF_STMT (lhs);

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
	{
	  val -= tree_to_uhwi (gimple_assign_rhs2 (stmt));
	  lhs = rhs1;
	  continue;
	}

      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
	{
	  val -= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      lhs = NULL;
    }

  return ret;
}
/* Called by walk_tree to look for references to va_list variables.  */
static tree
find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
			void *data)
{
  bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info;
  tree var = *tp;

  if (TREE_CODE (var) == SSA_NAME)
    {
      if (bitmap_bit_p (va_list_vars, SSA_NAME_VERSION (var)))
	return var;
    }
  else if (TREE_CODE (var) == VAR_DECL)
    {
      if (bitmap_bit_p (va_list_vars, DECL_UID (var) + num_ssa_names))
	return var;
    }

  return NULL_TREE;
}
/* Helper function of va_list_counter_struct_op.  Compute
   cfun->va_list_{g,f}pr_size.  AP is a va_list GPR/FPR counter;
   if WRITE_P is true, it was seen in an AP = VAR statement, otherwise
   in a VAR = AP statement.  GPR_P is true if AP is a GPR counter,
   false if it is an FPR counter.  */
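
/* For instance (values hypothetical): a write accounted for as a bump of
   16 in a block executed at most once per va_start grows
   cfun->va_list_gpr_size by 16, while a write that cannot be recognized
   as a constant bump conservatively pushes the size to
   VA_LIST_MAX_GPR_SIZE.  */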
static void
va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
		    bool write_p)
{
  unsigned HOST_WIDE_INT increment;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  if (write_p
      && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
    {
      if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
	{
	  cfun->va_list_gpr_size += increment;
	  return;
	}

      if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
	{
	  cfun->va_list_fpr_size += increment;
	  return;
	}
    }

  if (write_p || !si->compute_sizes)
    {
      if (gpr_p)
	cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      else
	cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
}
/* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
   If WRITE_P is true, AP has been seen in an AP = VAR assignment; if WRITE_P
   is false, AP has been seen in a VAR = AP assignment.
   Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
   va_arg operation that doesn't cause the va_list variable to escape
   the current function.  */
static bool
va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
			   bool write_p)
{
  tree base;

  if (TREE_CODE (ap) != COMPONENT_REF
      || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
    return false;

  if (TREE_CODE (var) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (var)))
    return false;

  base = get_base_address (ap);
  if (TREE_CODE (base) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (base) + num_ssa_names))
    return false;

  if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
    va_list_counter_op (si, ap, var, true, write_p);
  else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
    va_list_counter_op (si, ap, var, false, write_p);

  return true;
}
/* Check for TEM = AP.  Return true if found and the caller shouldn't
   search for va_list references in the statement.  */
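
/* For a simple (void * or char *) va_list, the gimplified va_arg typically
   begins with a plain read such as (name hypothetical)

     tem_1 = ap;

   tem_1 is then recorded in si->va_list_escape_vars so that later uses of
   it can be checked for escapes.  */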
static bool
va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
{
  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  if (TREE_CODE (tem) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem)))
    return false;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    return false;

  if (va_list_counter_bump (si, ap, tem, true) == HOST_WIDE_INT_M1U)
    return false;

  /* Note the temporary, as we need to track whether it doesn't escape
     the current function.  */
  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (tem));

  return true;
}
/* Check for:
     tem1 = AP;
     TEM2 = tem1 + CST;
     AP = TEM2;
   sequence and update cfun->va_list_gpr_size.  Return true if found.  */
static bool
va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
{
  unsigned HOST_WIDE_INT increment;

  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  if (TREE_CODE (tem2) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem2)))
    return false;

  if (si->compute_sizes <= 0)
    return false;

  increment = va_list_counter_bump (si, ap, tem2, true);
  if (increment + 1 <= 1)
    return false;

  if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
    cfun->va_list_gpr_size += increment;
  else
    cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;

  return true;
}
/* If RHS is X, (some type *) X or X + CST for X a temporary variable
   containing the value of some va_list variable plus optionally some
   constant, either set si->va_list_escapes or add LHS to
   si->va_list_escape_vars, depending on whether LHS is a function local
   temporary.  */
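
/* E.g. (hypothetical SSA names) each of

     tem_2 = tem_1;
     tem_2 = (char *) tem_1;
     tem_2 = tem_1 + 4;

   keeps the tracked va_list value in a local temporary, so tem_2 is merely
   added to si->va_list_escape_vars, while an unrecognized use sets
   si->va_list_escapes.  */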
static void
check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
{
  if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
    return;

  if (TREE_CODE (rhs) == SSA_NAME)
    {
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (rhs)))
	return;
    }
  else if (TREE_CODE (rhs) == ADDR_EXPR
	   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)) == SSA_NAME)
    {
      tree ptr = TREE_OPERAND (TREE_OPERAND (rhs, 0), 0);
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (ptr)))
	return;
    }
  else
    return;

  if (TREE_CODE (lhs) != SSA_NAME)
    {
      si->va_list_escapes = true;
      return;
    }

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    {
      si->va_list_escapes = true;
      return;
    }

  if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
      == HOST_WIDE_INT_M1U)
    {
      si->va_list_escapes = true;
      return;
    }

  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (lhs));
}
/* Check all uses of temporaries from si->va_list_escape_vars bitmap.
   Return true if va_list might be escaping.  */
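
/* A use that lets the va_list escape would be, for instance, passing a
   tracked temporary to another function (hypothetical):

     foo (tem_1);

   whereas a dereference like x = *tem_1 only updates the required
   cfun->va_list_gpr_size.  */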
static bool
check_all_va_list_escapes (struct stdarg_info *si)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
	   gsi_next (&i))
	{
	  tree lhs;
	  use_operand_p uop;
	  ssa_op_iter soi;
	  gphi *phi = i.phi ();

	  lhs = PHI_RESULT (phi);
	  if (virtual_operand_p (lhs)
	      || bitmap_bit_p (si->va_list_escape_vars,
			       SSA_NAME_VERSION (lhs)))
	    continue;

	  FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
	    {
	      tree rhs = USE_FROM_PTR (uop);
	      if (TREE_CODE (rhs) == SSA_NAME
		  && bitmap_bit_p (si->va_list_escape_vars,
				   SSA_NAME_VERSION (rhs)))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fputs ("va_list escapes in ", dump_file);
		      print_gimple_stmt (dump_file, phi, 0, dump_flags);
		      fputc ('\n', dump_file);
		    }
		  return true;
		}
	    }
	}

      for (gimple_stmt_iterator i = gsi_start_bb (bb); !gsi_end_p (i);
	   gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  tree use;
	  ssa_op_iter iter;

	  if (is_gimple_debug (stmt))
	    continue;

	  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
	    {
	      if (! bitmap_bit_p (si->va_list_escape_vars,
				  SSA_NAME_VERSION (use)))
		continue;

	      if (is_gimple_assign (stmt))
		{
		  tree rhs = gimple_assign_rhs1 (stmt);
		  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);

		  /* x = *ap_temp;  */
		  if (rhs_code == MEM_REF
		      && TREE_OPERAND (rhs, 0) == use
		      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
		      && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (rhs)))
		      && si->offsets[SSA_NAME_VERSION (use)] != -1)
		    {
		      unsigned HOST_WIDE_INT gpr_size;
		      tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));

		      gpr_size = si->offsets[SSA_NAME_VERSION (use)]
				 + tree_to_shwi (TREE_OPERAND (rhs, 1))
				 + tree_to_uhwi (access_size);
		      if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
			cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
		      else if (gpr_size > cfun->va_list_gpr_size)
			cfun->va_list_gpr_size = gpr_size;
		      continue;
		    }

		  /* va_arg sequences may contain
		     other_ap_temp = ap_temp;
		     other_ap_temp = ap_temp + constant;
		     other_ap_temp = (some_type *) ap_temp;
		     ap = ap_temp;
		     statements.  */
		  if (rhs == use
		      && ((rhs_code == POINTER_PLUS_EXPR
			   && (TREE_CODE (gimple_assign_rhs2 (stmt))
			       == INTEGER_CST))
			  || gimple_assign_cast_p (stmt)
			  || (get_gimple_rhs_class (rhs_code)
			      == GIMPLE_SINGLE_RHS)))
		    {
		      tree lhs = gimple_assign_lhs (stmt);

		      if (TREE_CODE (lhs) == SSA_NAME
			  && bitmap_bit_p (si->va_list_escape_vars,
					   SSA_NAME_VERSION (lhs)))
			continue;

		      if (TREE_CODE (lhs) == VAR_DECL
			  && bitmap_bit_p (si->va_list_vars,
					   DECL_UID (lhs) + num_ssa_names))
			continue;
		    }
		  else if (rhs_code == ADDR_EXPR
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
			   && TREE_OPERAND (TREE_OPERAND (rhs, 0), 0) == use)
		    {
		      tree lhs = gimple_assign_lhs (stmt);

		      if (bitmap_bit_p (si->va_list_escape_vars,
					SSA_NAME_VERSION (lhs)))
			continue;
		    }
		}

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
		  fputc ('\n', dump_file);
		}
	      return true;
	    }
	}
    }

  return false;
}
/* Optimize FUN->va_list_gpr_size and FUN->va_list_fpr_size.  */
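
/* The overall result is reported with -fdump-tree-stdarg as a line of the
   form (numbers are made up for illustration):

     sum: va_list escapes 0, needs to save 16 GPR units and 0 FPR units.  */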
static void
optimize_va_list_gpr_fpr_size (function *fun)
{
  basic_block bb;
  bool va_list_escapes = false;
  bool va_list_simple_ptr;
  struct stdarg_info si;
  struct walk_stmt_info wi;
  const char *funcname = NULL;
  tree cfun_va_list;

  fun->va_list_gpr_size = 0;
  fun->va_list_fpr_size = 0;
  memset (&si, 0, sizeof (si));
  si.va_list_vars = BITMAP_ALLOC (NULL);
  si.va_list_escape_vars = BITMAP_ALLOC (NULL);

  if (dump_file)
    funcname = lang_hooks.decl_printable_name (current_function_decl, 2);

  cfun_va_list = targetm.fn_abi_va_list (fun->decl);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
		       && (TREE_TYPE (cfun_va_list) == void_type_node
			   || TREE_TYPE (cfun_va_list) == char_type_node);
  gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr);

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  tree callee, ap;

	  if (!is_gimple_call (stmt))
	    continue;

	  callee = gimple_call_fndecl (stmt);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    continue;

	  switch (DECL_FUNCTION_CODE (callee))
	    {
	    case BUILT_IN_VA_START:
	      break;
	      /* If old style builtins are used, don't optimize anything.  */
	    case BUILT_IN_SAVEREGS:
	    case BUILT_IN_NEXT_ARG:
	      va_list_escapes = true;
	      continue;
	    default:
	      continue;
	    }

	  si.va_start_count++;
	  ap = gimple_call_arg (stmt, 0);

	  if (TREE_CODE (ap) != ADDR_EXPR)
	    {
	      va_list_escapes = true;
	      break;
	    }
	  ap = TREE_OPERAND (ap, 0);
	  if (TREE_CODE (ap) == ARRAY_REF)
	    {
	      if (! integer_zerop (TREE_OPERAND (ap, 1)))
		{
		  va_list_escapes = true;
		  break;
		}
	      ap = TREE_OPERAND (ap, 0);
	    }
	  if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
	      != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (fun->decl))
	      || TREE_CODE (ap) != VAR_DECL)
	    {
	      va_list_escapes = true;
	      break;
	    }

	  if (is_global_var (ap))
	    {
	      va_list_escapes = true;
	      break;
	    }

	  bitmap_set_bit (si.va_list_vars, DECL_UID (ap) + num_ssa_names);

	  /* VA_START_BB and VA_START_AP will be only used if there is just
	     one va_start in the function.  */
	  si.va_start_bb = bb;
	  si.va_start_ap = ap;
	}

      if (va_list_escapes)
	break;
    }

  /* If there were no va_start uses in the function, there is no need to
     save anything.  */
  if (si.va_start_count == 0)
    goto finish;

  /* If some va_list arguments weren't local, we can't optimize.  */
  if (va_list_escapes)
    goto finish;

  /* For void * or char * va_list, something useful can be done only
     if there is just one va_start.  */
  if (va_list_simple_ptr && si.va_start_count > 1)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For struct * va_list, if the backend didn't tell us what the counter
     fields are, there is nothing more we can do.  */
  if (!va_list_simple_ptr
      && va_list_gpr_counter_field == NULL_TREE
      && va_list_fpr_counter_field == NULL_TREE)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For void * or char * va_list there is just one counter
     (va_list itself).  Use VA_LIST_GPR_SIZE for it.  */
  if (va_list_simple_ptr)
    fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

  calculate_dominance_info (CDI_DOMINATORS);
  memset (&wi, 0, sizeof (wi));
  wi.info = si.va_list_vars;

  FOR_EACH_BB_FN (bb, fun)
    {
      si.compute_sizes = -1;
      si.bb = bb;

      /* For va_list_simple_ptr, we have to check PHI nodes too.  We treat
	 them as assignments for the purpose of escape analysis.  This is
	 not needed for non-simple va_list because virtual phis don't perform
	 any real data movement.  Also, check PHI nodes for taking address of
	 the va_list vars.  */
      tree lhs, rhs;
      use_operand_p uop;
      ssa_op_iter soi;

      for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
	   gsi_next (&i))
	{
	  gphi *phi = i.phi ();
	  lhs = PHI_RESULT (phi);

	  if (virtual_operand_p (lhs))
	    continue;

	  if (va_list_simple_ptr)
	    {
	      FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
		{
		  rhs = USE_FROM_PTR (uop);
		  if (va_list_ptr_read (&si, rhs, lhs))
		    continue;
		  else if (va_list_ptr_write (&si, lhs, rhs))
		    continue;
		  else
		    check_va_list_escapes (&si, lhs, rhs);

		  if (si.va_list_escapes)
		    {
		      if (dump_file && (dump_flags & TDF_DETAILS))
			{
			  fputs ("va_list escapes in ", dump_file);
			  print_gimple_stmt (dump_file, phi, 0, dump_flags);
			  fputc ('\n', dump_file);
			}
		      va_list_escapes = true;
		    }
		}
	    }

	  for (unsigned j = 0; !va_list_escapes
	       && j < gimple_phi_num_args (phi); ++j)
	    if ((!va_list_simple_ptr
		 || TREE_CODE (gimple_phi_arg_def (phi, j)) != SSA_NAME)
		&& walk_tree (gimple_phi_arg_def_ptr (phi, j),
			      find_va_list_reference, &wi, NULL))
	      {
		if (dump_file && (dump_flags & TDF_DETAILS))
		  {
		    fputs ("va_list escapes in ", dump_file);
		    print_gimple_stmt (dump_file, phi, 0, dump_flags);
		    fputc ('\n', dump_file);
		  }
		va_list_escapes = true;
	      }
	}

      for (gimple_stmt_iterator i = gsi_start_bb (bb);
	   !gsi_end_p (i) && !va_list_escapes;
	   gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);

	  /* Don't look at __builtin_va_{start,end}, they are ok.  */
	  if (is_gimple_call (stmt))
	    {
	      tree callee = gimple_call_fndecl (stmt);

	      if (callee
		  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
		  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
		continue;
	    }

	  if (is_gimple_assign (stmt))
	    {
	      lhs = gimple_assign_lhs (stmt);
	      rhs = gimple_assign_rhs1 (stmt);

	      if (va_list_simple_ptr)
		{
		  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
		      == GIMPLE_SINGLE_RHS)
		    {
		      /* Check for ap ={v} {}.  */
		      if (TREE_CLOBBER_P (rhs))
			continue;

		      /* Check for tem = ap.  */
		      else if (va_list_ptr_read (&si, rhs, lhs))
			continue;

		      /* Check for the last insn in:
			 tem1 = ap;
			 tem2 = tem1 + CST;
			 ap = tem2;
			 sequence.  */
		      else if (va_list_ptr_write (&si, lhs, rhs))
			continue;
		    }

		  if ((gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		       && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
		      || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
		      || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
			  == GIMPLE_SINGLE_RHS))
		    check_va_list_escapes (&si, lhs, rhs);
		}
	      else
		{
		  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
		      == GIMPLE_SINGLE_RHS)
		    {
		      /* Check for ap ={v} {}.  */
		      if (TREE_CLOBBER_P (rhs))
			continue;

		      /* Check for ap[0].field = temp.  */
		      else if (va_list_counter_struct_op (&si, lhs, rhs, true))
			continue;

		      /* Check for temp = ap[0].field.  */
		      else if (va_list_counter_struct_op (&si, rhs, lhs,
							  false))
			continue;
		    }

		  /* Do any architecture specific checking.  */
		  if (targetm.stdarg_optimize_hook
		      && targetm.stdarg_optimize_hook (&si, stmt))
		    continue;
		}
	    }
	  else if (is_gimple_debug (stmt))
	    continue;

	  /* All other uses of va_list are either va_copy (that is not handled
	     in this optimization), taking address of va_list variable or
	     passing va_list to other functions (in that case va_list might
	     escape the function and therefore va_start needs to set it up
	     fully), or some unexpected use of va_list.  None of these should
	     happen in a gimplified VA_ARG_EXPR.  */
	  if (si.va_list_escapes
	      || walk_gimple_op (stmt, find_va_list_reference, &wi))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
		  fputc ('\n', dump_file);
		}
	      va_list_escapes = true;
	    }
	}

      if (va_list_escapes)
	break;
    }

  if (! va_list_escapes
      && va_list_simple_ptr
      && ! bitmap_empty_p (si.va_list_escape_vars)
      && check_all_va_list_escapes (&si))
    va_list_escapes = true;

finish:
  if (va_list_escapes)
    {
      fun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
  BITMAP_FREE (si.va_list_vars);
  BITMAP_FREE (si.va_list_escape_vars);
  free (si.offsets);
  if (dump_file)
    {
      fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
	       funcname, (int) va_list_escapes);
      if (fun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
	fputs ("all", dump_file);
      else
	fprintf (dump_file, "%d", cfun->va_list_gpr_size);
      fputs (" GPR units and ", dump_file);
      if (fun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
	fputs ("all", dump_file);
      else
	fprintf (dump_file, "%d", cfun->va_list_fpr_size);
      fputs (" FPR units.\n", dump_file);
    }
}
/* Return true if STMT is IFN_VA_ARG.  */
static bool
gimple_call_ifn_va_arg_p (gimple stmt)
{
  return (is_gimple_call (stmt)
	  && gimple_call_internal_p (stmt)
	  && gimple_call_internal_fn (stmt) == IFN_VA_ARG);
}
/* Expand IFN_VA_ARGs in FUN.  */
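
/* Judging from the accesses below, an IFN_VA_ARG call is expected to have
   the shape (schematically)

     lhs = VA_ARG (AP, &TYPE_DUMMY, DO_DEREF [, SIZE]);

   where the pointee type of the second argument is the type to fetch,
   DO_DEREF tells whether AP must be dereferenced first, and an optional
   fourth argument transports the size of a variable-sized value via
   WITH_SIZE_EXPR.  */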
static void
expand_ifn_va_arg_1 (function *fun)
{
  bool modified = false;
  basic_block bb;
  gimple_stmt_iterator i;

  FOR_EACH_BB_FN (bb, fun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      {
	gimple stmt = gsi_stmt (i);
	tree ap, expr, lhs, type, do_deref;
	gimple_seq pre = NULL, post = NULL;

	if (!gimple_call_ifn_va_arg_p (stmt))
	  continue;

	modified = true;

	type = TREE_TYPE (TREE_TYPE (gimple_call_arg (stmt, 1)));
	ap = gimple_call_arg (stmt, 0);
	do_deref = gimple_call_arg (stmt, 2);

	if (do_deref == integer_one_node)
	  ap = build_fold_indirect_ref (ap);

	push_gimplify_context (false);

	/* Make it easier for the backends by protecting the valist argument
	   from multiple evaluations.  */
	if (do_deref == integer_one_node)
	  gimplify_expr (&ap, &pre, &post, is_gimple_min_lval, fb_lvalue);
	else
	  gimplify_expr (&ap, &pre, &post, is_gimple_val, fb_rvalue);

	expr = targetm.gimplify_va_arg_expr (ap, type, &pre, &post);

	lhs = gimple_call_lhs (stmt);
	if (lhs != NULL_TREE)
	  {
	    unsigned int nargs = gimple_call_num_args (stmt);
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (lhs), type));

	    if (nargs == 4)
	      {
		/* We've transported the size of the fetched value with a
		   WITH_SIZE_EXPR here as the last argument of the internal
		   fn call.  Now reinstate the WITH_SIZE_EXPR.  */
		tree size = gimple_call_arg (stmt, nargs - 1);
		expr = build2 (WITH_SIZE_EXPR, TREE_TYPE (expr), expr, size);
	      }

	    /* We use gimplify_assign here, rather than gimple_build_assign,
	       because gimplify_assign knows how to deal with variable-sized
	       types.  */
	    gimplify_assign (lhs, expr, &pre);
	  }
	else
	  gimplify_expr (&expr, &pre, &post, is_gimple_lvalue, fb_lvalue);

	pop_gimplify_context (NULL);

	gimple_seq_add_seq (&pre, post);
	update_modified_stmts (pre);

	/* Add the sequence after IFN_VA_ARG.  This splits the bb right
	   after IFN_VA_ARG, and adds the sequence in one or more new bbs
	   in between.  */
	gimple_find_sub_bbs (pre, &i);

	/* Remove the IFN_VA_ARG gimple_call.  It's the last stmt in the
	   bb.  */
	gsi_remove (&i, true);
	gcc_assert (gsi_end_p (i));

	/* We're walking here into the bbs which contain the expansion of
	   IFN_VA_ARG, and will not contain another IFN_VA_ARG that needs
	   expanding.  We could try to skip walking these bbs, perhaps by
	   walking backwards over gimples and bbs.  */
      }

  if (!modified)
    return;

  free_dominance_info (CDI_DOMINATORS);
  update_ssa (TODO_update_ssa);
}
/* Expand IFN_VA_ARGs in FUN, if necessary.  */
static void
expand_ifn_va_arg (function *fun)
{
  if ((fun->curr_properties & PROP_gimple_lva) == 0)
    expand_ifn_va_arg_1 (fun);

#if ENABLE_CHECKING
  basic_block bb;
  gimple_stmt_iterator i;
  FOR_EACH_BB_FN (bb, fun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      gcc_assert (!gimple_call_ifn_va_arg_p (gsi_stmt (i)));
#endif
}
namespace {

const pass_data pass_data_stdarg =
{
  GIMPLE_PASS, /* type */
  "stdarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_gimple_lva, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_stdarg : public gimple_opt_pass
{
public:
  pass_stdarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_stdarg, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      /* Always run this pass, in order to expand va_arg internal_fns.  We
	 also need to do that if fun->stdarg == 0, because a va_arg may also
	 occur in a function without varargs, e.g. when passing a va_list to
	 another function.  */
      return true;
    }

  virtual unsigned int execute (function *);

}; // class pass_stdarg

unsigned int
pass_stdarg::execute (function *fun)
{
  /* TODO: Postpone expand_ifn_va_arg till after
     optimize_va_list_gpr_fpr_size.  */
  expand_ifn_va_arg (fun);

  if (flag_stdarg_opt
      /* This optimization is only for stdarg functions.  */
      && fun->stdarg != 0)
    optimize_va_list_gpr_fpr_size (fun);

  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_stdarg (gcc::context *ctxt)
{
  return new pass_stdarg (ctxt);
}
namespace {

const pass_data pass_data_lower_vaarg =
{
  GIMPLE_PASS, /* type */
  "lower_vaarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_gimple_lva, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_vaarg : public gimple_opt_pass
{
public:
  pass_lower_vaarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_vaarg, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return (cfun->curr_properties & PROP_gimple_lva) == 0;
    }

  virtual unsigned int execute (function *);

}; // class pass_lower_vaarg

unsigned int
pass_lower_vaarg::execute (function *fun)
{
  expand_ifn_va_arg (fun);
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_lower_vaarg (gcc::context *ctxt)
{
  return new pass_lower_vaarg (ctxt);
}