This is the mail archive of the gcc-patches@gcc.gnu.org mailing list for the GCC project.
Hi!

When doing the last cp_genericize_r change, I noticed that most of the
function is a spaghetti of if statements like:

  if (TREE_CODE (stmt) == X1)
    do_something;
  else if (TREE_CODE (stmt) == X2)
    do_something_else;
  else if (TREE_CODE (stmt) == X3)
    do_yet_another_stuff;
  else
    ...

sometimes with additional conditions testing something different.  This patch
transforms this into a big switch (with 29 case labels), so that the compiler
can decide more easily how to expand it efficiently (I don't think we have an
optimization pass that would transform the above into a switch).  The patch is
large, but mostly because I had to reindent some chunks of code, so I've also
attached a diff -upbd which is much smaller and easier to read.  (A small
stand-alone sketch of the general if-chain-to-switch shape follows the patch
below.)

Bootstrapped/regtested on x86_64-linux and i686-linux, ok for trunk?

2017-06-14  Jakub Jelinek  <jakub@redhat.com>

	* cp-gimplify.c (cp_genericize_r): Turn most of the function into
	a switch (TREE_CODE (stmt)) statement from long else if sequence.

--- gcc/cp/cp-gimplify.c.jj	2017-06-13 22:04:27.000000000 +0200
+++ gcc/cp/cp-gimplify.c	2017-06-13 22:32:22.700204429 +0200
@@ -1118,132 +1118,135 @@ cp_genericize_r (tree *stmt_p, int *walk
       return NULL_TREE;
     }
 
-  if (TREE_CODE (stmt) == ADDR_EXPR
-      && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
+  switch (TREE_CODE (stmt))
     {
-      /* If in an OpenMP context, note var uses. */
-      if (__builtin_expect (wtd->omp_ctx != NULL, 0)
-          && omp_var_to_track (TREE_OPERAND (stmt, 0)))
-        omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
-      *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
-      *walk_subtrees = 0;
-    }
-  else if (TREE_CODE (stmt) == RETURN_EXPR
-           && TREE_OPERAND (stmt, 0)
-           && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
-    /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR. */
-    *walk_subtrees = 0;
-  else if (TREE_CODE (stmt) == OMP_CLAUSE)
-    switch (OMP_CLAUSE_CODE (stmt))
-      {
-      case OMP_CLAUSE_LASTPRIVATE:
-        /* Don't dereference an invisiref in OpenMP clauses. */
-        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
-          {
-            *walk_subtrees = 0;
-            if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
-              cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
-                            cp_genericize_r, data, NULL);
-          }
-        break;
-      case OMP_CLAUSE_PRIVATE:
-        /* Don't dereference an invisiref in OpenMP clauses. */
-        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
+    case ADDR_EXPR:
+      if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
+        {
+          /* If in an OpenMP context, note var uses. */
+          if (__builtin_expect (wtd->omp_ctx != NULL, 0)
+              && omp_var_to_track (TREE_OPERAND (stmt, 0)))
+            omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
+          *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
           *walk_subtrees = 0;
-        else if (wtd->omp_ctx != NULL)
-          {
-            /* Private clause doesn't cause any references to the
-               var in outer contexts, avoid calling
-               omp_cxx_notice_variable for it. */
-            struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
-            wtd->omp_ctx = NULL;
-            cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
-                          data, NULL);
-            wtd->omp_ctx = old;
+        }
+      break;
+
+    case RETURN_EXPR:
+      if (TREE_OPERAND (stmt, 0) && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
+        /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR. */
+        *walk_subtrees = 0;
+      break;
+
+    case OMP_CLAUSE:
+      switch (OMP_CLAUSE_CODE (stmt))
+        {
+        case OMP_CLAUSE_LASTPRIVATE:
+          /* Don't dereference an invisiref in OpenMP clauses. */
+          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
+            {
+              *walk_subtrees = 0;
+              if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
+                cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
+                              cp_genericize_r, data, NULL);
+            }
+          break;
+        case OMP_CLAUSE_PRIVATE:
+          /* Don't dereference an invisiref in OpenMP clauses. */
+          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            *walk_subtrees = 0;
-          }
-        break;
-      case OMP_CLAUSE_SHARED:
-      case OMP_CLAUSE_FIRSTPRIVATE:
-      case OMP_CLAUSE_COPYIN:
-      case OMP_CLAUSE_COPYPRIVATE:
-        /* Don't dereference an invisiref in OpenMP clauses. */
-        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
-          *walk_subtrees = 0;
-        break;
-      case OMP_CLAUSE_REDUCTION:
-        /* Don't dereference an invisiref in reduction clause's
-           OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
-           still needs to be genericized. */
-        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
-          {
+          else if (wtd->omp_ctx != NULL)
+            {
+              /* Private clause doesn't cause any references to the
+                 var in outer contexts, avoid calling
+                 omp_cxx_notice_variable for it. */
+              struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
+              wtd->omp_ctx = NULL;
+              cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
+                            data, NULL);
+              wtd->omp_ctx = old;
+              *walk_subtrees = 0;
+            }
+          break;
+        case OMP_CLAUSE_SHARED:
+        case OMP_CLAUSE_FIRSTPRIVATE:
+        case OMP_CLAUSE_COPYIN:
+        case OMP_CLAUSE_COPYPRIVATE:
+          /* Don't dereference an invisiref in OpenMP clauses. */
+          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            *walk_subtrees = 0;
-            if (OMP_CLAUSE_REDUCTION_INIT (stmt))
-              cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
-                            cp_genericize_r, data, NULL);
-            if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
-              cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
-                            cp_genericize_r, data, NULL);
-          }
-        break;
-      default:
-        break;
-      }
-  else if (IS_TYPE_OR_DECL_P (stmt))
-    *walk_subtrees = 0;
+          break;
+        case OMP_CLAUSE_REDUCTION:
+          /* Don't dereference an invisiref in reduction clause's
+             OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
+             still needs to be genericized. */
+          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
+            {
+              *walk_subtrees = 0;
+              if (OMP_CLAUSE_REDUCTION_INIT (stmt))
+                cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
+                              cp_genericize_r, data, NULL);
+              if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
+                cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
+                              cp_genericize_r, data, NULL);
+            }
+          break;
+        default:
+          break;
+        }
+      break;
 
-  /* Due to the way voidify_wrapper_expr is written, we don't get a chance
-     to lower this construct before scanning it, so we need to lower these
-     before doing anything else. */
-  else if (TREE_CODE (stmt) == CLEANUP_STMT)
-    *stmt_p = build2_loc (EXPR_LOCATION (stmt),
-                          CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
-                                                 : TRY_FINALLY_EXPR,
-                          void_type_node,
-                          CLEANUP_BODY (stmt),
-                          CLEANUP_EXPR (stmt));
+    /* Due to the way voidify_wrapper_expr is written, we don't get a chance
+       to lower this construct before scanning it, so we need to lower these
+       before doing anything else. */
+    case CLEANUP_STMT:
+      *stmt_p = build2_loc (EXPR_LOCATION (stmt),
+                            CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
+                                                   : TRY_FINALLY_EXPR,
+                            void_type_node,
+                            CLEANUP_BODY (stmt),
+                            CLEANUP_EXPR (stmt));
+      break;
 
-  else if (TREE_CODE (stmt) == IF_STMT)
-    {
+    case IF_STMT:
       genericize_if_stmt (stmt_p);
       /* *stmt_p has changed, tail recurse to handle it again. */
       return cp_genericize_r (stmt_p, walk_subtrees, data);
-    }
-  /* COND_EXPR might have incompatible types in branches if one or both
-     arms are bitfields.  Fix it up now. */
-  else if (TREE_CODE (stmt) == COND_EXPR)
-    {
-      tree type_left
-        = (TREE_OPERAND (stmt, 1)
-           ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
-           : NULL_TREE);
-      tree type_right
-        = (TREE_OPERAND (stmt, 2)
-           ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
-           : NULL_TREE);
-      if (type_left
-          && !useless_type_conversion_p (TREE_TYPE (stmt),
-                                         TREE_TYPE (TREE_OPERAND (stmt, 1))))
-        {
-          TREE_OPERAND (stmt, 1)
-            = fold_convert (type_left, TREE_OPERAND (stmt, 1));
-          gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
-                                                 type_left));
-        }
-      if (type_right
-          && !useless_type_conversion_p (TREE_TYPE (stmt),
-                                         TREE_TYPE (TREE_OPERAND (stmt, 2))))
-        {
-          TREE_OPERAND (stmt, 2)
-            = fold_convert (type_right, TREE_OPERAND (stmt, 2));
-          gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
-                                                 type_right));
-        }
-    }
+    /* COND_EXPR might have incompatible types in branches if one or both
+       arms are bitfields.  Fix it up now. */
+    case COND_EXPR:
+      {
+        tree type_left
+          = (TREE_OPERAND (stmt, 1)
+             ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
+             : NULL_TREE);
+        tree type_right
+          = (TREE_OPERAND (stmt, 2)
+             ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
+             : NULL_TREE);
+        if (type_left
+            && !useless_type_conversion_p (TREE_TYPE (stmt),
+                                           TREE_TYPE (TREE_OPERAND (stmt, 1))))
+          {
+            TREE_OPERAND (stmt, 1)
+              = fold_convert (type_left, TREE_OPERAND (stmt, 1));
+            gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
+                                                   type_left));
+          }
+        if (type_right
+            && !useless_type_conversion_p (TREE_TYPE (stmt),
+                                           TREE_TYPE (TREE_OPERAND (stmt, 2))))
+          {
+            TREE_OPERAND (stmt, 2)
+              = fold_convert (type_right, TREE_OPERAND (stmt, 2));
+            gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
+                                                   type_right));
+          }
+      }
+      break;
 
-  else if (TREE_CODE (stmt) == BIND_EXPR)
-    {
+    case BIND_EXPR:
       if (__builtin_expect (wtd->omp_ctx != NULL, 0))
        {
          tree decl;
@@ -1281,113 +1284,118 @@ cp_genericize_r (tree *stmt_p, int *walk
       cp_walk_tree (&BIND_EXPR_BODY (stmt), cp_genericize_r, data, NULL);
       wtd->bind_expr_stack.pop ();
-    }
+      break;
 
-  else if (TREE_CODE (stmt) == USING_STMT)
-    {
-      tree block = NULL_TREE;
+    case USING_STMT:
+      {
+        tree block = NULL_TREE;
 
-      /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
-         BLOCK, and append an IMPORTED_DECL to its
-         BLOCK_VARS chained list. */
-      if (wtd->bind_expr_stack.exists ())
+        /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
+           BLOCK, and append an IMPORTED_DECL to its
+           BLOCK_VARS chained list. */
+        if (wtd->bind_expr_stack.exists ())
+          {
+            int i;
+            for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
+              if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
+                break;
+          }
+        if (block)
+          {
+            tree using_directive;
+            gcc_assert (TREE_OPERAND (stmt, 0));
+
+            using_directive = make_node (IMPORTED_DECL);
+            TREE_TYPE (using_directive) = void_type_node;
+
+            IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
+              = TREE_OPERAND (stmt, 0);
+            DECL_CHAIN (using_directive) = BLOCK_VARS (block);
+            BLOCK_VARS (block) = using_directive;
+          }
+        /* The USING_STMT won't appear in GENERIC. */
+        *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
+        *walk_subtrees = 0;
+      }
+      break;
+
+    case DECL_EXPR:
+      if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
        {
-          int i;
-          for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
-            if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
-              break;
+          /* Using decls inside DECL_EXPRs are just dropped on the floor. */
+          *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
+          *walk_subtrees = 0;
        }
-      if (block)
+      else
        {
-          tree using_directive;
-          gcc_assert (TREE_OPERAND (stmt, 0));
-
-          using_directive = make_node (IMPORTED_DECL);
-          TREE_TYPE (using_directive) = void_type_node;
-
-          IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
-            = TREE_OPERAND (stmt, 0);
-          DECL_CHAIN (using_directive) = BLOCK_VARS (block);
-          BLOCK_VARS (block) = using_directive;
+          tree d = DECL_EXPR_DECL (stmt);
+          if (VAR_P (d))
+            gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
        }
-      /* The USING_STMT won't appear in GENERIC. */
-      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
-      *walk_subtrees = 0;
-    }
-
-  else if (TREE_CODE (stmt) == DECL_EXPR
-           && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
-    {
-      /* Using decls inside DECL_EXPRs are just dropped on the floor. */
-      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
-      *walk_subtrees = 0;
-    }
-  else if (TREE_CODE (stmt) == DECL_EXPR)
-    {
-      tree d = DECL_EXPR_DECL (stmt);
-      if (VAR_P (d))
-        gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
-    }
-  else if (TREE_CODE (stmt) == OMP_PARALLEL
-           || TREE_CODE (stmt) == OMP_TASK
-           || TREE_CODE (stmt) == OMP_TASKLOOP)
-    {
-      struct cp_genericize_omp_taskreg omp_ctx;
-      tree c, decl;
-      splay_tree_node n;
+      break;
 
-      *walk_subtrees = 0;
-      cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
-      omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
-      omp_ctx.default_shared = omp_ctx.is_parallel;
-      omp_ctx.outer = wtd->omp_ctx;
-      omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
-      wtd->omp_ctx = &omp_ctx;
-      for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
-        switch (OMP_CLAUSE_CODE (c))
-          {
-          case OMP_CLAUSE_SHARED:
-          case OMP_CLAUSE_PRIVATE:
-          case OMP_CLAUSE_FIRSTPRIVATE:
-          case OMP_CLAUSE_LASTPRIVATE:
-            decl = OMP_CLAUSE_DECL (c);
-            if (decl == error_mark_node || !omp_var_to_track (decl))
+    case OMP_PARALLEL:
+    case OMP_TASK:
+    case OMP_TASKLOOP:
+      {
+        struct cp_genericize_omp_taskreg omp_ctx;
+        tree c, decl;
+        splay_tree_node n;
+
+        *walk_subtrees = 0;
+        cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
+        omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
+        omp_ctx.default_shared = omp_ctx.is_parallel;
+        omp_ctx.outer = wtd->omp_ctx;
+        omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
+        wtd->omp_ctx = &omp_ctx;
+        for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
+          switch (OMP_CLAUSE_CODE (c))
+            {
+            case OMP_CLAUSE_SHARED:
+            case OMP_CLAUSE_PRIVATE:
+            case OMP_CLAUSE_FIRSTPRIVATE:
+            case OMP_CLAUSE_LASTPRIVATE:
+              decl = OMP_CLAUSE_DECL (c);
+              if (decl == error_mark_node || !omp_var_to_track (decl))
+                break;
+              n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
+              if (n != NULL)
+                break;
+              splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
+                                 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
+                                 ? OMP_CLAUSE_DEFAULT_SHARED
+                                 : OMP_CLAUSE_DEFAULT_PRIVATE);
+              if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
+                omp_cxx_notice_variable (omp_ctx.outer, decl);
              break;
-            n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
-            if (n != NULL)
+            case OMP_CLAUSE_DEFAULT:
+              if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
+                omp_ctx.default_shared = true;
+            default:
              break;
-            splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
-                               OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
-                               ? OMP_CLAUSE_DEFAULT_SHARED
-                               : OMP_CLAUSE_DEFAULT_PRIVATE);
-            if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
-                && omp_ctx.outer)
-              omp_cxx_notice_variable (omp_ctx.outer, decl);
-            break;
-          case OMP_CLAUSE_DEFAULT:
-            if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
-              omp_ctx.default_shared = true;
-          default:
-            break;
-          }
-      if (TREE_CODE (stmt) == OMP_TASKLOOP)
-        genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
-      else
-        cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
-      wtd->omp_ctx = omp_ctx.outer;
-      splay_tree_delete (omp_ctx.variables);
-    }
-  else if (TREE_CODE (stmt) == TRY_BLOCK)
-    {
-      *walk_subtrees = 0;
-      tree try_block = wtd->try_block;
-      wtd->try_block = stmt;
-      cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
-      wtd->try_block = try_block;
-      cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
-    }
-  else if (TREE_CODE (stmt) == MUST_NOT_THROW_EXPR)
-    {
+            }
+        if (TREE_CODE (stmt) == OMP_TASKLOOP)
+          genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
+        else
+          cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
+        wtd->omp_ctx = omp_ctx.outer;
+        splay_tree_delete (omp_ctx.variables);
+      }
+      break;
+
+    case TRY_BLOCK:
+      {
+        *walk_subtrees = 0;
+        tree try_block = wtd->try_block;
+        wtd->try_block = stmt;
+        cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
+        wtd->try_block = try_block;
+        cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
      }
+      break;
+
+    case MUST_NOT_THROW_EXPR:
       /* MUST_NOT_THROW_COND might be something else with TM. */
       if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
        {
@@ -1397,78 +1405,93 @@ cp_genericize_r (tree *stmt_p, int *walk
          cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
          wtd->try_block = try_block;
        }
-    }
-  else if (TREE_CODE (stmt) == THROW_EXPR)
-    {
-      location_t loc = location_of (stmt);
-      if (TREE_NO_WARNING (stmt))
-        /* Never mind. */;
-      else if (wtd->try_block)
-        {
-          if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR
-              && warning_at (loc, OPT_Wterminate,
-                             "throw will always call terminate()")
-              && cxx_dialect >= cxx11
-              && DECL_DESTRUCTOR_P (current_function_decl))
-            inform (loc, "in C++11 destructors default to noexcept");
-        }
-      else
-        {
-          if (warn_cxx11_compat && cxx_dialect < cxx11
-              && DECL_DESTRUCTOR_P (current_function_decl)
-              && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
-                  == NULL_TREE)
-              && (get_defaulted_eh_spec (current_function_decl)
-                  == empty_except_spec))
-            warning_at (loc, OPT_Wc__11_compat,
-                        "in C++11 this throw will terminate because "
-                        "destructors default to noexcept");
-        }
-    }
-  else if (TREE_CODE (stmt) == CONVERT_EXPR)
-    gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
-  else if (TREE_CODE (stmt) == FOR_STMT)
-    genericize_for_stmt (stmt_p, walk_subtrees, data);
-  else if (TREE_CODE (stmt) == WHILE_STMT)
-    genericize_while_stmt (stmt_p, walk_subtrees, data);
-  else if (TREE_CODE (stmt) == DO_STMT)
-    genericize_do_stmt (stmt_p, walk_subtrees, data);
-  else if (TREE_CODE (stmt) == SWITCH_STMT)
-    genericize_switch_stmt (stmt_p, walk_subtrees, data);
-  else if (TREE_CODE (stmt) == CONTINUE_STMT)
-    genericize_continue_stmt (stmt_p);
-  else if (TREE_CODE (stmt) == BREAK_STMT)
-    genericize_break_stmt (stmt_p);
-  else if (TREE_CODE (stmt) == OMP_FOR
-           || TREE_CODE (stmt) == OMP_SIMD
-           || TREE_CODE (stmt) == OMP_DISTRIBUTE)
-    genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
-  else if (TREE_CODE (stmt) == PTRMEM_CST)
-    {
+      break;
+
+    case THROW_EXPR:
+      {
+        location_t loc = location_of (stmt);
+        if (TREE_NO_WARNING (stmt))
+          /* Never mind. */;
+        else if (wtd->try_block)
+          {
+            if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR
+                && warning_at (loc, OPT_Wterminate,
+                               "throw will always call terminate()")
+                && cxx_dialect >= cxx11
+                && DECL_DESTRUCTOR_P (current_function_decl))
+              inform (loc, "in C++11 destructors default to noexcept");
+          }
+        else
+          {
+            if (warn_cxx11_compat && cxx_dialect < cxx11
+                && DECL_DESTRUCTOR_P (current_function_decl)
+                && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
+                    == NULL_TREE)
+                && (get_defaulted_eh_spec (current_function_decl)
+                    == empty_except_spec))
+              warning_at (loc, OPT_Wc__11_compat,
+                          "in C++11 this throw will terminate because "
+                          "destructors default to noexcept");
+          }
+      }
+      break;
+
+    case CONVERT_EXPR:
+      gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
+      break;
+
+    case FOR_STMT:
+      genericize_for_stmt (stmt_p, walk_subtrees, data);
+      break;
+    case WHILE_STMT:
+      genericize_while_stmt (stmt_p, walk_subtrees, data);
+      break;
+    case DO_STMT:
+      genericize_do_stmt (stmt_p, walk_subtrees, data);
+      break;
+    case SWITCH_STMT:
+      genericize_switch_stmt (stmt_p, walk_subtrees, data);
+      break;
+    case CONTINUE_STMT:
+      genericize_continue_stmt (stmt_p);
+      break;
+    case BREAK_STMT:
+      genericize_break_stmt (stmt_p);
+      break;
+    case OMP_FOR:
+    case OMP_SIMD:
+    case OMP_DISTRIBUTE:
+      genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
+      break;
+
+    case PTRMEM_CST:
      /* By the time we get here we're handing off to the back end,
         so we don't need or want to preserve PTRMEM_CST anymore. */
      *stmt_p = cplus_expand_constant (stmt);
      *walk_subtrees = 0;
-    }
-  else if (TREE_CODE (stmt) == MEM_REF)
-    {
+      break;
+
+    case MEM_REF:
      /* For MEM_REF, make sure not to sanitize the second operand even
-        if it has reference type. It is just an offset with a type
+        if it has reference type.  It is just an offset with a type
        holding other information.  There is no other processing we
        need to do for INTEGER_CSTs, so just ignore the second argument
        unconditionally. */
      cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
      *walk_subtrees = 0;
-    }
-  else if (sanitize_flags_p ((SANITIZE_NULL
-                              | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
-           && !wtd->no_sanitize_p)
-    {
-      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
-          && TREE_CODE (stmt) == NOP_EXPR
+      break;
+
+    case NOP_EXPR:
+      if (!wtd->no_sanitize_p
+          && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
          && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE)
        ubsan_maybe_instrument_reference (stmt_p);
-      else if (TREE_CODE (stmt) == CALL_EXPR)
+      break;
+
+    case CALL_EXPR:
+      if (!wtd->no_sanitize_p
+          && sanitize_flags_p ((SANITIZE_NULL
+                                | SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
        {
          tree fn = CALL_EXPR_FN (stmt);
          if (fn != NULL_TREE
@@ -1486,6 +1509,12 @@ cp_genericize_r (tree *stmt_p, int *walk
                cp_ubsan_maybe_instrument_member_call (stmt);
            }
        }
+      break;
+
+    default:
+      if (IS_TYPE_OR_DECL_P (stmt))
+        *walk_subtrees = 0;
+      break;
    }
 
  p_set->add (*stmt_p);

	Jakub
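[The following is a minimal, stand-alone sketch of the general else-if-chain
to switch refactoring described in the mail above.  The enum, struct and
handler names here are made-up stand-ins, not GCC's tree codes or the
cp_genericize_r helpers; it only illustrates the shape of the rewrite.]

/* Illustrative sketch only: made-up codes and handlers, not GCC's.  */
#include <stdio.h>

enum node_code { NODE_ADDR, NODE_RETURN, NODE_COND, NODE_OTHER };

struct node { enum node_code code; };

/* Before: a chain of else-ifs, each one re-testing the same code.  */
static void
handle_node_if_chain (const struct node *n)
{
  if (n->code == NODE_ADDR)
    printf ("handle address\n");
  else if (n->code == NODE_RETURN)
    printf ("handle return\n");
  else if (n->code == NODE_COND)
    printf ("handle conditional\n");
  else
    printf ("default handling\n");
}

/* After: one switch on the code; the compiler may expand it as a jump
   table or a balanced comparison tree instead of a linear series of
   equality tests.  */
static void
handle_node_switch (const struct node *n)
{
  switch (n->code)
    {
    case NODE_ADDR:
      printf ("handle address\n");
      break;
    case NODE_RETURN:
      printf ("handle return\n");
      break;
    case NODE_COND:
      printf ("handle conditional\n");
      break;
    default:
      printf ("default handling\n");
      break;
    }
}

int
main (void)
{
  struct node n = { NODE_RETURN };
  handle_node_if_chain (&n);  /* prints "handle return" */
  handle_node_switch (&n);    /* prints "handle return" */
  return 0;
}

With a dense set of case values, as with the 29 tree codes in the patch, the
switch form lets the compiler pick the dispatch strategy, and it also keeps
every handler at a fixed indentation level instead of a growing else-if
ladder.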
Attachment: 1 (Description: Text document)