/* Branch prediction routines for the GNU compiler.
   Copyright (C) 2000-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* References:

   [1] "Branch Prediction for Free"
       Ball and Larus; PLDI '93.
   [2] "Static Branch Frequency and Program Profile Analysis"
       Wu and Larus; MICRO-27.
   [3] "Corpus-based Static Branch Prediction"
       Calder, Grunwald, Lindsay, Martin, Mozer, and Zorn; PLDI '95.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "calls.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "machmode.h"
#include "input.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfganal.h"
#include "basic-block.h"
#include "insn-config.h"
#include "regs.h"
#include "flags.h"
#include "profile.h"
#include "except.h"
#include "diagnostic-core.h"
#include "recog.h"
#include "expr.h"
#include "coverage.h"
#include "sreal.h"
#include "params.h"
#include "target.h"
#include "cfgloop.h"
#include "hash-map.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-ssa-loop-niter.h"
#include "tree-ssa-loop.h"
#include "tree-pass.h"
#include "tree-scalar-evolution.h"
#include "cfgloop.h"

/* real constants: 0, 1, 1-1/REG_BR_PROB_BASE, REG_BR_PROB_BASE,
   1/REG_BR_PROB_BASE, 0.5, BB_FREQ_MAX.  */
static sreal real_zero, real_one, real_almost_one, real_br_prob_base,
	     real_inv_br_prob_base, real_one_half, real_bb_freq_max;

static void combine_predictions_for_insn (rtx_insn *, basic_block);
static void dump_prediction (FILE *, enum br_predictor, int, basic_block, int);
static void predict_paths_leading_to (basic_block, enum br_predictor, enum prediction);
static void predict_paths_leading_to_edge (edge, enum br_predictor, enum prediction);
static bool can_predict_insn_p (const rtx_insn *);

/* Information we hold about each branch predictor.
   Filled using information from predict.def.  */

struct predictor_info
{
  const char *const name;	/* Name used in the debugging dumps.  */
  const int hitrate;		/* Expected hitrate used by
				   predict_insn_def call.  */
  const int flags;
};

/* Use given predictor without Dempster-Shafer theory if it matches
   using first_match heuristics.  */
#define PRED_FLAG_FIRST_MATCH 1

/* Recompute hitrate in percent to our representation.  */

#define HITRATE(VAL) ((int) ((VAL) * REG_BR_PROB_BASE + 50) / 100)

#define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) {NAME, HITRATE, FLAGS},
static const struct predictor_info predictor_info[] = {
#include "predict.def"

  /* Upper bound on predictors.  */
  {NULL, 0, 0}
};
#undef DEF_PREDICTOR
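
/* For instance, with REG_BR_PROB_BASE at its usual value of 10000,
   HITRATE (79) == (int) (79 * 10000 + 50) / 100 == 7900, so a
   predictor documented as 79% accurate is stored as 7900 fixed-point
   units; the "+ 50" term makes the division round to nearest rather
   than truncate.  */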

/* Return TRUE if frequency FREQ is considered to be hot.  */

static inline bool
maybe_hot_frequency_p (struct function *fun, int freq)
{
  struct cgraph_node *node = cgraph_node::get (fun->decl);
  if (!profile_info || !flag_branch_probabilities)
    {
      if (node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
	return false;
      if (node->frequency == NODE_FREQUENCY_HOT)
	return true;
    }
  if (profile_status_for_fn (fun) == PROFILE_ABSENT)
    return true;
  if (node->frequency == NODE_FREQUENCY_EXECUTED_ONCE
      && freq < (ENTRY_BLOCK_PTR_FOR_FN (fun)->frequency * 2 / 3))
    return false;
  if (PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION) == 0)
    return false;
  if (freq < (ENTRY_BLOCK_PTR_FOR_FN (fun)->frequency
	      / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION)))
    return false;
  return true;
}
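
/* As an illustration: with hot-bb-frequency-fraction at its customary
   default of 1000, a block is hot when its estimated frequency is at
   least 1/1000 of the entry block's frequency; functions known to
   execute only once use the stricter 2/3 cutoff above instead.  */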

static gcov_type min_count = -1;

/* Determine the threshold for hot BB counts.  */

gcov_type
get_hot_bb_threshold ()
{
  gcov_working_set_t *ws;
  if (min_count == -1)
    {
      ws = find_working_set (PARAM_VALUE (HOT_BB_COUNT_WS_PERMILLE));
      gcc_assert (ws);
      min_count = ws->min_counter;
    }
  return min_count;
}

/* Set the threshold for hot BB counts.  */

void
set_hot_bb_threshold (gcov_type min)
{
  min_count = min;
}
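
/* In other words, with hot-bb-count-ws-permille at its usual default
   of 999, the threshold is the smallest profile counter among the
   blocks that together account for 99.9% of all counted executions:
   roughly the coldest block still inside the hot working set.  */
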
/* Return TRUE if profile count COUNT is considered to be hot.  */

bool
maybe_hot_count_p (struct function *fun, gcov_type count)
{
  if (fun && profile_status_for_fn (fun) != PROFILE_READ)
    return true;
  /* Code executed at most once is not hot.  */
  if (profile_info->runs >= count)
    return false;
  return (count >= get_hot_bb_threshold ());
}

/* Return true in case BB can be CPU intensive and should be optimized
   for maximal performance.  */

bool
maybe_hot_bb_p (struct function *fun, const_basic_block bb)
{
  gcc_checking_assert (fun);
  if (profile_status_for_fn (fun) == PROFILE_READ)
    return maybe_hot_count_p (fun, bb->count);
  return maybe_hot_frequency_p (fun, bb->frequency);
}

/* Return true in case edge E can be CPU intensive and should be optimized
   for maximal performance.  */

bool
maybe_hot_edge_p (edge e)
{
  if (profile_status_for_fn (cfun) == PROFILE_READ)
    return maybe_hot_count_p (cfun, e->count);
  return maybe_hot_frequency_p (cfun, EDGE_FREQUENCY (e));
}

/* Return true if profile COUNT and FREQUENCY, or function FUN static
   node frequency reflects never being executed.  */

static bool
probably_never_executed (struct function *fun,
			 gcov_type count, int frequency)
{
  gcc_checking_assert (fun);
  if (profile_status_for_fn (cfun) == PROFILE_READ)
    {
      int unlikely_count_fraction = PARAM_VALUE (UNLIKELY_BB_COUNT_FRACTION);
      if (count * unlikely_count_fraction >= profile_info->runs)
	return false;
      if (!frequency)
	return true;
      if (!ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency)
	return false;
      if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count)
	{
	  gcov_type computed_count;
	  /* Check for possibility of overflow, in which case entry bb count
	     is large enough to do the division first without losing much
	     precision.  */
	  if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count < REG_BR_PROB_BASE *
	      REG_BR_PROB_BASE)
	    {
	      gcov_type scaled_count
		= frequency * ENTRY_BLOCK_PTR_FOR_FN (cfun)->count
		  * unlikely_count_fraction;
	      computed_count = RDIV (scaled_count,
				     ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency);
	    }
	  else
	    {
	      computed_count = RDIV (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count,
				     ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency);
	      computed_count *= frequency * unlikely_count_fraction;
	    }
	  if (computed_count >= profile_info->runs)
	    return false;
	}
      return true;
    }
  if ((!profile_info || !flag_branch_probabilities)
      && (cgraph_node::get (fun->decl)->frequency
	  == NODE_FREQUENCY_UNLIKELY_EXECUTED))
    return true;
  return false;
}
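
/* A worked instance of the math above: with an entry block count of
   10^6, entry frequency 1000, a block frequency of 2 and
   unlikely-bb-count-fraction at its customary default of 20, the
   estimate is computed_count = (2 * 10^6 * 20) / 1000 == 40000, and
   the block is deemed never-executed only when that estimate still
   falls short of the number of training runs.  */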


/* Return true in case BB is probably never executed.  */

bool
probably_never_executed_bb_p (struct function *fun, const_basic_block bb)
{
  return probably_never_executed (fun, bb->count, bb->frequency);
}


/* Return true in case edge E is probably never executed.  */

bool
probably_never_executed_edge_p (struct function *fun, edge e)
{
  return probably_never_executed (fun, e->count, EDGE_FREQUENCY (e));
}

/* Return true when current function should always be optimized for size.  */

bool
optimize_function_for_size_p (struct function *fun)
{
  if (optimize_size)
    return true;
  if (!fun || !fun->decl)
    return false;

  cgraph_node *n = cgraph_node::get (fun->decl);
  return n && n->optimize_for_size_p ();
}

/* Return true when current function should always be optimized for speed.  */

bool
optimize_function_for_speed_p (struct function *fun)
{
  return !optimize_function_for_size_p (fun);
}

/* Return TRUE when BB should be optimized for size.  */

bool
optimize_bb_for_size_p (const_basic_block bb)
{
  return (optimize_function_for_size_p (cfun)
	  || (bb && !maybe_hot_bb_p (cfun, bb)));
}

/* Return TRUE when BB should be optimized for speed.  */

bool
optimize_bb_for_speed_p (const_basic_block bb)
{
  return !optimize_bb_for_size_p (bb);
}

/* Return TRUE when edge E should be optimized for size.  */

bool
optimize_edge_for_size_p (edge e)
{
  return optimize_function_for_size_p (cfun) || !maybe_hot_edge_p (e);
}

/* Return TRUE when edge E should be optimized for speed.  */

bool
optimize_edge_for_speed_p (edge e)
{
  return !optimize_edge_for_size_p (e);
}

/* Return TRUE when the current instruction should be optimized for size.  */

bool
optimize_insn_for_size_p (void)
{
  return optimize_function_for_size_p (cfun) || !crtl->maybe_hot_insn_p;
}

/* Return TRUE when the current instruction should be optimized for speed.  */

bool
optimize_insn_for_speed_p (void)
{
  return !optimize_insn_for_size_p ();
}

/* Return TRUE when LOOP should be optimized for size.  */

bool
optimize_loop_for_size_p (struct loop *loop)
{
  return optimize_bb_for_size_p (loop->header);
}

/* Return TRUE when LOOP should be optimized for speed.  */

bool
optimize_loop_for_speed_p (struct loop *loop)
{
  return optimize_bb_for_speed_p (loop->header);
}

/* Return TRUE when LOOP nest should be optimized for speed.  */

bool
optimize_loop_nest_for_speed_p (struct loop *loop)
{
  struct loop *l = loop;
  if (optimize_loop_for_speed_p (loop))
    return true;
  l = loop->inner;
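  /* Walk the subtree of loops rooted at LOOP in preorder without
     recursion: descend through ->inner, move sideways through ->next,
     and climb back out with loop_outer () until an unvisited sibling
     is found.  */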
  while (l && l != loop)
    {
      if (optimize_loop_for_speed_p (l))
	return true;
      if (l->inner)
	l = l->inner;
      else if (l->next)
	l = l->next;
      else
	{
	  while (l != loop && !l->next)
	    l = loop_outer (l);
	  if (l != loop)
	    l = l->next;
	}
    }
  return false;
}

/* Return TRUE when LOOP nest should be optimized for size.  */

bool
optimize_loop_nest_for_size_p (struct loop *loop)
{
  return !optimize_loop_nest_for_speed_p (loop);
}

/* Return true when edge E is likely to be well predicted by the branch
   predictor.  */

bool
predictable_edge_p (edge e)
{
  if (profile_status_for_fn (cfun) == PROFILE_ABSENT)
    return false;
  if ((e->probability
       <= PARAM_VALUE (PARAM_PREDICTABLE_BRANCH_OUTCOME) * REG_BR_PROB_BASE / 100)
      || (REG_BR_PROB_BASE - e->probability
	  <= PARAM_VALUE (PARAM_PREDICTABLE_BRANCH_OUTCOME) * REG_BR_PROB_BASE / 100))
    return true;
  return false;
}
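
/* Concretely: with predictable-branch-outcome at its usual default of
   2, an edge counts as predictable when its probability is at most 2%
   or at least 98%, that is, when the branch almost always goes the
   same way.  */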


/* Set RTL expansion for BB profile.  */

void
rtl_profile_for_bb (basic_block bb)
{
  crtl->maybe_hot_insn_p = maybe_hot_bb_p (cfun, bb);
}

/* Set RTL expansion for edge profile.  */

void
rtl_profile_for_edge (edge e)
{
  crtl->maybe_hot_insn_p = maybe_hot_edge_p (e);
}

/* Set RTL expansion to default mode (i.e. when profile info is not known).  */
void
default_rtl_profile (void)
{
  crtl->maybe_hot_insn_p = true;
}

/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

bool
rtl_predicted_by_p (const_basic_block bb, enum br_predictor predictor)
{
  rtx note;
  if (!INSN_P (BB_END (bb)))
    return false;
  for (note = REG_NOTES (BB_END (bb)); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED
	&& INTVAL (XEXP (XEXP (note, 0), 0)) == (int) predictor)
      return true;
  return false;
}

/* Structure representing predictions at the tree level.  */

struct edge_prediction {
  struct edge_prediction *ep_next;
  edge ep_edge;
  enum br_predictor ep_predictor;
  int ep_probability;
};

/* This map contains for a basic block the list of predictions for the
   outgoing edges.  */

static hash_map<const_basic_block, edge_prediction *> *bb_predictions;

/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

bool
gimple_predicted_by_p (const_basic_block bb, enum br_predictor predictor)
{
  struct edge_prediction *i;
  edge_prediction **preds = bb_predictions->get (bb);

  if (!preds)
    return false;

  for (i = *preds; i; i = i->ep_next)
    if (i->ep_predictor == predictor)
      return true;
  return false;
}

/* Return true when the probability of edge E is reliable.

   The profile guessing code is good at predicting branch outcome (ie.
   taken/not taken), that is predicted right slightly over 75% of time.
   It is however notoriously poor on predicting the probability itself.
   In general the profile appears a lot flatter (with probabilities closer
   to 50%) than the reality, so it is a bad idea to use it to drive
   optimizations such as those disabling dynamic branch prediction for well
   predictable branches.

   There are two exceptions - edges leading to noreturn edges and edges
   predicted by number of iterations heuristics are predicted well.  This
   predicate should be able to distinguish those, but at the moment it simply
   checks for the noreturn heuristic, which is the only one giving probability
   over 99% or below 1%.  In the future we might want to propagate reliability
   information across the CFG if we find this information useful in multiple
   places.  */
static bool
probability_reliable_p (int prob)
{
  return (profile_status_for_fn (cfun) == PROFILE_READ
	  || (profile_status_for_fn (cfun) == PROFILE_GUESSED
	      && (prob <= HITRATE (1) || prob >= HITRATE (99))));
}

/* Same predicate as above, working on edges.  */
bool
edge_probability_reliable_p (const_edge e)
{
  return probability_reliable_p (e->probability);
}

/* Same predicate as edge_probability_reliable_p, working on notes.  */
bool
br_prob_note_reliable_p (const_rtx note)
{
  gcc_assert (REG_NOTE_KIND (note) == REG_BR_PROB);
  return probability_reliable_p (XINT (note, 0));
}

static void
predict_insn (rtx_insn *insn, enum br_predictor predictor, int probability)
{
  gcc_assert (any_condjump_p (insn));
  if (!flag_guess_branch_prob)
    return;

  add_reg_note (insn, REG_BR_PRED,
		gen_rtx_CONCAT (VOIDmode,
				GEN_INT ((int) predictor),
				GEN_INT ((int) probability)));
}

/* Predict insn by given predictor.  */

void
predict_insn_def (rtx_insn *insn, enum br_predictor predictor,
		  enum prediction taken)
{
   int probability = predictor_info[(int) predictor].hitrate;

   if (taken != TAKEN)
     probability = REG_BR_PROB_BASE - probability;

   predict_insn (insn, predictor, probability);
}

/* Predict edge E with given probability if possible.  */

void
rtl_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  rtx_insn *last_insn;
  last_insn = BB_END (e->src);

  /* We can store the branch prediction information only about
     conditional jumps.  */
  if (!any_condjump_p (last_insn))
    return;

  /* We always store probability of branching.  */
  if (e->flags & EDGE_FALLTHRU)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (last_insn, predictor, probability);
}

/* Predict edge E with the given PROBABILITY.  */
void
gimple_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  gcc_assert (profile_status_for_fn (cfun) != PROFILE_GUESSED);
  if ((e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
       && EDGE_COUNT (e->src->succs) > 1)
      && flag_guess_branch_prob && optimize)
    {
      struct edge_prediction *i = XNEW (struct edge_prediction);
      edge_prediction *&preds = bb_predictions->get_or_insert (e->src);

      i->ep_next = preds;
      preds = i;
      i->ep_probability = probability;
      i->ep_predictor = predictor;
      i->ep_edge = e;
    }
}

/* Remove all predictions on given basic block that are attached
   to edge E.  */
void
remove_predictions_associated_with_edge (edge e)
{
  if (!bb_predictions)
    return;

  edge_prediction **preds = bb_predictions->get (e->src);

  if (preds)
    {
      struct edge_prediction **prediction = preds;
      struct edge_prediction *next;

      while (*prediction)
	{
	  if ((*prediction)->ep_edge == e)
	    {
	      next = (*prediction)->ep_next;
	      free (*prediction);
	      *prediction = next;
	    }
	  else
	    prediction = &((*prediction)->ep_next);
	}
    }
}

/* Clears the list of predictions stored for BB.  */

static void
clear_bb_predictions (basic_block bb)
{
  edge_prediction **preds = bb_predictions->get (bb);
  struct edge_prediction *pred, *next;

  if (!preds)
    return;

  for (pred = *preds; pred; pred = next)
    {
      next = pred->ep_next;
      free (pred);
    }
  *preds = NULL;
}

/* Return true when we can store prediction on insn INSN.
   At the moment we represent predictions only on conditional
   jumps, not at computed jumps or other complicated cases.  */
static bool
can_predict_insn_p (const rtx_insn *insn)
{
  return (JUMP_P (insn)
	  && any_condjump_p (insn)
	  && EDGE_COUNT (BLOCK_FOR_INSN (insn)->succs) >= 2);
}

/* Predict edge E by given predictor if possible.  */

void
predict_edge_def (edge e, enum br_predictor predictor,
		  enum prediction taken)
{
   int probability = predictor_info[(int) predictor].hitrate;

   if (taken != TAKEN)
     probability = REG_BR_PROB_BASE - probability;

   predict_edge (e, predictor, probability);
}

/* Invert all branch predictions or probability notes in the INSN.  This needs
   to be done each time we invert the condition used by the jump.  */

void
invert_br_probabilities (rtx insn)
{
  rtx note;

  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PROB)
      XINT (note, 0) = REG_BR_PROB_BASE - XINT (note, 0);
    else if (REG_NOTE_KIND (note) == REG_BR_PRED)
      XEXP (XEXP (note, 0), 1)
	= GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (XEXP (note, 0), 1)));
}

/* Dump information about the branch prediction to the output file.  */

static void
dump_prediction (FILE *file, enum br_predictor predictor, int probability,
		 basic_block bb, int used)
{
  edge e;
  edge_iterator ei;

  if (!file)
    return;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (! (e->flags & EDGE_FALLTHRU))
      break;

  fprintf (file, "  %s heuristics%s: %.1f%%",
	   predictor_info[predictor].name,
	   used ? "" : " (ignored)", probability * 100.0 / REG_BR_PROB_BASE);

  if (bb->count)
    {
      fprintf (file, "  exec %"PRId64, bb->count);
      if (e)
	{
	  fprintf (file, " hit %"PRId64, e->count);
	  fprintf (file, " (%.1f%%)", e->count * 100.0 / bb->count);
	}
    }

  fprintf (file, "\n");
}

/* We cannot predict the probabilities of outgoing edges of BB.  Set them
   evenly and hope for the best.  */
static void
set_even_probabilities (basic_block bb)
{
  int nedges = 0;
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      nedges++;
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      e->probability = (REG_BR_PROB_BASE + nedges / 2) / nedges;
    else
      e->probability = 0;
}
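
/* For instance, a block with three normal successors gets
   (REG_BR_PROB_BASE + 1) / 3 == 3333 on each edge when
   REG_BR_PROB_BASE is 10000; the "+ nedges / 2" term rounds the split
   to nearest, at the cost of the probabilities summing slightly off
   REG_BR_PROB_BASE.  */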

/* Combine all REG_BR_PRED notes into single probability and attach REG_BR_PROB
   note if not already present.  Remove now useless REG_BR_PRED notes.  */

static void
combine_predictions_for_insn (rtx_insn *insn, basic_block bb)
{
  rtx prob_note;
  rtx *pnote;
  rtx note;
  int best_probability = PROB_EVEN;
  enum br_predictor best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;

  if (!can_predict_insn_p (insn))
    {
      set_even_probabilities (bb);
      return;
    }

  prob_note = find_reg_note (insn, REG_BR_PROB, 0);
  pnote = &REG_NOTES (insn);
  if (dump_file)
    fprintf (dump_file, "Predictions for insn %i bb %i\n", INSN_UID (insn),
	     bb->index);

  /* We implement "first match" heuristics and use probability guessed
     by predictor with smallest index.  */
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED)
      {
	enum br_predictor predictor = ((enum br_predictor)
				       INTVAL (XEXP (XEXP (note, 0), 0)));
	int probability = INTVAL (XEXP (XEXP (note, 0), 1));

	found = true;
	if (best_predictor > predictor)
	  best_probability = probability, best_predictor = predictor;

	d = (combined_probability * probability
	     + (REG_BR_PROB_BASE - combined_probability)
	     * (REG_BR_PROB_BASE - probability));

	/* Use FP math to avoid overflows of 32bit integers.  */
	if (d == 0)
	  /* If one probability is 0% and one 100%, avoid division by zero.  */
	  combined_probability = REG_BR_PROB_BASE / 2;
	else
	  combined_probability = (((double) combined_probability) * probability
				  * REG_BR_PROB_BASE / d + 0.5);
      }
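
  /* Numerically, the update above is Dempster-Shafer combination
     (everything scaled by REG_BR_PROB_BASE): folding a new 80%
     prediction into a running estimate of 70% gives
     d = 0.70 * 0.80 + 0.30 * 0.20 = 0.62 and a combined probability
     of 0.70 * 0.80 / 0.62, roughly 90.3%, so agreeing predictors
     reinforce each other beyond either one alone.  */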

  /* Decide which heuristic to use.  In case we didn't match anything,
     use no_prediction heuristic, in case we did match, use either
     first match or Dempster-Shafer theory depending on the flags.  */

  if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
    first_match = true;

  if (!found)
    dump_prediction (dump_file, PRED_NO_PREDICTION,
		     combined_probability, bb, true);
  else
    {
      dump_prediction (dump_file, PRED_DS_THEORY, combined_probability,
		       bb, !first_match);
      dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability,
		       bb, first_match);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb, true);

  while (*pnote)
    {
      if (REG_NOTE_KIND (*pnote) == REG_BR_PRED)
	{
	  enum br_predictor predictor = ((enum br_predictor)
					 INTVAL (XEXP (XEXP (*pnote, 0), 0)));
	  int probability = INTVAL (XEXP (XEXP (*pnote, 0), 1));

	  dump_prediction (dump_file, predictor, probability, bb,
			   !first_match || best_predictor == predictor);
	  *pnote = XEXP (*pnote, 1);
	}
      else
	pnote = &XEXP (*pnote, 1);
    }

  if (!prob_note)
    {
      add_int_reg_note (insn, REG_BR_PROB, combined_probability);

      /* Save the prediction into CFG in case we are seeing non-degenerated
	 conditional jump.  */
      if (!single_succ_p (bb))
	{
	  BRANCH_EDGE (bb)->probability = combined_probability;
	  FALLTHRU_EDGE (bb)->probability
	    = REG_BR_PROB_BASE - combined_probability;
	}
    }
  else if (!single_succ_p (bb))
    {
      int prob = XINT (prob_note, 0);

      BRANCH_EDGE (bb)->probability = prob;
      FALLTHRU_EDGE (bb)->probability = REG_BR_PROB_BASE - prob;
    }
  else
    single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
}

/* Combine predictions into single probability and store them into CFG.
   Remove now useless prediction entries.  */

static void
combine_predictions_for_bb (basic_block bb)
{
  int best_probability = PROB_EVEN;
  enum br_predictor best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;
  struct edge_prediction *pred;
  int nedges = 0;
  edge e, first = NULL, second = NULL;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      {
	nedges++;
	if (first && !second)
	  second = e;
	if (!first)
	  first = e;
      }

  /* When there is no successor or only one choice, prediction is easy.

     We are lazy for now and predict only basic blocks with two outgoing
     edges.  It is possible to predict generic case too, but we have to
     ignore first match heuristics and do more involved combining.  Implement
     this later.  */
  if (nedges != 2)
    {
      if (!bb->count)
	set_even_probabilities (bb);
      clear_bb_predictions (bb);
      if (dump_file)
	fprintf (dump_file, "%i edges in bb %i predicted to even probabilities\n",
		 nedges, bb->index);
      return;
    }

  if (dump_file)
    fprintf (dump_file, "Predictions for bb %i\n", bb->index);

  edge_prediction **preds = bb_predictions->get (bb);
  if (preds)
    {
      /* We implement "first match" heuristics and use probability guessed
	 by predictor with smallest index.  */
      for (pred = *preds; pred; pred = pred->ep_next)
	{
	  enum br_predictor predictor = pred->ep_predictor;
	  int probability = pred->ep_probability;

	  if (pred->ep_edge != first)
	    probability = REG_BR_PROB_BASE - probability;

	  found = true;
	  /* First match heuristics would be wildly confused if we predicted
	     both directions.  */
	  if (best_predictor > predictor)
	    {
	      struct edge_prediction *pred2;
	      int prob = probability;

	      for (pred2 = (struct edge_prediction *) *preds;
		   pred2; pred2 = pred2->ep_next)
		if (pred2 != pred && pred2->ep_predictor == pred->ep_predictor)
		  {
		    int probability2 = pred->ep_probability;

		    if (pred2->ep_edge != first)
		      probability2 = REG_BR_PROB_BASE - probability2;

		    if ((probability < REG_BR_PROB_BASE / 2) !=
			(probability2 < REG_BR_PROB_BASE / 2))
		      break;

		    /* If the same predictor later gave better result, go for it!  */
		    if ((probability >= REG_BR_PROB_BASE / 2 && (probability2 > probability))
			|| (probability <= REG_BR_PROB_BASE / 2 && (probability2 < probability)))
		      prob = probability2;
		  }
	      if (!pred2)
		best_probability = prob, best_predictor = predictor;
	    }

	  d = (combined_probability * probability
	       + (REG_BR_PROB_BASE - combined_probability)
	       * (REG_BR_PROB_BASE - probability));

	  /* Use FP math to avoid overflows of 32bit integers.  */
	  if (d == 0)
	    /* If one probability is 0% and one 100%, avoid division by zero.  */
	    combined_probability = REG_BR_PROB_BASE / 2;
	  else
	    combined_probability = (((double) combined_probability)
				    * probability
				    * REG_BR_PROB_BASE / d + 0.5);
	}
    }

  /* Decide which heuristic to use.  In case we didn't match anything,
     use no_prediction heuristic, in case we did match, use either
     first match or Dempster-Shafer theory depending on the flags.  */

  if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
    first_match = true;

  if (!found)
    dump_prediction (dump_file, PRED_NO_PREDICTION, combined_probability, bb, true);
  else
    {
      dump_prediction (dump_file, PRED_DS_THEORY, combined_probability, bb,
		       !first_match);
      dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability, bb,
		       first_match);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb, true);

  if (preds)
    {
      for (pred = (struct edge_prediction *) *preds; pred; pred = pred->ep_next)
	{
	  enum br_predictor predictor = pred->ep_predictor;
	  int probability = pred->ep_probability;

	  if (pred->ep_edge != EDGE_SUCC (bb, 0))
	    probability = REG_BR_PROB_BASE - probability;
	  dump_prediction (dump_file, predictor, probability, bb,
			   !first_match || best_predictor == predictor);
	}
    }
  clear_bb_predictions (bb);

  if (!bb->count)
    {
      first->probability = combined_probability;
      second->probability = REG_BR_PROB_BASE - combined_probability;
    }
}

/* Check if T1 and T2 satisfy the IV_COMPARE condition.
   Return the SSA_NAME if the condition is satisfied, NULL otherwise.

   T1 and T2 should be one of the following cases:
     1. T1 is SSA_NAME, T2 is NULL
     2. T1 is SSA_NAME, T2 is INTEGER_CST between [-4, 4]
     3. T2 is SSA_NAME, T1 is INTEGER_CST between [-4, 4]  */

static tree
strips_small_constant (tree t1, tree t2)
{
  tree ret = NULL;
  int value = 0;

  if (!t1)
    return NULL;
  else if (TREE_CODE (t1) == SSA_NAME)
    ret = t1;
  else if (tree_fits_shwi_p (t1))
    value = tree_to_shwi (t1);
  else
    return NULL;

  if (!t2)
    return ret;
  else if (tree_fits_shwi_p (t2))
    value = tree_to_shwi (t2);
  else if (TREE_CODE (t2) == SSA_NAME)
    {
      if (ret)
	return NULL;
      else
	ret = t2;
    }

  if (value <= 4 && value >= -4)
    return ret;
  else
    return NULL;
}

/* Return the SSA_NAME in T or T's operands.
   Return NULL if SSA_NAME cannot be found.  */

static tree
get_base_value (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    return t;

  if (!BINARY_CLASS_P (t))
    return NULL;

  switch (TREE_OPERAND_LENGTH (t))
    {
    case 1:
      return strips_small_constant (TREE_OPERAND (t, 0), NULL);
    case 2:
      return strips_small_constant (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));
    default:
      return NULL;
    }
}

/* Check the compare STMT in LOOP.  If it compares an induction
   variable to a loop invariant, return true, and save
   LOOP_INVARIANT, COMPARE_CODE and LOOP_STEP.
   Otherwise return false and set LOOP_INVARIANT to NULL.  */

static bool
is_comparison_with_loop_invariant_p (gimple stmt, struct loop *loop,
				     tree *loop_invariant,
				     enum tree_code *compare_code,
				     tree *loop_step,
				     tree *loop_iv_base)
{
  tree op0, op1, bound, base;
  affine_iv iv0, iv1;
  enum tree_code code;
  tree step;

  code = gimple_cond_code (stmt);
  *loop_invariant = NULL;

  switch (code)
    {
    case GT_EXPR:
    case GE_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case EQ_EXPR:
      break;

    default:
      return false;
    }

  op0 = gimple_cond_lhs (stmt);
  op1 = gimple_cond_rhs (stmt);

  if ((TREE_CODE (op0) != SSA_NAME && TREE_CODE (op0) != INTEGER_CST)
      || (TREE_CODE (op1) != SSA_NAME && TREE_CODE (op1) != INTEGER_CST))
    return false;
  if (!simple_iv (loop, loop_containing_stmt (stmt), op0, &iv0, true))
    return false;
  if (!simple_iv (loop, loop_containing_stmt (stmt), op1, &iv1, true))
    return false;
  if (TREE_CODE (iv0.step) != INTEGER_CST
      || TREE_CODE (iv1.step) != INTEGER_CST)
    return false;
  if ((integer_zerop (iv0.step) && integer_zerop (iv1.step))
      || (!integer_zerop (iv0.step) && !integer_zerop (iv1.step)))
    return false;

  if (integer_zerop (iv0.step))
    {
      if (code != NE_EXPR && code != EQ_EXPR)
	code = invert_tree_comparison (code, false);
      bound = iv0.base;
      base = iv1.base;
      if (tree_fits_shwi_p (iv1.step))
	step = iv1.step;
      else
	return false;
    }
  else
    {
      bound = iv1.base;
      base = iv0.base;
      if (tree_fits_shwi_p (iv0.step))
	step = iv0.step;
      else
	return false;
    }

  if (TREE_CODE (bound) != INTEGER_CST)
    bound = get_base_value (bound);
  if (!bound)
    return false;
  if (TREE_CODE (base) != INTEGER_CST)
    base = get_base_value (base);
  if (!base)
    return false;

  *loop_invariant = bound;
  *compare_code = code;
  *loop_step = step;
  *loop_iv_base = base;
  return true;
}

/* Compare two SSA_NAMEs: returns TRUE if T1 and T2 are value coherent.  */

static bool
expr_coherent_p (tree t1, tree t2)
{
  gimple stmt;
  tree ssa_name_1 = NULL;
  tree ssa_name_2 = NULL;

  gcc_assert (TREE_CODE (t1) == SSA_NAME || TREE_CODE (t1) == INTEGER_CST);
  gcc_assert (TREE_CODE (t2) == SSA_NAME || TREE_CODE (t2) == INTEGER_CST);

  if (t1 == t2)
    return true;

  if (TREE_CODE (t1) == INTEGER_CST && TREE_CODE (t2) == INTEGER_CST)
    return true;
  if (TREE_CODE (t1) == INTEGER_CST || TREE_CODE (t2) == INTEGER_CST)
    return false;

  /* Check to see if t1 is expressed/defined with t2.  */
  stmt = SSA_NAME_DEF_STMT (t1);
  gcc_assert (stmt != NULL);
  if (is_gimple_assign (stmt))
    {
      ssa_name_1 = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_USE);
      if (ssa_name_1 && ssa_name_1 == t2)
	return true;
    }

  /* Check to see if t2 is expressed/defined with t1.  */
  stmt = SSA_NAME_DEF_STMT (t2);
  gcc_assert (stmt != NULL);
  if (is_gimple_assign (stmt))
    {
      ssa_name_2 = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_USE);
      if (ssa_name_2 && ssa_name_2 == t1)
	return true;
    }

  /* Compare if t1 and t2's def_stmts are identical.  */
  if (ssa_name_2 != NULL && ssa_name_1 == ssa_name_2)
    return true;
  else
    return false;
}

/* Predict branch probability of BB when BB contains a branch that compares
   an induction variable in LOOP with LOOP_IV_BASE_VAR to LOOP_BOUND_VAR.  The
   loop exit is compared using LOOP_BOUND_CODE, with step of LOOP_BOUND_STEP.

   E.g.
     for (int i = 0; i < bound; i++) {
       if (i < bound - 2)
	 computation_1();
       else
	 computation_2();
     }

   In this loop, we will predict the branch inside the loop to be taken.  */

static void
predict_iv_comparison (struct loop *loop, basic_block bb,
		       tree loop_bound_var,
		       tree loop_iv_base_var,
		       enum tree_code loop_bound_code,
		       int loop_bound_step)
{
  gimple stmt;
  tree compare_var, compare_base;
  enum tree_code compare_code;
  tree compare_step_var;
  edge then_edge;
  edge_iterator ei;

  if (predicted_by_p (bb, PRED_LOOP_ITERATIONS_GUESSED)
      || predicted_by_p (bb, PRED_LOOP_ITERATIONS)
      || predicted_by_p (bb, PRED_LOOP_EXIT))
    return;

  stmt = last_stmt (bb);
  if (!stmt || gimple_code (stmt) != GIMPLE_COND)
    return;
  if (!is_comparison_with_loop_invariant_p (stmt, loop, &compare_var,
					    &compare_code,
					    &compare_step_var,
					    &compare_base))
    return;

  /* Find the taken edge.  */
  FOR_EACH_EDGE (then_edge, ei, bb->succs)
    if (then_edge->flags & EDGE_TRUE_VALUE)
      break;

  /* When comparing an IV to a loop invariant, NE is more likely to be
     taken while EQ is more likely to be not-taken.  */
  if (compare_code == NE_EXPR)
    {
      predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
      return;
    }
  else if (compare_code == EQ_EXPR)
    {
      predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
      return;
    }

  if (!expr_coherent_p (loop_iv_base_var, compare_base))
    return;

  /* If loop bound, base and compare bound are all constants, we can
     calculate the probability directly.  */
  if (tree_fits_shwi_p (loop_bound_var)
      && tree_fits_shwi_p (compare_var)
      && tree_fits_shwi_p (compare_base))
    {
      int probability;
      bool overflow, overall_overflow = false;
      widest_int compare_count, tem;

      /* (loop_bound - base) / compare_step */
      tem = wi::sub (wi::to_widest (loop_bound_var),
		     wi::to_widest (compare_base), SIGNED, &overflow);
      overall_overflow |= overflow;
      widest_int loop_count = wi::div_trunc (tem,
					     wi::to_widest (compare_step_var),
					     SIGNED, &overflow);
      overall_overflow |= overflow;

      if (!wi::neg_p (wi::to_widest (compare_step_var))
	  ^ (compare_code == LT_EXPR || compare_code == LE_EXPR))
	{
	  /* (loop_bound - compare_bound) / compare_step */
	  tem = wi::sub (wi::to_widest (loop_bound_var),
			 wi::to_widest (compare_var), SIGNED, &overflow);
	  overall_overflow |= overflow;
	  compare_count = wi::div_trunc (tem, wi::to_widest (compare_step_var),
					 SIGNED, &overflow);
	  overall_overflow |= overflow;
	}
      else
	{
	  /* (compare_bound - base) / compare_step */
	  tem = wi::sub (wi::to_widest (compare_var),
			 wi::to_widest (compare_base), SIGNED, &overflow);
	  overall_overflow |= overflow;
	  compare_count = wi::div_trunc (tem, wi::to_widest (compare_step_var),
					 SIGNED, &overflow);
	  overall_overflow |= overflow;
	}
      if (compare_code == LE_EXPR || compare_code == GE_EXPR)
	++compare_count;
      if (loop_bound_code == LE_EXPR || loop_bound_code == GE_EXPR)
	++loop_count;
      if (wi::neg_p (compare_count))
	compare_count = 0;
      if (wi::neg_p (loop_count))
	loop_count = 0;
      if (loop_count == 0)
	probability = 0;
      else if (wi::cmps (compare_count, loop_count) == 1)
	probability = REG_BR_PROB_BASE;
      else
	{
	  tem = compare_count * REG_BR_PROB_BASE;
	  tem = wi::udiv_trunc (tem, loop_count);
	  probability = tem.to_uhwi ();
	}

      if (!overall_overflow)
	predict_edge (then_edge, PRED_LOOP_IV_COMPARE, probability);

      return;
    }
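
  /* Sketch of the constant case above: for
     "for (i = 0; i < 100; i++) if (i < 30) ..." we get
     loop_count == (100 - 0) / 1 == 100 and
     compare_count == (30 - 0) / 1 == 30, so the THEN edge is
     predicted with probability 30 * REG_BR_PROB_BASE / 100,
     i.e. 30%.  */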

  if (expr_coherent_p (loop_bound_var, compare_var))
    {
      if ((loop_bound_code == LT_EXPR || loop_bound_code == LE_EXPR)
	  && (compare_code == LT_EXPR || compare_code == LE_EXPR))
	predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
      else if ((loop_bound_code == GT_EXPR || loop_bound_code == GE_EXPR)
	       && (compare_code == GT_EXPR || compare_code == GE_EXPR))
	predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
      else if (loop_bound_code == NE_EXPR)
	{
	  /* If the loop backedge condition is "(i != bound)", we do
	     the comparison based on the step of IV:
	     * step < 0 : backedge condition is like (i > bound)
	     * step > 0 : backedge condition is like (i < bound)  */
	  gcc_assert (loop_bound_step != 0);
	  if (loop_bound_step > 0
	      && (compare_code == LT_EXPR
		  || compare_code == LE_EXPR))
	    predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
	  else if (loop_bound_step < 0
		   && (compare_code == GT_EXPR
		       || compare_code == GE_EXPR))
	    predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
	  else
	    predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
	}
      else
	/* The branch is predicted not-taken if loop_bound_code is
	   opposite with compare_code.  */
	predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
    }
  else if (expr_coherent_p (loop_iv_base_var, compare_var))
    {
      /* For cases like:
	   for (i = s; i < h; i++)
	     if (i > s + 2) ....
	 The branch should be predicted taken.  */
      if (loop_bound_step > 0
	  && (compare_code == GT_EXPR || compare_code == GE_EXPR))
	predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
      else if (loop_bound_step < 0
	       && (compare_code == LT_EXPR || compare_code == LE_EXPR))
	predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
      else
	predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
    }
}

/* Predict for extra loop exits that will lead to EXIT_EDGE.  The extra loop
   exits result from short-circuit conditions that will generate an
   if_tmp.  E.g.:

     if (foo() || global > 10)
       break;

   This will be translated into:

   BB3:
     loop header...
   BB4:
     if foo() goto BB6 else goto BB5
   BB5:
     if global > 10 goto BB6 else goto BB7
   BB6:
     goto BB7
   BB7:
     iftmp = (PHI 0(BB5), 1(BB6))
     if iftmp == 1 goto BB8 else goto BB3
   BB8:
     outside of the loop...

   The edge BB7->BB8 is loop exit because BB8 is outside of the loop.
   From the dataflow, we can infer that BB4->BB6 and BB5->BB6 are also loop
   exits.  This function takes BB7->BB8 as input, and finds out the extra loop
   exits to predict them using PRED_LOOP_EXIT.  */

static void
predict_extra_loop_exits (edge exit_edge)
{
  unsigned i;
  bool check_value_one;
  gimple phi_stmt;
  tree cmp_rhs, cmp_lhs;
  gimple cmp_stmt = last_stmt (exit_edge->src);

  if (!cmp_stmt || gimple_code (cmp_stmt) != GIMPLE_COND)
    return;
  cmp_rhs = gimple_cond_rhs (cmp_stmt);
  cmp_lhs = gimple_cond_lhs (cmp_stmt);
  if (!TREE_CONSTANT (cmp_rhs)
      || !(integer_zerop (cmp_rhs) || integer_onep (cmp_rhs)))
    return;
  if (TREE_CODE (cmp_lhs) != SSA_NAME)
    return;

  /* If check_value_one is true, only the phi_args with value '1' will lead
     to loop exit.  Otherwise, only the phi_args with value '0' will lead to
     loop exit.  */
  check_value_one = (((integer_onep (cmp_rhs))
		      ^ (gimple_cond_code (cmp_stmt) == EQ_EXPR))
		     ^ ((exit_edge->flags & EDGE_TRUE_VALUE) != 0));
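
  /* In the example above, "if (iftmp == 1) goto BB8" with BB8 reached
     on the TRUE edge gives (1 ^ 1) ^ 1 == 1, so the PHI arguments
     equal to '1' are the ones that lead out of the loop.  */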

  phi_stmt = SSA_NAME_DEF_STMT (cmp_lhs);
  if (!phi_stmt || gimple_code (phi_stmt) != GIMPLE_PHI)
    return;

  for (i = 0; i < gimple_phi_num_args (phi_stmt); i++)
    {
      edge e1;
      edge_iterator ei;
      tree val = gimple_phi_arg_def (phi_stmt, i);
      edge e = gimple_phi_arg_edge (phi_stmt, i);

      if (!TREE_CONSTANT (val) || !(integer_zerop (val) || integer_onep (val)))
	continue;
      if ((check_value_one ^ integer_onep (val)) == 1)
	continue;
      if (EDGE_COUNT (e->src->succs) != 1)
	{
	  predict_paths_leading_to_edge (e, PRED_LOOP_EXIT, NOT_TAKEN);
	  continue;
	}

      FOR_EACH_EDGE (e1, ei, e->src->preds)
	predict_paths_leading_to_edge (e1, PRED_LOOP_EXIT, NOT_TAKEN);
    }
}
d73be268 ZD |
1455 | /* Predict edge probabilities by exploiting loop structure. */ |
1456 | ||
6de9cd9a | 1457 | static void |
d73be268 | 1458 | predict_loops (void) |
6de9cd9a | 1459 | { |
42fd6772 | 1460 | struct loop *loop; |
0b92ff33 | 1461 | |
65169dcf JE |
1462 | /* Try to predict out blocks in a loop that are not part of a |
1463 | natural loop. */ | |
f0bd40b1 | 1464 | FOR_EACH_LOOP (loop, 0) |
f1ebdfc5 | 1465 | { |
2ecfd709 | 1466 | basic_block bb, *bbs; |
ca83d385 | 1467 | unsigned j, n_exits; |
9771b263 | 1468 | vec<edge> exits; |
992c31e6 | 1469 | struct tree_niter_desc niter_desc; |
ca83d385 | 1470 | edge ex; |
9c04723a DC |
1471 | struct nb_iter_bound *nb_iter; |
1472 | enum tree_code loop_bound_code = ERROR_MARK; | |
ecd4f20a | 1473 | tree loop_bound_step = NULL; |
9c04723a DC |
1474 | tree loop_bound_var = NULL; |
1475 | tree loop_iv_base = NULL; | |
1476 | gimple stmt = NULL; | |
f1ebdfc5 | 1477 | |
ca83d385 | 1478 | exits = get_loop_exit_edges (loop); |
9771b263 | 1479 | n_exits = exits.length (); |
f481cd49 JH |
1480 | if (!n_exits) |
1481 | { | |
9771b263 | 1482 | exits.release (); |
f481cd49 JH |
1483 | continue; |
1484 | } | |
0dd0e980 | 1485 | |
9771b263 | 1486 | FOR_EACH_VEC_ELT (exits, j, ex) |
b6acab32 | 1487 | { |
992c31e6 | 1488 | tree niter = NULL; |
4839cb59 ZD |
1489 | HOST_WIDE_INT nitercst; |
1490 | int max = PARAM_VALUE (PARAM_MAX_PREDICTED_ITERATIONS); | |
1491 | int probability; | |
1492 | enum br_predictor predictor; | |
b6acab32 | 1493 | |
16fdb75f DC |
1494 | predict_extra_loop_exits (ex); |
1495 | ||
46deac6c | 1496 | if (number_of_iterations_exit (loop, ex, &niter_desc, false, false)) |
992c31e6 JH |
1497 | niter = niter_desc.niter; |
1498 | if (!niter || TREE_CODE (niter_desc.niter) != INTEGER_CST) | |
ca83d385 | 1499 | niter = loop_niter_by_eval (loop, ex); |
b6acab32 | 1500 | |
992c31e6 JH |
1501 | if (TREE_CODE (niter) == INTEGER_CST) |
1502 | { | |
cc269bb6 | 1503 | if (tree_fits_uhwi_p (niter) |
2aa579ad JJ |
1504 | && max |
1505 | && compare_tree_int (niter, max - 1) == -1) | |
ae7e9ddd | 1506 | nitercst = tree_to_uhwi (niter) + 1; |
992c31e6 | 1507 | else |
4839cb59 ZD |
1508 | nitercst = max; |
1509 | predictor = PRED_LOOP_ITERATIONS; | |
1510 | } | |
1511 | /* If we have just one exit and we can derive some information about | |
1512 | the number of iterations of the loop from the statements inside | |
1513 | the loop, use it to predict this exit. */ | |
1514 | else if (n_exits == 1) | |
1515 | { | |
652c4c71 | 1516 | nitercst = estimated_stmt_executions_int (loop); |
4839cb59 ZD |
1517 | if (nitercst < 0) |
1518 | continue; | |
1519 | if (nitercst > max) | |
1520 | nitercst = max; | |
b6acab32 | 1521 | |
4839cb59 | 1522 | predictor = PRED_LOOP_ITERATIONS_GUESSED; |
992c31e6 | 1523 | } |
4839cb59 ZD |
1524 | else |
1525 | continue; | |
1526 | ||
2aa579ad JJ |
1527 | /* If the prediction for the number of iterations is zero, do not | |
1528 | predict the exit edges. */ | |
1529 | if (nitercst == 0) | |
1530 | continue; | |
1531 | ||
4839cb59 ZD |
1532 | probability = ((REG_BR_PROB_BASE + nitercst / 2) / nitercst); |
1533 | predict_edge (ex, predictor, probability); | |
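/* Worked example (illustrative; REG_BR_PROB_BASE is 10000): for a loop
   predicted to iterate nitercst == 10 times, the rounded probability is
   (10000 + 5) / 10 == 1000, i.e. each execution of the exit test is
   predicted to leave the loop roughly once in ten iterations.  */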
b6acab32 | 1534 | } |
9771b263 | 1535 | exits.release (); |
3d436d2a | 1536 | |
9c04723a DC |
1537 | /* Find information about loop bound variables. */ |
1538 | for (nb_iter = loop->bounds; nb_iter; | |
1539 | nb_iter = nb_iter->next) | |
1540 | if (nb_iter->stmt | |
1541 | && gimple_code (nb_iter->stmt) == GIMPLE_COND) | |
1542 | { | |
1543 | stmt = nb_iter->stmt; | |
1544 | break; | |
1545 | } | |
1546 | if (!stmt && last_stmt (loop->header) | |
1547 | && gimple_code (last_stmt (loop->header)) == GIMPLE_COND) | |
1548 | stmt = last_stmt (loop->header); | |
1549 | if (stmt) | |
1550 | is_comparison_with_loop_invariant_p (stmt, loop, | |
1551 | &loop_bound_var, | |
1552 | &loop_bound_code, | |
1553 | &loop_bound_step, | |
1554 | &loop_iv_base); | |
1555 | ||
2ecfd709 | 1556 | bbs = get_loop_body (loop); |
6de9cd9a | 1557 | |
2ecfd709 ZD |
1558 | for (j = 0; j < loop->num_nodes; j++) |
1559 | { | |
1560 | int header_found = 0; | |
1561 | edge e; | |
628f6a4e | 1562 | edge_iterator ei; |
2ecfd709 ZD |
1563 | |
1564 | bb = bbs[j]; | |
bfdade77 | 1565 | |
969d70ca JH |
1566 | /* Bypass loop heuristics on continue statements. These | |
1567 | statements construct loops via "non-loop" constructs | |
1568 | in the source language and are better handled | |
1569 | separately. */ | |
992c31e6 | 1570 | if (predicted_by_p (bb, PRED_CONTINUE)) |
969d70ca JH |
1571 | continue; |
1572 | ||
2ecfd709 ZD |
1573 | /* Loop branch heuristics - predict an edge back to a |
1574 | loop's head as taken. */ | |
9ff3d2de JL |
1575 | if (bb == loop->latch) |
1576 | { | |
1577 | e = find_edge (loop->latch, loop->header); | |
1578 | if (e) | |
1579 | { | |
1580 | header_found = 1; | |
1581 | predict_edge_def (e, PRED_LOOP_BRANCH, TAKEN); | |
1582 | } | |
1583 | } | |
bfdade77 | 1584 | |
2ecfd709 | 1585 | /* Loop exit heuristics - predict as not taken an edge exiting the loop | |
d55d8fc7 | 1586 | if the conditional has no loop header successor. */ | |
4839cb59 ZD |
1587 | if (!header_found |
1588 | /* If we already used more reliable loop exit predictors, do not | |
1589 | bother with PRED_LOOP_EXIT. */ | |
1590 | && !predicted_by_p (bb, PRED_LOOP_ITERATIONS_GUESSED) | |
1591 | && !predicted_by_p (bb, PRED_LOOP_ITERATIONS)) | |
2c9e13f3 JH |
1592 | { |
1593 | /* For a loop with many exits we don't want to predict all exits | |
1594 | with a fairly large probability, because if all exits are | |
1595 | considered in a row, the loop would be predicted to iterate | |
1596 | almost never. The code that divides the probability by the | |
1597 | number of exits is very rough. It should compute the number of | |
1598 | exits taken on each path through the function (not the overall | |
1599 | number of exits, which can be a lot higher for loops with wide | |
1600 | switch statements in them) and compute the n-th root. | |
1601 | | |
1602 | We limit the minimal probability to 2% to avoid | |
1603 | EDGE_PROBABILITY_RELIABLE from trusting the branch prediction | |
1604 | as this was causing a regression in a perl benchmark containing | |
1605 | such a wide loop. */ | |
b8698a0f | 1606 | |
2c9e13f3 JH |
1607 | int probability = ((REG_BR_PROB_BASE |
1608 | - predictor_info [(int) PRED_LOOP_EXIT].hitrate) | |
1609 | / n_exits); | |
1610 | if (probability < HITRATE (2)) | |
1611 | probability = HITRATE (2); | |
1612 | FOR_EACH_EDGE (e, ei, bb->succs) | |
1613 | if (e->dest->index < NUM_FIXED_BLOCKS | |
1614 | || !flow_bb_inside_loop_p (loop, e->dest)) | |
1615 | predict_edge (e, PRED_LOOP_EXIT, probability); | |
1616 | } | |
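/* Worked example (illustrative; the real PRED_LOOP_EXIT hitrate lives in
   predict.def): assuming a hitrate of 90%, a loop with four exits gives
   each exit edge (10000 - 9000) / 4 == 250, i.e. 2.5%; with ten exits the
   raw value of 100 would fall below HITRATE (2) == 200 and is clamped to
   the 2% floor.  */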
9c04723a DC |
1617 | if (loop_bound_var) |
1618 | predict_iv_comparison (loop, bb, loop_bound_var, loop_iv_base, | |
1619 | loop_bound_code, | |
9439e9a1 | 1620 | tree_to_shwi (loop_bound_step)); |
2ecfd709 | 1621 | } |
b8698a0f | 1622 | |
e0a21ab9 | 1623 | /* Free basic blocks from get_loop_body. */ |
36579663 | 1624 | free (bbs); |
f1ebdfc5 | 1625 | } |
6de9cd9a DN |
1626 | } |
1627 | ||
87022a6b JH |
1628 | /* Attempt to predict probabilities of BB outgoing edges using local |
1629 | properties. */ | |
1630 | static void | |
1631 | bb_estimate_probability_locally (basic_block bb) | |
1632 | { | |
9f215bf5 | 1633 | rtx_insn *last_insn = BB_END (bb); |
87022a6b JH |
1634 | rtx cond; |
1635 | ||
1636 | if (! can_predict_insn_p (last_insn)) | |
1637 | return; | |
1638 | cond = get_condition (last_insn, NULL, false, false); | |
1639 | if (! cond) | |
1640 | return; | |
1641 | ||
1642 | /* Try "pointer heuristic." | |
1643 | A comparison ptr == 0 is predicted as false. | |
1644 | Similarly, a comparison ptr1 == ptr2 is predicted as false. */ | |
1645 | if (COMPARISON_P (cond) | |
1646 | && ((REG_P (XEXP (cond, 0)) && REG_POINTER (XEXP (cond, 0))) | |
1647 | || (REG_P (XEXP (cond, 1)) && REG_POINTER (XEXP (cond, 1))))) | |
1648 | { | |
1649 | if (GET_CODE (cond) == EQ) | |
1650 | predict_insn_def (last_insn, PRED_POINTER, NOT_TAKEN); | |
1651 | else if (GET_CODE (cond) == NE) | |
1652 | predict_insn_def (last_insn, PRED_POINTER, TAKEN); | |
1653 | } | |
1654 | else | |
1655 | ||
1656 | /* Try "opcode heuristic." | |
1657 | EQ tests are usually false and NE tests are usually true. Also, | |
1658 | most quantities are positive, so we can make the appropriate guesses | |
1659 | about signed comparisons against zero. */ | |
1660 | switch (GET_CODE (cond)) | |
1661 | { | |
1662 | case CONST_INT: | |
1663 | /* Unconditional branch. */ | |
1664 | predict_insn_def (last_insn, PRED_UNCONDITIONAL, | |
1665 | cond == const0_rtx ? NOT_TAKEN : TAKEN); | |
1666 | break; | |
1667 | ||
1668 | case EQ: | |
1669 | case UNEQ: | |
1670 | /* Floating point comparisons appear to behave in a very | |
1671 | unpredictable way because of the special role of = tests in | |
1672 | FP code. */ | |
1673 | if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0)))) | |
1674 | ; | |
1675 | /* Comparisons with 0 are often used for booleans and there is | |
1676 | nothing useful to predict about them. */ | |
1677 | else if (XEXP (cond, 1) == const0_rtx | |
1678 | || XEXP (cond, 0) == const0_rtx) | |
1679 | ; | |
1680 | else | |
1681 | predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, NOT_TAKEN); | |
1682 | break; | |
1683 | ||
1684 | case NE: | |
1685 | case LTGT: | |
1686 | /* Floating point comparisons appear to behave in a very | |
1687 | unpredictable way because of the special role of = tests in | |
1688 | FP code. */ | |
1689 | if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0)))) | |
1690 | ; | |
1691 | /* Comparisons with 0 are often used for booleans and there is | |
1692 | nothing useful to predict about them. */ | |
1693 | else if (XEXP (cond, 1) == const0_rtx | |
1694 | || XEXP (cond, 0) == const0_rtx) | |
1695 | ; | |
1696 | else | |
1697 | predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, TAKEN); | |
1698 | break; | |
1699 | ||
1700 | case ORDERED: | |
1701 | predict_insn_def (last_insn, PRED_FPOPCODE, TAKEN); | |
1702 | break; | |
1703 | ||
1704 | case UNORDERED: | |
1705 | predict_insn_def (last_insn, PRED_FPOPCODE, NOT_TAKEN); | |
1706 | break; | |
1707 | ||
1708 | case LE: | |
1709 | case LT: | |
1710 | if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx | |
1711 | || XEXP (cond, 1) == constm1_rtx) | |
1712 | predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, NOT_TAKEN); | |
1713 | break; | |
1714 | ||
1715 | case GE: | |
1716 | case GT: | |
1717 | if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx | |
1718 | || XEXP (cond, 1) == constm1_rtx) | |
1719 | predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, TAKEN); | |
1720 | break; | |
1721 | ||
1722 | default: | |
1723 | break; | |
1724 | } | |
1725 | } | |
1726 | ||
229031d0 | 1727 | /* Set edge->probability for each successor edge of BB. */ |
87022a6b JH |
1728 | void |
1729 | guess_outgoing_edge_probabilities (basic_block bb) | |
1730 | { | |
1731 | bb_estimate_probability_locally (bb); | |
1732 | combine_predictions_for_insn (BB_END (bb), bb); | |
1733 | } | |
6de9cd9a | 1734 | \f |
ed9c79e1 | 1735 | static tree expr_expected_value (tree, bitmap, enum br_predictor *predictor); |
726a989a RB |
1736 | |
1737 | /* Helper function for expr_expected_value. */ | |
42f97fd2 JH |
1738 | |
1739 | static tree | |
c08472ea | 1740 | expr_expected_value_1 (tree type, tree op0, enum tree_code code, |
ed9c79e1 | 1741 | tree op1, bitmap visited, enum br_predictor *predictor) |
42f97fd2 | 1742 | { |
726a989a RB |
1743 | gimple def; |
1744 | ||
ed9c79e1 JJ |
1745 | if (predictor) |
1746 | *predictor = PRED_UNCONDITIONAL; | |
1747 | ||
726a989a | 1748 | if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS) |
42f97fd2 | 1749 | { |
726a989a RB |
1750 | if (TREE_CONSTANT (op0)) |
1751 | return op0; | |
1752 | ||
1753 | if (code != SSA_NAME) | |
1754 | return NULL_TREE; | |
1755 | ||
1756 | def = SSA_NAME_DEF_STMT (op0); | |
42f97fd2 JH |
1757 | |
1758 | /* If we were already here, break the infinite cycle. */ | |
fcaa4ca4 | 1759 | if (!bitmap_set_bit (visited, SSA_NAME_VERSION (op0))) |
42f97fd2 | 1760 | return NULL; |
42f97fd2 | 1761 | |
726a989a | 1762 | if (gimple_code (def) == GIMPLE_PHI) |
42f97fd2 JH |
1763 | { |
1764 | /* All the arguments of the PHI node must have the same expected | |
1765 | constant value. */ | |
726a989a | 1766 | int i, n = gimple_phi_num_args (def); |
42f97fd2 | 1767 | tree val = NULL, new_val; |
6de9cd9a | 1768 | |
726a989a | 1769 | for (i = 0; i < n; i++) |
42f97fd2 JH |
1770 | { |
1771 | tree arg = PHI_ARG_DEF (def, i); | |
ed9c79e1 | 1772 | enum br_predictor predictor2; |
42f97fd2 JH |
1773 | |
1774 | /* If this PHI has itself as an argument, we cannot | |
1775 | determine the expected value of this argument. However, | |
1f838355 | 1776 | if we can find an expected constant value for the other |
42f97fd2 JH |
1777 | PHI args then we can still be sure that this is |
1778 | likely a constant. So be optimistic and just | |
1779 | continue with the next argument. */ | |
1780 | if (arg == PHI_RESULT (def)) | |
1781 | continue; | |
1782 | ||
ed9c79e1 JJ |
1783 | new_val = expr_expected_value (arg, visited, &predictor2); |
1784 | ||
1785 | /* It is difficult to combine value predictors. Simply assume | |
1786 | that the later predictor is weaker and take its prediction. */ | |
1787 | if (predictor && *predictor < predictor2) | |
1788 | *predictor = predictor2; | |
42f97fd2 JH |
1789 | if (!new_val) |
1790 | return NULL; | |
1791 | if (!val) | |
1792 | val = new_val; | |
1793 | else if (!operand_equal_p (val, new_val, false)) | |
1794 | return NULL; | |
1795 | } | |
1796 | return val; | |
1797 | } | |
726a989a | 1798 | if (is_gimple_assign (def)) |
42f97fd2 | 1799 | { |
726a989a RB |
1800 | if (gimple_assign_lhs (def) != op0) |
1801 | return NULL; | |
42f97fd2 | 1802 | |
726a989a RB |
1803 | return expr_expected_value_1 (TREE_TYPE (gimple_assign_lhs (def)), |
1804 | gimple_assign_rhs1 (def), | |
1805 | gimple_assign_rhs_code (def), | |
1806 | gimple_assign_rhs2 (def), | |
ed9c79e1 | 1807 | visited, predictor); |
726a989a RB |
1808 | } |
1809 | ||
1810 | if (is_gimple_call (def)) | |
1811 | { | |
1812 | tree decl = gimple_call_fndecl (def); | |
1813 | if (!decl) | |
ed9c79e1 JJ |
1814 | { |
1815 | if (gimple_call_internal_p (def) | |
1816 | && gimple_call_internal_fn (def) == IFN_BUILTIN_EXPECT) | |
1817 | { | |
1818 | gcc_assert (gimple_call_num_args (def) == 3); | |
1819 | tree val = gimple_call_arg (def, 0); | |
1820 | if (TREE_CONSTANT (val)) | |
1821 | return val; | |
1822 | if (predictor) | |
1823 | { | |
ed9c79e1 JJ |
1824 | tree val2 = gimple_call_arg (def, 2); |
1825 | gcc_assert (TREE_CODE (val2) == INTEGER_CST | |
1826 | && tree_fits_uhwi_p (val2) | |
1827 | && tree_to_uhwi (val2) < END_PREDICTORS); | |
1828 | *predictor = (enum br_predictor) tree_to_uhwi (val2); | |
1829 | } | |
1830 | return gimple_call_arg (def, 1); | |
1831 | } | |
1832 | return NULL; | |
1833 | } | |
c08472ea RH |
1834 | if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL) |
1835 | switch (DECL_FUNCTION_CODE (decl)) | |
1836 | { | |
1837 | case BUILT_IN_EXPECT: | |
1838 | { | |
1839 | tree val; | |
1840 | if (gimple_call_num_args (def) != 2) | |
1841 | return NULL; | |
1842 | val = gimple_call_arg (def, 0); | |
1843 | if (TREE_CONSTANT (val)) | |
1844 | return val; | |
ed9c79e1 JJ |
1845 | if (predictor) |
1846 | *predictor = PRED_BUILTIN_EXPECT; | |
c08472ea RH |
1847 | return gimple_call_arg (def, 1); |
1848 | } | |
726a989a | 1849 | |
c08472ea RH |
1850 | case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_N: |
1851 | case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1: | |
1852 | case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2: | |
1853 | case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4: | |
1854 | case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8: | |
1855 | case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16: | |
1856 | case BUILT_IN_ATOMIC_COMPARE_EXCHANGE: | |
1857 | case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_N: | |
1858 | case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1: | |
1859 | case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2: | |
1860 | case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4: | |
1861 | case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8: | |
1862 | case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16: | |
1863 | /* Assume that any given atomic operation has low contention, | |
1864 | and thus the compare-and-swap operation succeeds. */ | |
ed9c79e1 JJ |
1865 | if (predictor) |
1866 | *predictor = PRED_COMPARE_AND_SWAP; | |
c08472ea | 1867 | return boolean_true_node; |
083e891e MP |
1868 | default: |
1869 | break; | |
726a989a | 1870 | } |
42f97fd2 | 1871 | } |
726a989a RB |
1872 | |
1873 | return NULL; | |
42f97fd2 | 1874 | } |
726a989a RB |
1875 | |
1876 | if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS) | |
42f97fd2 | 1877 | { |
726a989a | 1878 | tree res; |
ed9c79e1 JJ |
1879 | enum br_predictor predictor2; |
1880 | op0 = expr_expected_value (op0, visited, predictor); | |
42f97fd2 JH |
1881 | if (!op0) |
1882 | return NULL; | |
ed9c79e1 JJ |
1883 | op1 = expr_expected_value (op1, visited, &predictor2); |
1884 | if (predictor && *predictor < predictor2) | |
1885 | *predictor = predictor2; | |
42f97fd2 JH |
1886 | if (!op1) |
1887 | return NULL; | |
726a989a | 1888 | res = fold_build2 (code, type, op0, op1); |
42f97fd2 JH |
1889 | if (TREE_CONSTANT (res)) |
1890 | return res; | |
1891 | return NULL; | |
1892 | } | |
726a989a | 1893 | if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS) |
42f97fd2 | 1894 | { |
726a989a | 1895 | tree res; |
ed9c79e1 | 1896 | op0 = expr_expected_value (op0, visited, predictor); |
42f97fd2 JH |
1897 | if (!op0) |
1898 | return NULL; | |
726a989a | 1899 | res = fold_build1 (code, type, op0); |
42f97fd2 JH |
1900 | if (TREE_CONSTANT (res)) |
1901 | return res; | |
1902 | return NULL; | |
1903 | } | |
1904 | return NULL; | |
1905 | } | |
726a989a | 1906 | |
b8698a0f | 1907 | /* Return the constant EXPR will likely have at execution time, or NULL if unknown. | |
726a989a RB |
1908 | The function is used by the builtin_expect branch predictor, so the evidence | |
1909 | must come from this construct and possible additional constant folding. | |
b8698a0f | 1910 | |
726a989a RB |
1911 | We may want to implement a more involved value guess (such as value range | |
1912 | propagation based prediction), but such tricks should go into a new | |
1913 | implementation. */ | |
1914 | ||
1915 | static tree | |
ed9c79e1 JJ |
1916 | expr_expected_value (tree expr, bitmap visited, |
1917 | enum br_predictor *predictor) | |
726a989a RB |
1918 | { |
1919 | enum tree_code code; | |
1920 | tree op0, op1; | |
1921 | ||
1922 | if (TREE_CONSTANT (expr)) | |
ed9c79e1 JJ |
1923 | { |
1924 | if (predictor) | |
1925 | *predictor = PRED_UNCONDITIONAL; | |
1926 | return expr; | |
1927 | } | |
726a989a RB |
1928 | |
1929 | extract_ops_from_tree (expr, &code, &op0, &op1); | |
1930 | return expr_expected_value_1 (TREE_TYPE (expr), | |
ed9c79e1 | 1931 | op0, code, op1, visited, predictor); |
726a989a | 1932 | } |
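/* Source-level illustration (not in the original sources): for

     if (__builtin_expect (ptr != NULL, 1))
       use (ptr);

   the SSA chain feeding the condition reduces to the constant 1, so the
   guarded path is predicted taken with PRED_BUILTIN_EXPECT.  */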
42f97fd2 | 1933 | \f |
6de9cd9a DN |
1934 | /* Predict using opcode of the last statement in basic block. */ |
1935 | static void | |
1936 | tree_predict_by_opcode (basic_block bb) | |
1937 | { | |
726a989a | 1938 | gimple stmt = last_stmt (bb); |
6de9cd9a | 1939 | edge then_edge; |
726a989a | 1940 | tree op0, op1; |
6de9cd9a | 1941 | tree type; |
42f97fd2 | 1942 | tree val; |
726a989a | 1943 | enum tree_code cmp; |
42f97fd2 | 1944 | bitmap visited; |
628f6a4e | 1945 | edge_iterator ei; |
ed9c79e1 | 1946 | enum br_predictor predictor; |
6de9cd9a | 1947 | |
726a989a | 1948 | if (!stmt || gimple_code (stmt) != GIMPLE_COND) |
6de9cd9a | 1949 | return; |
628f6a4e | 1950 | FOR_EACH_EDGE (then_edge, ei, bb->succs) |
6de9cd9a | 1951 | if (then_edge->flags & EDGE_TRUE_VALUE) |
628f6a4e | 1952 | break; |
726a989a RB |
1953 | op0 = gimple_cond_lhs (stmt); |
1954 | op1 = gimple_cond_rhs (stmt); | |
1955 | cmp = gimple_cond_code (stmt); | |
6de9cd9a | 1956 | type = TREE_TYPE (op0); |
8bdbfff5 | 1957 | visited = BITMAP_ALLOC (NULL); |
ed9c79e1 JJ |
1958 | val = expr_expected_value_1 (boolean_type_node, op0, cmp, op1, visited, |
1959 | &predictor); | |
8bdbfff5 | 1960 | BITMAP_FREE (visited); |
ed9c79e1 | 1961 | if (val && TREE_CODE (val) == INTEGER_CST) |
42f97fd2 | 1962 | { |
ed9c79e1 JJ |
1963 | if (predictor == PRED_BUILTIN_EXPECT) |
1964 | { | |
1965 | int percent = PARAM_VALUE (BUILTIN_EXPECT_PROBABILITY); | |
942df739 | 1966 | |
ed9c79e1 JJ |
1967 | gcc_assert (percent >= 0 && percent <= 100); |
1968 | if (integer_zerop (val)) | |
1969 | percent = 100 - percent; | |
1970 | predict_edge (then_edge, PRED_BUILTIN_EXPECT, HITRATE (percent)); | |
1971 | } | |
1972 | else | |
1973 | predict_edge (then_edge, predictor, | |
1974 | integer_zerop (val) ? NOT_TAKEN : TAKEN); | |
42f97fd2 | 1975 | } |
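/* Worked example (illustrative; 90 is the usual default of
   --param builtin-expect-probability): for
   "if (__builtin_expect (cond, 0))" the expected value is zero, so
   percent becomes 100 - 90 == 10 and the true edge is predicted with
   HITRATE (10), i.e. roughly a 10% branch probability.  */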
6de9cd9a DN |
1976 | /* Try "pointer heuristic." |
1977 | A comparison ptr == 0 is predicted as false. | |
1978 | Similarly, a comparison ptr1 == ptr2 is predicted as false. */ | |
1979 | if (POINTER_TYPE_P (type)) | |
1980 | { | |
726a989a | 1981 | if (cmp == EQ_EXPR) |
6de9cd9a | 1982 | predict_edge_def (then_edge, PRED_TREE_POINTER, NOT_TAKEN); |
726a989a | 1983 | else if (cmp == NE_EXPR) |
6de9cd9a DN |
1984 | predict_edge_def (then_edge, PRED_TREE_POINTER, TAKEN); |
1985 | } | |
1986 | else | |
1987 | ||
1988 | /* Try "opcode heuristic." | |
1989 | EQ tests are usually false and NE tests are usually true. Also, | |
1990 | most quantities are positive, so we can make the appropriate guesses | |
1991 | about signed comparisons against zero. */ | |
726a989a | 1992 | switch (cmp) |
6de9cd9a DN |
1993 | { |
1994 | case EQ_EXPR: | |
1995 | case UNEQ_EXPR: | |
1996 | /* Floating point comparisons appear to behave in a very | |
1997 | unpredictable way because of the special role of = tests in | |
1998 | FP code. */ | |
1999 | if (FLOAT_TYPE_P (type)) | |
2000 | ; | |
2001 | /* Comparisons with 0 are often used for booleans and there is | |
2002 | nothing useful to predict about them. */ | |
726a989a | 2003 | else if (integer_zerop (op0) || integer_zerop (op1)) |
6de9cd9a DN |
2004 | ; |
2005 | else | |
2006 | predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, NOT_TAKEN); | |
2007 | break; | |
2008 | ||
2009 | case NE_EXPR: | |
d1a7edaf | 2010 | case LTGT_EXPR: |
6de9cd9a DN |
2011 | /* Floating point comparisons appear to behave in a very |
2012 | unpredictable way because of the special role of = tests in | |
2013 | FP code. */ | |
2014 | if (FLOAT_TYPE_P (type)) | |
2015 | ; | |
2016 | /* Comparisons with 0 are often used for booleans and there is | |
2017 | nothing useful to predict about them. */ | |
2018 | else if (integer_zerop (op0) | |
726a989a | 2019 | || integer_zerop (op1)) |
6de9cd9a DN |
2020 | ; |
2021 | else | |
2022 | predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, TAKEN); | |
2023 | break; | |
2024 | ||
2025 | case ORDERED_EXPR: | |
2026 | predict_edge_def (then_edge, PRED_TREE_FPOPCODE, TAKEN); | |
2027 | break; | |
2028 | ||
2029 | case UNORDERED_EXPR: | |
2030 | predict_edge_def (then_edge, PRED_TREE_FPOPCODE, NOT_TAKEN); | |
2031 | break; | |
2032 | ||
2033 | case LE_EXPR: | |
2034 | case LT_EXPR: | |
726a989a RB |
2035 | if (integer_zerop (op1) |
2036 | || integer_onep (op1) | |
2037 | || integer_all_onesp (op1) | |
2038 | || real_zerop (op1) | |
2039 | || real_onep (op1) | |
2040 | || real_minus_onep (op1)) | |
6de9cd9a DN |
2041 | predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, NOT_TAKEN); |
2042 | break; | |
2043 | ||
2044 | case GE_EXPR: | |
2045 | case GT_EXPR: | |
726a989a RB |
2046 | if (integer_zerop (op1) |
2047 | || integer_onep (op1) | |
2048 | || integer_all_onesp (op1) | |
2049 | || real_zerop (op1) | |
2050 | || real_onep (op1) | |
2051 | || real_minus_onep (op1)) | |
6de9cd9a DN |
2052 | predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, TAKEN); |
2053 | break; | |
2054 | ||
2055 | default: | |
2056 | break; | |
2057 | } | |
2058 | } | |
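/* Source-level illustration (not in the original sources):

     if (p == NULL)   pointer heuristic: predicted not taken
     if (n > 0)       opcode heuristic: predicted taken
     if (x == y)      opcode heuristic: predicted not taken

   Equality tests against 0 (likely booleans) and FP equality are
   deliberately left unpredicted.  */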
2059 | ||
bb033fd8 | 2060 | /* Try to guess whether the return value indicates an error code. */ | |
726a989a | 2061 | |
bb033fd8 JH |
2062 | static enum br_predictor |
2063 | return_prediction (tree val, enum prediction *prediction) | |
2064 | { | |
2065 | /* VOID. */ | |
2066 | if (!val) | |
2067 | return PRED_NO_PREDICTION; | |
2068 | /* Different heuristics for pointers and scalars. */ | |
2069 | if (POINTER_TYPE_P (TREE_TYPE (val))) | |
2070 | { | |
2071 | /* NULL is usually not returned. */ | |
2072 | if (integer_zerop (val)) | |
2073 | { | |
2074 | *prediction = NOT_TAKEN; | |
2075 | return PRED_NULL_RETURN; | |
2076 | } | |
2077 | } | |
2078 | else if (INTEGRAL_TYPE_P (TREE_TYPE (val))) | |
2079 | { | |
2080 | /* Negative return values are often used to indicate | |
2081 | errors. */ | |
2082 | if (TREE_CODE (val) == INTEGER_CST | |
2083 | && tree_int_cst_sgn (val) < 0) | |
2084 | { | |
2085 | *prediction = NOT_TAKEN; | |
2086 | return PRED_NEGATIVE_RETURN; | |
2087 | } | |
2088 | /* Constant return values seem to be commonly taken. | |
2089 | Zero/one often represent booleans so exclude them from the | |
2090 | heuristics. */ | |
2091 | if (TREE_CONSTANT (val) | |
2092 | && (!integer_zerop (val) && !integer_onep (val))) | |
2093 | { | |
2094 | *prediction = TAKEN; | |
75b6bb62 | 2095 | return PRED_CONST_RETURN; |
bb033fd8 JH |
2096 | } |
2097 | } | |
2098 | return PRED_NO_PREDICTION; | |
2099 | } | |
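/* Illustrative summary (not in the original sources):

     return NULL;   PRED_NULL_RETURN, paths to it predicted not taken
     return -1;     PRED_NEGATIVE_RETURN, predicted not taken
     return 42;     PRED_CONST_RETURN, predicted taken
     return 0;      no prediction (0/1 usually represent booleans)  */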
2100 | ||
2101 | /* Find the basic block with the return expression and look for a possible | |
2102 | return value, trying to apply the RETURN_PREDICTION heuristics. */ | |
2103 | static void | |
3e4b9ad0 | 2104 | apply_return_prediction (void) |
bb033fd8 | 2105 | { |
726a989a | 2106 | gimple return_stmt = NULL; |
bb033fd8 JH |
2107 | tree return_val; |
2108 | edge e; | |
726a989a | 2109 | gimple phi; |
bb033fd8 JH |
2110 | int phi_num_args, i; |
2111 | enum br_predictor pred; | |
2112 | enum prediction direction; | |
628f6a4e | 2113 | edge_iterator ei; |
bb033fd8 | 2114 | |
fefa31b5 | 2115 | FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) |
bb033fd8 JH |
2116 | { |
2117 | return_stmt = last_stmt (e->src); | |
8b11009b | 2118 | if (return_stmt |
726a989a | 2119 | && gimple_code (return_stmt) == GIMPLE_RETURN) |
bb033fd8 JH |
2120 | break; |
2121 | } | |
2122 | if (!e) | |
2123 | return; | |
726a989a | 2124 | return_val = gimple_return_retval (return_stmt); |
bb033fd8 JH |
2125 | if (!return_val) |
2126 | return; | |
bb033fd8 JH |
2127 | if (TREE_CODE (return_val) != SSA_NAME |
2128 | || !SSA_NAME_DEF_STMT (return_val) | |
726a989a | 2129 | || gimple_code (SSA_NAME_DEF_STMT (return_val)) != GIMPLE_PHI) |
bb033fd8 | 2130 | return; |
726a989a RB |
2131 | phi = SSA_NAME_DEF_STMT (return_val); |
2132 | phi_num_args = gimple_phi_num_args (phi); | |
bb033fd8 JH |
2133 | pred = return_prediction (PHI_ARG_DEF (phi, 0), &direction); |
2134 | ||
2135 | /* Avoid the degenerate case where all return values from the function | |
2136 | belong to the same category (i.e. they are all positive constants) | |
2137 | so we can hardly say anything about them. */ | |
2138 | for (i = 1; i < phi_num_args; i++) | |
2139 | if (pred != return_prediction (PHI_ARG_DEF (phi, i), &direction)) | |
2140 | break; | |
2141 | if (i != phi_num_args) | |
2142 | for (i = 0; i < phi_num_args; i++) | |
2143 | { | |
2144 | pred = return_prediction (PHI_ARG_DEF (phi, i), &direction); | |
2145 | if (pred != PRED_NO_PREDICTION) | |
5210bbc5 JH |
2146 | predict_paths_leading_to_edge (gimple_phi_arg_edge (phi, i), pred, |
2147 | direction); | |
bb033fd8 JH |
2148 | } |
2149 | } | |
2150 | ||
2151 | /* Look for basic blocks that contain unlikely-to-happen events | |
2152 | (such as noreturn calls) and mark all paths leading to execution | |
2153 | of these basic blocks as unlikely. */ | |
2154 | ||
2155 | static void | |
2156 | tree_bb_level_predictions (void) | |
2157 | { | |
2158 | basic_block bb; | |
c0ee0021 JH |
2159 | bool has_return_edges = false; |
2160 | edge e; | |
2161 | edge_iterator ei; | |
2162 | ||
fefa31b5 | 2163 | FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) |
c0ee0021 JH |
2164 | if (!(e->flags & (EDGE_ABNORMAL | EDGE_FAKE | EDGE_EH))) |
2165 | { | |
2166 | has_return_edges = true; | |
2167 | break; | |
2168 | } | |
bb033fd8 | 2169 | |
3e4b9ad0 | 2170 | apply_return_prediction (); |
bb033fd8 | 2171 | |
11cd3bed | 2172 | FOR_EACH_BB_FN (bb, cfun) |
bb033fd8 | 2173 | { |
726a989a | 2174 | gimple_stmt_iterator gsi; |
bb033fd8 | 2175 | |
7299cb99 | 2176 | for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) |
bb033fd8 | 2177 | { |
726a989a | 2178 | gimple stmt = gsi_stmt (gsi); |
52bf96d2 | 2179 | tree decl; |
daac0317 | 2180 | |
726a989a | 2181 | if (is_gimple_call (stmt)) |
bb033fd8 | 2182 | { |
c0ee0021 JH |
2183 | if ((gimple_call_flags (stmt) & ECF_NORETURN) |
2184 | && has_return_edges) | |
726a989a RB |
2185 | predict_paths_leading_to (bb, PRED_NORETURN, |
2186 | NOT_TAKEN); | |
2187 | decl = gimple_call_fndecl (stmt); | |
2188 | if (decl | |
2189 | && lookup_attribute ("cold", | |
2190 | DECL_ATTRIBUTES (decl))) | |
2191 | predict_paths_leading_to (bb, PRED_COLD_FUNCTION, | |
2192 | NOT_TAKEN); | |
bb033fd8 | 2193 | } |
726a989a RB |
2194 | else if (gimple_code (stmt) == GIMPLE_PREDICT) |
2195 | { | |
2196 | predict_paths_leading_to (bb, gimple_predict_predictor (stmt), | |
2197 | gimple_predict_outcome (stmt)); | |
7299cb99 JH |
2198 | /* Keep GIMPLE_PREDICT around so early inlining will propagate |
2199 | hints to callers. */ | |
726a989a | 2200 | } |
bb033fd8 JH |
2201 | } |
2202 | } | |
bb033fd8 JH |
2203 | } |
2204 | ||
f06b0a10 ZD |
2205 | #ifdef ENABLE_CHECKING |
2206 | ||
b787e7a2 | 2207 | /* Callback for hash_map::traverse, asserts that the pointer map is |
f06b0a10 ZD |
2208 | empty. */ |
2209 | ||
b787e7a2 TS |
2210 | bool |
2211 | assert_is_empty (const_basic_block const &, edge_prediction *const &value, | |
2212 | void *) | |
f06b0a10 | 2213 | { |
b787e7a2 | 2214 | gcc_assert (!value); |
f06b0a10 ZD |
2215 | return false; |
2216 | } | |
2217 | #endif | |
2218 | ||
8e88f9fd SP |
2219 | /* Predict branch probabilities and estimate profile for basic block BB. */ |
2220 | ||
2221 | static void | |
2222 | tree_estimate_probability_bb (basic_block bb) | |
2223 | { | |
2224 | edge e; | |
2225 | edge_iterator ei; | |
2226 | gimple last; | |
2227 | ||
2228 | FOR_EACH_EDGE (e, ei, bb->succs) | |
2229 | { | |
e45abe1f | 2230 | /* Predict edges to user labels with attributes. */ |
fefa31b5 | 2231 | if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)) |
e45abe1f RH |
2232 | { |
2233 | gimple_stmt_iterator gi; | |
2234 | for (gi = gsi_start_bb (e->dest); !gsi_end_p (gi); gsi_next (&gi)) | |
2235 | { | |
2236 | gimple stmt = gsi_stmt (gi); | |
2237 | tree decl; | |
2238 | ||
2239 | if (gimple_code (stmt) != GIMPLE_LABEL) | |
2240 | break; | |
2241 | decl = gimple_label_label (stmt); | |
2242 | if (DECL_ARTIFICIAL (decl)) | |
2243 | continue; | |
2244 | ||
2245 | /* Finally, we have a user-defined label. */ | |
2246 | if (lookup_attribute ("cold", DECL_ATTRIBUTES (decl))) | |
2247 | predict_edge_def (e, PRED_COLD_LABEL, NOT_TAKEN); | |
2248 | else if (lookup_attribute ("hot", DECL_ATTRIBUTES (decl))) | |
2249 | predict_edge_def (e, PRED_HOT_LABEL, TAKEN); | |
2250 | } | |
2251 | } | |
2252 | ||
8e88f9fd SP |
2253 | /* Predict early returns to be probable, as we've already taken |
2254 | care for error returns and other cases are often used for | |
2255 | fast paths through function. | |
2256 | ||
2257 | Since we've already removed the return statements, we are | |
2258 | looking for CFG like: | |
2259 | ||
2260 | if (conditional) | |
2261 | { | |
2262 | .. | |
2263 | goto return_block | |
2264 | } | |
2265 | some other blocks | |
2266 | return_block: | |
2267 | return_stmt. */ | |
2268 | if (e->dest != bb->next_bb | |
fefa31b5 | 2269 | && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun) |
8e88f9fd | 2270 | && single_succ_p (e->dest) |
fefa31b5 | 2271 | && single_succ_edge (e->dest)->dest == EXIT_BLOCK_PTR_FOR_FN (cfun) |
8e88f9fd SP |
2272 | && (last = last_stmt (e->dest)) != NULL |
2273 | && gimple_code (last) == GIMPLE_RETURN) | |
2274 | { | |
2275 | edge e1; | |
2276 | edge_iterator ei1; | |
2277 | ||
2278 | if (single_succ_p (bb)) | |
2279 | { | |
2280 | FOR_EACH_EDGE (e1, ei1, bb->preds) | |
2281 | if (!predicted_by_p (e1->src, PRED_NULL_RETURN) | |
2282 | && !predicted_by_p (e1->src, PRED_CONST_RETURN) | |
2283 | && !predicted_by_p (e1->src, PRED_NEGATIVE_RETURN)) | |
2284 | predict_edge_def (e1, PRED_TREE_EARLY_RETURN, NOT_TAKEN); | |
2285 | } | |
2286 | else | |
2287 | if (!predicted_by_p (e->src, PRED_NULL_RETURN) | |
2288 | && !predicted_by_p (e->src, PRED_CONST_RETURN) | |
2289 | && !predicted_by_p (e->src, PRED_NEGATIVE_RETURN)) | |
2290 | predict_edge_def (e, PRED_TREE_EARLY_RETURN, NOT_TAKEN); | |
2291 | } | |
2292 | ||
2293 | /* Look for a block we are guarding (i.e. we dominate it, | |
2294 | but it doesn't postdominate us). */ | |
fefa31b5 | 2295 | if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun) && e->dest != bb |
8e88f9fd SP |
2296 | && dominated_by_p (CDI_DOMINATORS, e->dest, e->src) |
2297 | && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest)) | |
2298 | { | |
2299 | gimple_stmt_iterator bi; | |
2300 | ||
2301 | /* The call heuristic claims that a guarded function call | |
2302 | is improbable. This is because such calls are often used | |
2303 | to signal exceptional situations such as printing error | |
2304 | messages. */ | |
2305 | for (bi = gsi_start_bb (e->dest); !gsi_end_p (bi); | |
2306 | gsi_next (&bi)) | |
2307 | { | |
2308 | gimple stmt = gsi_stmt (bi); | |
2309 | if (is_gimple_call (stmt) | |
2310 | /* Constant and pure calls are hardly used to signal | |
2311 | something exceptional. */ | |
2312 | && gimple_has_side_effects (stmt)) | |
2313 | { | |
2314 | predict_edge_def (e, PRED_CALL, NOT_TAKEN); | |
2315 | break; | |
2316 | } | |
2317 | } | |
2318 | } | |
2319 | } | |
2320 | tree_predict_by_opcode (bb); | |
2321 | } | |
2322 | ||
2323 | /* Predict branch probabilities and estimate profile of the tree CFG. | |
2324 | This function can be called from the loop optimizers to recompute | |
2325 | the profile information. */ | |
2326 | ||
2327 | void | |
6de9cd9a DN |
2328 | tree_estimate_probability (void) |
2329 | { | |
2330 | basic_block bb; | |
6de9cd9a | 2331 | |
bb033fd8 | 2332 | add_noreturn_fake_exit_edges (); |
6de9cd9a | 2333 | connect_infinite_loops_to_exit (); |
c7b852c8 ZD |
2334 | /* We use loop_niter_by_eval, which requires that the loops have |
2335 | preheaders. */ | |
2336 | create_preheaders (CP_SIMPLE_PREHEADERS); | |
6de9cd9a DN |
2337 | calculate_dominance_info (CDI_POST_DOMINATORS); |
2338 | ||
b787e7a2 | 2339 | bb_predictions = new hash_map<const_basic_block, edge_prediction *>; |
bb033fd8 | 2340 | tree_bb_level_predictions (); |
4839cb59 | 2341 | record_loop_exits (); |
8e88f9fd | 2342 | |
0fc822d0 | 2343 | if (number_of_loops (cfun) > 1) |
d73be268 | 2344 | predict_loops (); |
6de9cd9a | 2345 | |
11cd3bed | 2346 | FOR_EACH_BB_FN (bb, cfun) |
8e88f9fd | 2347 | tree_estimate_probability_bb (bb); |
6de9cd9a | 2348 | |
11cd3bed | 2349 | FOR_EACH_BB_FN (bb, cfun) |
10d22567 | 2350 | combine_predictions_for_bb (bb); |
861f9cd0 | 2351 | |
f06b0a10 | 2352 | #ifdef ENABLE_CHECKING |
b787e7a2 | 2353 | bb_predictions->traverse<void *, assert_is_empty> (NULL); |
f06b0a10 | 2354 | #endif |
b787e7a2 | 2355 | delete bb_predictions; |
f06b0a10 ZD |
2356 | bb_predictions = NULL; |
2357 | ||
67fa7880 | 2358 | estimate_bb_frequencies (false); |
6de9cd9a | 2359 | free_dominance_info (CDI_POST_DOMINATORS); |
6809cbf9 | 2360 | remove_fake_exit_edges (); |
8e88f9fd | 2361 | } |
994a57cd | 2362 | \f |
fa10beec | 2363 | /* Predict by PRED the edges to those successors of CUR whose sources are | |
3e4b9ad0 | 2364 | not postdominated by BB, and recurse to all postdominators. */ | |
bb033fd8 JH |
2365 | |
2366 | static void | |
3e4b9ad0 JH |
2367 | predict_paths_for_bb (basic_block cur, basic_block bb, |
2368 | enum br_predictor pred, | |
0f3b7e9a JH |
2369 | enum prediction taken, |
2370 | bitmap visited) | |
bb033fd8 JH |
2371 | { |
2372 | edge e; | |
628f6a4e | 2373 | edge_iterator ei; |
3e4b9ad0 | 2374 | basic_block son; |
bb033fd8 | 2375 | |
3e4b9ad0 JH |
2376 | /* We are looking for all edges forming the edge cut induced by the | |
2377 | set of all blocks postdominated by BB. */ | |
2378 | FOR_EACH_EDGE (e, ei, cur->preds) | |
2379 | if (e->src->index >= NUM_FIXED_BLOCKS | |
2380 | && !dominated_by_p (CDI_POST_DOMINATORS, e->src, bb)) | |
bb033fd8 | 2381 | { |
450997ef JH |
2382 | edge e2; |
2383 | edge_iterator ei2; | |
2384 | bool found = false; | |
2385 | ||
5210bbc5 JH |
2386 | /* Ignore fake edges and eh, we predict them as not taken anyway. */ |
2387 | if (e->flags & (EDGE_EH | EDGE_FAKE)) | |
450997ef | 2388 | continue; |
3e4b9ad0 | 2389 | gcc_assert (bb == cur || dominated_by_p (CDI_POST_DOMINATORS, cur, bb)); |
450997ef | 2390 | |
0f3b7e9a | 2391 | /* See if there is an edge from e->src that is not abnormal |
450997ef JH |
2392 | and does not lead to BB. */ |
2393 | FOR_EACH_EDGE (e2, ei2, e->src->succs) | |
2394 | if (e2 != e | |
5210bbc5 | 2395 | && !(e2->flags & (EDGE_EH | EDGE_FAKE)) |
450997ef JH |
2396 | && !dominated_by_p (CDI_POST_DOMINATORS, e2->dest, bb)) |
2397 | { | |
2398 | found = true; | |
2399 | break; | |
2400 | } | |
2401 | ||
2402 | /* If there is a non-abnormal path leaving e->src, predict the edge | |
2403 | using the predictor. Otherwise we need to look for paths | |
0f3b7e9a JH |
2404 | leading to e->src. |
2405 | ||
2406 | The second case may lead to an infinite loop when we are predicting | |
2407 | regions that are only reachable by abnormal edges. We simply | |
2408 | prevent visiting a given BB twice. */ | |
450997ef JH |
2409 | if (found) |
2410 | predict_edge_def (e, pred, taken); | |
993716bd | 2411 | else if (bitmap_set_bit (visited, e->src->index)) |
0f3b7e9a | 2412 | predict_paths_for_bb (e->src, e->src, pred, taken, visited); |
bb033fd8 | 2413 | } |
3e4b9ad0 JH |
2414 | for (son = first_dom_son (CDI_POST_DOMINATORS, cur); |
2415 | son; | |
2416 | son = next_dom_son (CDI_POST_DOMINATORS, son)) | |
0f3b7e9a | 2417 | predict_paths_for_bb (son, bb, pred, taken, visited); |
3e4b9ad0 | 2418 | } |
bb033fd8 | 2419 | |
3e4b9ad0 JH |
2420 | /* Set branch probabilities according to PRED and TAKEN on all | |
2421 | paths leading to BB. */ | |
bb033fd8 | 2422 | |
3e4b9ad0 JH |
2423 | static void |
2424 | predict_paths_leading_to (basic_block bb, enum br_predictor pred, | |
2425 | enum prediction taken) | |
2426 | { | |
0f3b7e9a JH |
2427 | bitmap visited = BITMAP_ALLOC (NULL); |
2428 | predict_paths_for_bb (bb, bb, pred, taken, visited); | |
2429 | BITMAP_FREE (visited); | |
bb033fd8 | 2430 | } |
5210bbc5 JH |
2431 | |
2432 | /* Like predict_paths_leading_to but takes an edge instead of a basic block. */ | |
2433 | ||
2434 | static void | |
2435 | predict_paths_leading_to_edge (edge e, enum br_predictor pred, | |
2436 | enum prediction taken) | |
2437 | { | |
2438 | bool has_nonloop_edge = false; | |
2439 | edge_iterator ei; | |
2440 | edge e2; | |
2441 | ||
2442 | basic_block bb = e->src; | |
2443 | FOR_EACH_EDGE (e2, ei, bb->succs) | |
2444 | if (e2->dest != e->src && e2->dest != e->dest | |
2445 | && !(e2->flags & (EDGE_EH | EDGE_FAKE)) | |
2446 | && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e2->dest)) | |
2447 | { | |
2448 | has_nonloop_edge = true; | |
2449 | break; | |
2450 | } | |
2451 | if (!has_nonloop_edge) | |
0f3b7e9a JH |
2452 | { |
2453 | bitmap visited = BITMAP_ALLOC (NULL); | |
2454 | predict_paths_for_bb (bb, bb, pred, taken, visited); | |
2455 | BITMAP_FREE (visited); | |
2456 | } | |
5210bbc5 JH |
2457 | else |
2458 | predict_edge_def (e, pred, taken); | |
2459 | } | |
969d70ca | 2460 | \f |
57cb6d52 | 2461 | /* This is used to carry information about basic blocks. It is |
861f9cd0 JH |
2462 | attached to the AUX field of the standard CFG block. */ |
2463 | ||
11478306 | 2464 | struct block_info |
861f9cd0 JH |
2465 | { |
2466 | /* Estimated frequency of execution of basic_block. */ | |
ac5e69da | 2467 | sreal frequency; |
861f9cd0 JH |
2468 | |
2469 | /* To keep queue of basic blocks to process. */ | |
2470 | basic_block next; | |
2471 | ||
eaec9b3d | 2472 | /* Number of predecessors we need to visit first. */ |
754d9299 | 2473 | int npredecessors; |
11478306 | 2474 | }; |
861f9cd0 JH |
2475 | |
2476 | /* Similar information for edges. */ | |
11478306 | 2477 | struct edge_prob_info |
861f9cd0 | 2478 | { |
569b7f6a | 2479 | /* In case the edge is a loopback edge, the probability that the edge is | |
861f9cd0 | 2480 | reached provided the header is. Estimated number of iterations of the loop can be | |
8aa18a7d | 2481 | then computed as 1 / (1 - back_edge_prob). */ |
ac5e69da | 2482 | sreal back_edge_prob; |
569b7f6a | 2483 | /* True if the edge is a loopback edge in the natural loop. */ |
2c45a16a | 2484 | unsigned int back_edge:1; |
11478306 | 2485 | }; |
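/* Worked example (illustrative): a latch edge with back_edge_prob == 0.9
   implies an estimated 1 / (1 - 0.9) == 10 iterations of the loop per
   entry.  */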
861f9cd0 | 2486 | |
11478306 | 2487 | #define BLOCK_INFO(B) ((block_info *) (B)->aux) |
59f2e9d8 | 2488 | #undef EDGE_INFO |
11478306 | 2489 | #define EDGE_INFO(E) ((edge_prob_info *) (E)->aux) |
861f9cd0 JH |
2490 | |
2491 | /* Helper function for estimate_bb_frequencies. | |
598ec7bd ZD |
2492 | Propagate the frequencies in blocks marked in |
2493 | TOVISIT, starting in HEAD. */ | |
bfdade77 | 2494 | |
861f9cd0 | 2495 | static void |
598ec7bd | 2496 | propagate_freq (basic_block head, bitmap tovisit) |
861f9cd0 | 2497 | { |
e0082a72 ZD |
2498 | basic_block bb; |
2499 | basic_block last; | |
b9af0016 | 2500 | unsigned i; |
861f9cd0 JH |
2501 | edge e; |
2502 | basic_block nextbb; | |
8a998e0c | 2503 | bitmap_iterator bi; |
247a370b | 2504 | |
eaec9b3d | 2505 | /* For each basic block we need to visit count number of his predecessors |
247a370b | 2506 | we need to visit first. */ |
8a998e0c | 2507 | EXECUTE_IF_SET_IN_BITMAP (tovisit, 0, i, bi) |
247a370b | 2508 | { |
8a998e0c JL |
2509 | edge_iterator ei; |
2510 | int count = 0; | |
2511 | ||
06e28de2 | 2512 | bb = BASIC_BLOCK_FOR_FN (cfun, i); |
bfdade77 | 2513 | |
8a998e0c JL |
2514 | FOR_EACH_EDGE (e, ei, bb->preds) |
2515 | { | |
2516 | bool visit = bitmap_bit_p (tovisit, e->src->index); | |
2517 | ||
2518 | if (visit && !(e->flags & EDGE_DFS_BACK)) | |
2519 | count++; | |
2520 | else if (visit && dump_file && !EDGE_INFO (e)->back_edge) | |
2521 | fprintf (dump_file, | |
2522 | "Irreducible region hit, ignoring edge to %i->%i\n", | |
2523 | e->src->index, bb->index); | |
247a370b | 2524 | } |
b9af0016 | 2525 | BLOCK_INFO (bb)->npredecessors = count; |
b35366ce | 2526 | /* When the function never returns, we will never process the exit block. */ | |
fefa31b5 | 2527 | if (!count && bb == EXIT_BLOCK_PTR_FOR_FN (cfun)) |
b35366ce | 2528 | bb->count = bb->frequency = 0; |
247a370b | 2529 | } |
861f9cd0 | 2530 | |
8aa18a7d | 2531 | memcpy (&BLOCK_INFO (head)->frequency, &real_one, sizeof (real_one)); |
e0082a72 ZD |
2532 | last = head; |
2533 | for (bb = head; bb; bb = nextbb) | |
861f9cd0 | 2534 | { |
628f6a4e | 2535 | edge_iterator ei; |
ac5e69da | 2536 | sreal cyclic_probability, frequency; |
8aa18a7d JH |
2537 | |
2538 | memcpy (&cyclic_probability, &real_zero, sizeof (real_zero)); | |
2539 | memcpy (&frequency, &real_zero, sizeof (real_zero)); | |
861f9cd0 JH |
2540 | |
2541 | nextbb = BLOCK_INFO (bb)->next; | |
2542 | BLOCK_INFO (bb)->next = NULL; | |
2543 | ||
2544 | /* Compute frequency of basic block. */ | |
2545 | if (bb != head) | |
2546 | { | |
247a370b | 2547 | #ifdef ENABLE_CHECKING |
628f6a4e | 2548 | FOR_EACH_EDGE (e, ei, bb->preds) |
e16acfcd NS |
2549 | gcc_assert (!bitmap_bit_p (tovisit, e->src->index) |
2550 | || (e->flags & EDGE_DFS_BACK)); | |
247a370b | 2551 | #endif |
861f9cd0 | 2552 | |
628f6a4e | 2553 | FOR_EACH_EDGE (e, ei, bb->preds) |
861f9cd0 | 2554 | if (EDGE_INFO (e)->back_edge) |
8aa18a7d | 2555 | { |
ac5e69da JZ |
2556 | sreal_add (&cyclic_probability, &cyclic_probability, |
2557 | &EDGE_INFO (e)->back_edge_prob); | |
8aa18a7d | 2558 | } |
247a370b | 2559 | else if (!(e->flags & EDGE_DFS_BACK)) |
8aa18a7d | 2560 | { |
ac5e69da | 2561 | sreal tmp; |
8aa18a7d JH |
2562 | |
2563 | /* frequency += (e->probability | |
2564 | * BLOCK_INFO (e->src)->frequency / | |
2565 | REG_BR_PROB_BASE); */ | |
2566 | ||
ac5e69da JZ |
2567 | sreal_init (&tmp, e->probability, 0); |
2568 | sreal_mul (&tmp, &tmp, &BLOCK_INFO (e->src)->frequency); | |
2569 | sreal_mul (&tmp, &tmp, &real_inv_br_prob_base); | |
2570 | sreal_add (&frequency, &frequency, &tmp); | |
8aa18a7d JH |
2571 | } |
2572 | ||
ac5e69da JZ |
2573 | if (sreal_compare (&cyclic_probability, &real_zero) == 0) |
2574 | { | |
2575 | memcpy (&BLOCK_INFO (bb)->frequency, &frequency, | |
2576 | sizeof (frequency)); | |
2577 | } | |
fbe3b30b SB |
2578 | else |
2579 | { | |
ac5e69da JZ |
2580 | if (sreal_compare (&cyclic_probability, &real_almost_one) > 0) |
2581 | { | |
2582 | memcpy (&cyclic_probability, &real_almost_one, | |
2583 | sizeof (real_almost_one)); | |
2584 | } | |
861f9cd0 | 2585 | |
79a490a9 | 2586 | /* BLOCK_INFO (bb)->frequency = frequency |
ac5e69da | 2587 | / (1 - cyclic_probability) */ |
861f9cd0 | 2588 | |
ac5e69da JZ |
2589 | sreal_sub (&cyclic_probability, &real_one, &cyclic_probability); |
2590 | sreal_div (&BLOCK_INFO (bb)->frequency, | |
2591 | &frequency, &cyclic_probability); | |
fbe3b30b | 2592 | } |
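/* Worked example (illustrative): a loop header entered with frequency 1.0
   whose back edges contribute cyclic_probability == 0.75 gets frequency
   1.0 / (1 - 0.75) == 4.0, i.e. four expected header executions per
   entry into the loop.  */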
861f9cd0 JH |
2593 | } |
2594 | ||
8a998e0c | 2595 | bitmap_clear_bit (tovisit, bb->index); |
861f9cd0 | 2596 | |
9ff3d2de JL |
2597 | e = find_edge (bb, head); |
2598 | if (e) | |
2599 | { | |
2600 | sreal tmp; | |
b8698a0f | 2601 | |
9ff3d2de JL |
2602 | /* EDGE_INFO (e)->back_edge_prob |
2603 | = ((e->probability * BLOCK_INFO (bb)->frequency) | |
2604 | / REG_BR_PROB_BASE); */ | |
b8698a0f | 2605 | |
9ff3d2de JL |
2606 | sreal_init (&tmp, e->probability, 0); |
2607 | sreal_mul (&tmp, &tmp, &BLOCK_INFO (bb)->frequency); | |
2608 | sreal_mul (&EDGE_INFO (e)->back_edge_prob, | |
2609 | &tmp, &real_inv_br_prob_base); | |
2610 | } | |
861f9cd0 | 2611 | |
57cb6d52 | 2612 | /* Propagate to successor blocks. */ |
628f6a4e | 2613 | FOR_EACH_EDGE (e, ei, bb->succs) |
247a370b | 2614 | if (!(e->flags & EDGE_DFS_BACK) |
754d9299 | 2615 | && BLOCK_INFO (e->dest)->npredecessors) |
861f9cd0 | 2616 | { |
754d9299 JM |
2617 | BLOCK_INFO (e->dest)->npredecessors--; |
2618 | if (!BLOCK_INFO (e->dest)->npredecessors) | |
247a370b JH |
2619 | { |
2620 | if (!nextbb) | |
2621 | nextbb = e->dest; | |
2622 | else | |
2623 | BLOCK_INFO (last)->next = e->dest; | |
b8698a0f | 2624 | |
247a370b JH |
2625 | last = e->dest; |
2626 | } | |
628f6a4e | 2627 | } |
861f9cd0 JH |
2628 | } |
2629 | } | |
2630 | ||
67fa7880 | 2631 | /* Estimate frequencies in loops at the same nest level. */ | |
bfdade77 | 2632 | |
861f9cd0 | 2633 | static void |
598ec7bd | 2634 | estimate_loops_at_level (struct loop *first_loop) |
861f9cd0 | 2635 | { |
2ecfd709 | 2636 | struct loop *loop; |
861f9cd0 JH |
2637 | |
2638 | for (loop = first_loop; loop; loop = loop->next) | |
2639 | { | |
861f9cd0 | 2640 | edge e; |
2ecfd709 | 2641 | basic_block *bbs; |
3d436d2a | 2642 | unsigned i; |
598ec7bd | 2643 | bitmap tovisit = BITMAP_ALLOC (NULL); |
861f9cd0 | 2644 | |
598ec7bd | 2645 | estimate_loops_at_level (loop->inner); |
79a490a9 | 2646 | |
598ec7bd ZD |
2647 | /* Find current loop back edge and mark it. */ |
2648 | e = loop_latch_edge (loop); | |
2649 | EDGE_INFO (e)->back_edge = 1; | |
2ecfd709 ZD |
2650 | |
2651 | bbs = get_loop_body (loop); | |
2652 | for (i = 0; i < loop->num_nodes; i++) | |
8a998e0c | 2653 | bitmap_set_bit (tovisit, bbs[i]->index); |
2ecfd709 | 2654 | free (bbs); |
598ec7bd ZD |
2655 | propagate_freq (loop->header, tovisit); |
2656 | BITMAP_FREE (tovisit); | |
861f9cd0 JH |
2657 | } |
2658 | } | |
2659 | ||
2f8e468b | 2660 | /* Propagate frequencies through the structure of loops. */ | |
598ec7bd ZD |
2661 | |
2662 | static void | |
d73be268 | 2663 | estimate_loops (void) |
598ec7bd ZD |
2664 | { |
2665 | bitmap tovisit = BITMAP_ALLOC (NULL); | |
2666 | basic_block bb; | |
2667 | ||
2668 | /* Start by estimating the frequencies in the loops. */ | |
0fc822d0 | 2669 | if (number_of_loops (cfun) > 1) |
d73be268 | 2670 | estimate_loops_at_level (current_loops->tree_root->inner); |
598ec7bd ZD |
2671 | |
2672 | /* Now propagate the frequencies through all the blocks. */ | |
04a90bec | 2673 | FOR_ALL_BB_FN (bb, cfun) |
598ec7bd ZD |
2674 | { |
2675 | bitmap_set_bit (tovisit, bb->index); | |
2676 | } | |
fefa31b5 | 2677 | propagate_freq (ENTRY_BLOCK_PTR_FOR_FN (cfun), tovisit); |
598ec7bd ZD |
2678 | BITMAP_FREE (tovisit); |
2679 | } | |
2680 | ||
eb4b92c1 | 2681 | /* Drop the profile for NODE to guessed, and update its frequency based on |
4c7d0777 | 2682 | whether it is expected to be hot given the CALL_COUNT. */ |
eb4b92c1 TJ |
2683 | |
2684 | static void | |
4c7d0777 | 2685 | drop_profile (struct cgraph_node *node, gcov_type call_count) |
eb4b92c1 TJ |
2686 | { |
2687 | struct function *fn = DECL_STRUCT_FUNCTION (node->decl); | |
4c7d0777 TJ |
2688 | /* In the case where this was called by another function with a |
2689 | dropped profile, call_count will be 0. Since there are no | |
2690 | non-zero call counts to this function, we don't know for sure | |
2691 | whether it is hot, and therefore it will be marked normal below. */ | |
2692 | bool hot = maybe_hot_count_p (NULL, call_count); | |
eb4b92c1 TJ |
2693 | |
2694 | if (dump_file) | |
2695 | fprintf (dump_file, | |
2696 | "Dropping 0 profile for %s/%i. %s based on calls.\n", | |
fec39fa6 | 2697 | node->name (), node->order, |
eb4b92c1 TJ |
2698 | hot ? "Function is hot" : "Function is normal"); |
2699 | /* We only expect to miss profiles for functions that are reached | |
2700 | via non-zero call edges in cases where the function may have | |
2701 | been linked from another module or library (COMDATs and extern | |
4c7d0777 TJ |
2702 | templates). See the comments below for handle_missing_profiles. |
2703 | Also, only warn in cases where the missing counts exceed the | |
2704 | number of training runs. In certain cases with an execv followed | |
2705 | by a no-return call the profile for the no-return call is not | |
2706 | dumped and there can be a mismatch. */ | |
2707 | if (!DECL_COMDAT (node->decl) && !DECL_EXTERNAL (node->decl) | |
2708 | && call_count > profile_info->runs) | |
eb4b92c1 TJ |
2709 | { |
2710 | if (flag_profile_correction) | |
2711 | { | |
2712 | if (dump_file) | |
2713 | fprintf (dump_file, | |
2714 | "Missing counts for called function %s/%i\n", | |
fec39fa6 | 2715 | node->name (), node->order); |
eb4b92c1 TJ |
2716 | } |
2717 | else | |
4c7d0777 | 2718 | warning (0, "Missing counts for called function %s/%i", |
fec39fa6 | 2719 | node->name (), node->order); |
eb4b92c1 TJ |
2720 | } |
2721 | ||
ea19eb9f | 2722 | profile_status_for_fn (fn) |
eb4b92c1 TJ |
2723 | = (flag_guess_branch_prob ? PROFILE_GUESSED : PROFILE_ABSENT); |
2724 | node->frequency | |
2725 | = hot ? NODE_FREQUENCY_HOT : NODE_FREQUENCY_NORMAL; | |
2726 | } | |
2727 | ||
2728 | /* In the case of COMDAT routines, multiple object files will contain the same | |
2729 | function and the linker will select one for the binary. In that case | |
2730 | all the other copies from the profile-instrumented binary will be missing | |
2731 | profile counts. Look for cases where this happened, due to non-zero | |
2732 | call counts going to 0-count functions, and drop the profile to guessed | |
2733 | so that we can use the estimated probabilities and avoid optimizing only | |
2734 | for size. | |
2735 | ||
2736 | The other case where the profile may be missing is when the routine | |
2737 | is not going to be emitted to the object file, e.g. for "extern template" | |
2738 | class methods. Those will be marked DECL_EXTERNAL. Emit a warning in | |
2739 | all other cases of non-zero calls to 0-count functions. */ | |
2740 | ||
2741 | void | |
2742 | handle_missing_profiles (void) | |
2743 | { | |
2744 | struct cgraph_node *node; | |
2745 | int unlikely_count_fraction = PARAM_VALUE (UNLIKELY_BB_COUNT_FRACTION); | |
2746 | vec<struct cgraph_node *> worklist; | |
2747 | worklist.create (64); | |
2748 | ||
2749 | /* See if a 0-count function has non-0-count callers. In this case we | |
2750 | lost some profile. Drop its function profile to PROFILE_GUESSED. */ | |
2751 | FOR_EACH_DEFINED_FUNCTION (node) | |
2752 | { | |
2753 | struct cgraph_edge *e; | |
2754 | gcov_type call_count = 0; | |
9cec31f4 | 2755 | gcov_type max_tp_first_run = 0; |
eb4b92c1 TJ |
2756 | struct function *fn = DECL_STRUCT_FUNCTION (node->decl); |
2757 | ||
2758 | if (node->count) | |
2759 | continue; | |
2760 | for (e = node->callers; e; e = e->next_caller) | |
9cec31f4 | 2761 | { |
eb4b92c1 | 2762 | call_count += e->count; |
9cec31f4 ML |
2763 | |
2764 | if (e->caller->tp_first_run > max_tp_first_run) | |
2765 | max_tp_first_run = e->caller->tp_first_run; | |
2766 | } | |
2767 | ||
2768 | /* If the time profile is missing, assign the maximum that comes from | |
2769 | the caller functions. */ | |
2770 | if (!node->tp_first_run && max_tp_first_run) | |
2771 | node->tp_first_run = max_tp_first_run + 1; | |
2772 | ||
eb4b92c1 TJ |
2773 | if (call_count |
2774 | && fn && fn->cfg | |
2775 | && (call_count * unlikely_count_fraction >= profile_info->runs)) | |
2776 | { | |
4c7d0777 | 2777 | drop_profile (node, call_count); |
eb4b92c1 TJ |
2778 | worklist.safe_push (node); |
2779 | } | |
2780 | } | |
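/* Worked example (illustrative; 20 is the usual default of
   --param unlikely-bb-count-fraction): a 0-count function whose callers
   sum to call_count == 5 over profile_info->runs == 100 training runs
   satisfies 5 * 20 >= 100, so its profile is dropped to guessed.  */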
2781 | ||
2782 | /* Propagate the profile dropping to other 0-count COMDATs that are | |
2783 | potentially called by COMDATs we already dropped the profile on. */ | |
2784 | while (worklist.length () > 0) | |
2785 | { | |
2786 | struct cgraph_edge *e; | |
2787 | ||
2788 | node = worklist.pop (); | |
2790 | for (e = node->callees; e; e = e->next_callee) | |
2790 | { | |
2791 | struct cgraph_node *callee = e->callee; | |
2792 | struct function *fn = DECL_STRUCT_FUNCTION (callee->decl); | |
2793 | ||
2794 | if (callee->count > 0) | |
2795 | continue; | |
2796 | if (DECL_COMDAT (callee->decl) && fn && fn->cfg | |
ea19eb9f | 2797 | && profile_status_for_fn (fn) == PROFILE_READ) |
eb4b92c1 | 2798 | { |
4c7d0777 | 2799 | drop_profile (node, 0); |
eb4b92c1 TJ |
2800 | worklist.safe_push (callee); |
2801 | } | |
2802 | } | |
2803 | } | |
2804 | worklist.release (); | |
2805 | } | |
2806 | ||
02307675 R |
2807 | /* Convert counts measured by profile driven feedback to frequencies. |
2808 | Return nonzero iff there was any nonzero execution count. */ | |
bfdade77 | 2809 | |
bbd236a1 | 2810 | int |
79a490a9 | 2811 | counts_to_freqs (void) |
861f9cd0 | 2812 | { |
02307675 | 2813 | gcov_type count_max, true_count_max = 0; |
e0082a72 | 2814 | basic_block bb; |
0b17ab2f | 2815 | |
eb4b92c1 TJ |
2816 | /* Don't overwrite the estimated frequencies when the profile for |
2817 | the function is missing. We may drop this function to PROFILE_GUESSED | |
2818 | later in drop_profile (). */ | |
be3c16c4 | 2819 | if (!flag_auto_profile && !ENTRY_BLOCK_PTR_FOR_FN (cfun)->count) |
eb4b92c1 TJ |
2820 | return 0; |
2821 | ||
fefa31b5 | 2822 | FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb) |
02307675 | 2823 | true_count_max = MAX (bb->count, true_count_max); |
861f9cd0 | 2824 | |
02307675 | 2825 | count_max = MAX (true_count_max, 1); |
fefa31b5 | 2826 | FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb) |
e0082a72 | 2827 | bb->frequency = (bb->count * BB_FREQ_MAX + count_max / 2) / count_max; |
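/* Worked example (illustrative; BB_FREQ_MAX is 10000): with
   count_max == 1000 and bb->count == 250 the scaled frequency is
   (250 * 10000 + 500) / 1000 == 2500, a quarter of the hottest block.  */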
6bad2617 | 2828 | |
02307675 | 2829 | return true_count_max; |
861f9cd0 JH |
2830 | } |
2831 | ||
bfdade77 RK |
2832 | /* Return true if the function is likely to be expensive, so there is no point | |
2833 | in optimizing the performance of the prologue or epilogue, or doing inlining | |
2834 | at the expense of code size growth. THRESHOLD is the limit on the number of | |
bfdade77 RK |
2835 | instructions the function can execute on average to still be considered not expensive. */ | |
2836 | ||
6ab16dd9 | 2837 | bool |
79a490a9 | 2838 | expensive_function_p (int threshold) |
6ab16dd9 JH |
2839 | { |
2840 | unsigned int sum = 0; | |
e0082a72 | 2841 | basic_block bb; |
5197bd50 | 2842 | unsigned int limit; |
6ab16dd9 JH |
2843 | |
2844 | /* We can not compute accurately for large thresholds due to scaled | |
2845 | frequencies. */ | |
e16acfcd | 2846 | gcc_assert (threshold <= BB_FREQ_MAX); |
6ab16dd9 | 2847 | |
eaec9b3d | 2848 | /* Frequencies are out of range. This either means that the function contains | |
6ab16dd9 JH |
2849 | an internal loop executing more than BB_FREQ_MAX times, or profile feedback | |
2850 | is available and the function has not been executed at all. */ | |
fefa31b5 | 2851 | if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency == 0) |
6ab16dd9 | 2852 | return true; |
6a4d6760 | 2853 | |
6ab16dd9 | 2854 | /* Maximally BB_FREQ_MAX^2 so overflow won't happen. */ |
fefa31b5 | 2855 | limit = ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency * threshold; |
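/* Illustration (not in the original sources): accumulating bb->frequency
   once per active insn and comparing the sum against
   entry_frequency * THRESHOLD asks, without any division, whether the
   estimated number of instructions executed per invocation exceeds
   THRESHOLD.  */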
11cd3bed | 2856 | FOR_EACH_BB_FN (bb, cfun) |
6ab16dd9 | 2857 | { |
9f215bf5 | 2858 | rtx_insn *insn; |
6ab16dd9 | 2859 | |
39718607 | 2860 | FOR_BB_INSNS (bb, insn) |
bfdade77 RK |
2861 | if (active_insn_p (insn)) |
2862 | { | |
2863 | sum += bb->frequency; | |
2864 | if (sum > limit) | |
2865 | return true; | |
6ab16dd9 JH |
2866 | } |
2867 | } | |
bfdade77 | 2868 | |
6ab16dd9 JH |
2869 | return false; |
2870 | } | |

/* Estimate and propagate basic block frequencies using the given branch
   probabilities.  If FORCE is true, the frequencies are used to estimate
   the counts even when there are already non-zero profile counts.  */

void
estimate_bb_frequencies (bool force)
{
  basic_block bb;
  sreal freq_max;

  if (force || profile_status_for_fn (cfun) != PROFILE_READ
      || !counts_to_freqs ())
    {
      static int real_values_initialized = 0;

      if (!real_values_initialized)
	{
	  real_values_initialized = 1;
	  sreal_init (&real_zero, 0, 0);
	  sreal_init (&real_one, 1, 0);
	  sreal_init (&real_br_prob_base, REG_BR_PROB_BASE, 0);
	  sreal_init (&real_bb_freq_max, BB_FREQ_MAX, 0);
	  sreal_init (&real_one_half, 1, -1);
	  sreal_div (&real_inv_br_prob_base, &real_one, &real_br_prob_base);
	  sreal_sub (&real_almost_one, &real_one, &real_inv_br_prob_base);
	}

      mark_dfs_back_edges ();

      single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->probability =
	 REG_BR_PROB_BASE;

      /* Set up block info for each basic block.  */
      alloc_aux_for_blocks (sizeof (block_info));
      alloc_aux_for_edges (sizeof (edge_prob_info));
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
	{
	  edge e;
	  edge_iterator ei;

	  FOR_EACH_EDGE (e, ei, bb->succs)
	    {
	      sreal_init (&EDGE_INFO (e)->back_edge_prob, e->probability, 0);
	      sreal_mul (&EDGE_INFO (e)->back_edge_prob,
			 &EDGE_INFO (e)->back_edge_prob,
			 &real_inv_br_prob_base);
	    }
	}

      /* First compute frequencies locally for each loop from innermost
	 to outermost to examine frequencies for back edges.  */
      estimate_loops ();

      memcpy (&freq_max, &real_zero, sizeof (real_zero));
      FOR_EACH_BB_FN (bb, cfun)
	if (sreal_compare (&freq_max, &BLOCK_INFO (bb)->frequency) < 0)
	  memcpy (&freq_max, &BLOCK_INFO (bb)->frequency, sizeof (freq_max));

      sreal_div (&freq_max, &real_bb_freq_max, &freq_max);
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
	{
	  sreal tmp;

	  sreal_mul (&tmp, &BLOCK_INFO (bb)->frequency, &freq_max);
	  sreal_add (&tmp, &tmp, &real_one_half);
	  bb->frequency = sreal_to_int (&tmp);
	}

      free_aux_for_blocks ();
      free_aux_for_edges ();
    }
  compute_function_frequency ();
}
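
/* A self-contained sketch (not GCC code) of the final normalization step
   above, with double standing in for the soft-float sreal type: the
   relative frequencies produced by the propagation are rescaled so the
   hottest block lands exactly on the frequency ceiling, rounding to
   nearest by adding one half before truncation.  */
#if 0
static void
normalize_freqs (double *freq, int n_blocks, int bb_freq_max)
{
  double freq_max = 0.0;
  int i;

  for (i = 0; i < n_blocks; i++)
    if (freq[i] > freq_max)
      freq_max = freq[i];
  if (freq_max == 0.0)
    return;

  /* Corresponds to the sreal_div / sreal_mul / sreal_add calls above.  */
  double scale = (double) bb_freq_max / freq_max;
  for (i = 0; i < n_blocks; i++)
    freq[i] = (double) (int) (freq[i] * scale + 0.5);
}
#endif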

/* Decide whether the function is hot, cold or unlikely executed.  */
void
compute_function_frequency (void)
{
  basic_block bb;
  struct cgraph_node *node = cgraph_node::get (current_function_decl);

  if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
      || MAIN_NAME_P (DECL_NAME (current_function_decl)))
    node->only_called_at_startup = true;
  if (DECL_STATIC_DESTRUCTOR (current_function_decl))
    node->only_called_at_exit = true;

  if (profile_status_for_fn (cfun) != PROFILE_READ)
    {
      int flags = flags_from_decl_or_type (current_function_decl);
      if (lookup_attribute ("cold", DECL_ATTRIBUTES (current_function_decl))
	  != NULL)
	node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
      else if (lookup_attribute ("hot", DECL_ATTRIBUTES (current_function_decl))
	       != NULL)
	node->frequency = NODE_FREQUENCY_HOT;
      else if (flags & ECF_NORETURN)
	node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
      else if (MAIN_NAME_P (DECL_NAME (current_function_decl)))
	node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
      else if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
	       || DECL_STATIC_DESTRUCTOR (current_function_decl))
	node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
      return;
    }

  /* Only the first time do we try to drop the function to unlikely
     executed; after inlining, roundoff errors may confuse us.  The
     ipa-profile pass will drop functions only called from unlikely
     functions to unlikely, and that covers most of what we care about.  */
  if (!cfun->after_inlining)
    node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
  FOR_EACH_BB_FN (bb, cfun)
    {
      if (maybe_hot_bb_p (cfun, bb))
	{
	  node->frequency = NODE_FREQUENCY_HOT;
	  return;
	}
      if (!probably_never_executed_bb_p (cfun, bb))
	node->frequency = NODE_FREQUENCY_NORMAL;
    }
}
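
/* Illustration (not part of this file): with no profile data read, the
   attribute checks above classify declarations such as

     __attribute__ ((cold)) void rarely_used (void);
     __attribute__ ((hot))  void inner_kernel (void);

   as NODE_FREQUENCY_UNLIKELY_EXECUTED and NODE_FREQUENCY_HOT
   respectively; the function names are made up for the example.  */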

/* Build a PREDICT_EXPR tree node for PREDICTOR with outcome TAKEN.  */
tree
build_predict_expr (enum br_predictor predictor, enum prediction taken)
{
  tree t = build1 (PREDICT_EXPR, void_type_node,
		   build_int_cst (integer_type_node, predictor));
  SET_PREDICT_EXPR_OUTCOME (t, taken);
  return t;
}

/* Return the human-readable name of PREDICTOR.  */
const char *
predictor_name (enum br_predictor predictor)
{
  return predictor_info[predictor].name;
}
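
/* A hedged usage sketch (not code from this file): a front end that wants
   to record a "this branch is taken" hint can build the tree node like so,
   using the existing PRED_BUILTIN_EXPECT predictor and TAKEN outcome;
   attaching the node to a statement list is omitted here.  */
#if 0
  tree hint = build_predict_expr (PRED_BUILTIN_EXPECT, TAKEN);
  /* predictor_name (PRED_BUILTIN_EXPECT) returns the name shown in dump
     files for this heuristic.  */
#endif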

/* Predict branch probabilities and estimate the profile of the tree CFG.  */

namespace {

const pass_data pass_data_profile =
{
  GIMPLE_PASS, /* type */
  "profile_estimate", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_BRANCH_PROB, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_profile : public gimple_opt_pass
{
public:
  pass_profile (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_profile, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_guess_branch_prob; }
  virtual unsigned int execute (function *);

}; // class pass_profile

unsigned int
pass_profile::execute (function *fun)
{
  unsigned nb_loops;

  loop_optimizer_init (LOOPS_NORMAL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    flow_loops_dump (dump_file, NULL, 0);

  mark_irreducible_loops ();

  nb_loops = number_of_loops (fun);
  if (nb_loops > 1)
    scev_initialize ();

  tree_estimate_probability ();

  if (nb_loops > 1)
    scev_finalize ();

  loop_optimizer_finalize ();
  if (dump_file && (dump_flags & TDF_DETAILS))
    gimple_dump_cfg (dump_file, dump_flags);
  if (profile_status_for_fn (fun) == PROFILE_ABSENT)
    profile_status_for_fn (fun) = PROFILE_GUESSED;
  return 0;
}
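
/* Note: number_of_loops () includes the pseudo-loop that represents the
   whole function body, so the "nb_loops > 1" checks above initialize
   scalar evolutions only when at least one real loop exists.  */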

} // anon namespace

gimple_opt_pass *
make_pass_profile (gcc::context *ctxt)
{
  return new pass_profile (ctxt);
}

namespace {

const pass_data pass_data_strip_predict_hints =
{
  GIMPLE_PASS, /* type */
  "*strip_predict_hints", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_BRANCH_PROB, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_strip_predict_hints : public gimple_opt_pass
{
public:
  pass_strip_predict_hints (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_strip_predict_hints, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_strip_predict_hints (m_ctxt); }
  virtual unsigned int execute (function *);

}; // class pass_strip_predict_hints

/* Get rid of all builtin_expect calls and GIMPLE_PREDICT statements
   we no longer need.  */
unsigned int
pass_strip_predict_hints::execute (function *fun)
{
  basic_block bb;
  gimple ass_stmt;
  tree var;

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator bi;
      for (bi = gsi_start_bb (bb); !gsi_end_p (bi);)
	{
	  gimple stmt = gsi_stmt (bi);

	  if (gimple_code (stmt) == GIMPLE_PREDICT)
	    {
	      gsi_remove (&bi, true);
	      continue;
	    }
	  else if (is_gimple_call (stmt))
	    {
	      tree fndecl = gimple_call_fndecl (stmt);

	      if ((fndecl
		   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
		   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
		   && gimple_call_num_args (stmt) == 2)
		  || (gimple_call_internal_p (stmt)
		      && gimple_call_internal_fn (stmt) == IFN_BUILTIN_EXPECT))
		{
		  var = gimple_call_lhs (stmt);
		  if (var)
		    {
		      ass_stmt
			= gimple_build_assign (var, gimple_call_arg (stmt, 0));
		      gsi_replace (&bi, ass_stmt, true);
		    }
		  else
		    {
		      gsi_remove (&bi, true);
		      continue;
		    }
		}
	    }
	  gsi_next (&bi);
	}
    }
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_strip_predict_hints (gcc::context *ctxt)
{
  return new pass_strip_predict_hints (ctxt);
}
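
/* Illustration (not part of this file) of what the pass above does at the
   source level: once the hint has been consumed by profile estimation, a
   __builtin_expect call survives only as a plain copy of its first
   argument, and standalone hints disappear entirely.  */
#if 0
int
f (int n)
{
  int likely = __builtin_expect (n > 0, 1);   /* becomes: likely = n > 0; */
  return likely;
}
#endif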

/* Rebuild function frequencies.  Passes are in general expected to
   maintain the profile by hand, but in some cases this is not possible:
   for example, when inlining several functions with loops, frequencies
   might run out of scale and thus need to be recomputed.  */

void
rebuild_frequencies (void)
{
  timevar_push (TV_REBUILD_FREQUENCIES);

  /* When the max bb count in the function is small, there is a higher
     chance that there were truncation errors in the integer scaling
     of counts by inlining and other optimizations.  This could lead
     to incorrect classification of code as being cold when it isn't.
     In that case, force the estimation of bb counts/frequencies from the
     branch probabilities, rather than computing frequencies from counts,
     which may also lead to frequencies incorrectly reduced to 0.  There
     is less precision in the probabilities, so we only do this for small
     max counts.  */
  gcov_type count_max = 0;
  basic_block bb;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
    count_max = MAX (bb->count, count_max);

  if (profile_status_for_fn (cfun) == PROFILE_GUESSED
      || (!flag_auto_profile && profile_status_for_fn (cfun) == PROFILE_READ
	  && count_max < REG_BR_PROB_BASE / 10))
    {
      loop_optimizer_init (0);
      add_noreturn_fake_exit_edges ();
      mark_irreducible_loops ();
      connect_infinite_loops_to_exit ();
      estimate_bb_frequencies (true);
      remove_fake_exit_edges ();
      loop_optimizer_finalize ();
    }
  else if (profile_status_for_fn (cfun) == PROFILE_READ)
    counts_to_freqs ();
  else
    gcc_unreachable ();
  timevar_pop (TV_REBUILD_FREQUENCIES);
}
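
/* With the usual REG_BR_PROB_BASE of 10000, the count_max guard above
   re-estimates even a read profile whenever the hottest block executed
   fewer than 1000 times, since truncation noise from integer scaling is
   then significant relative to the counts themselves.  */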