/* Branch prediction routines for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* References:

   [1] "Branch Prediction for Free"
       Ball and Larus; PLDI '93.
   [2] "Static Branch Frequency and Program Profile Analysis"
       Wu and Larus; MICRO-27.
   [3] "Corpus-based Static Branch Prediction"
       Calder, Grunwald, Lindsay, Martin, Mozer, and Zorn; PLDI '95.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "recog.h"
#include "expr.h"
#include "predict.h"
#include "coverage.h"
#include "sreal.h"
#include "params.h"
#include "target.h"
#include "cfgloop.h"
#include "tree-flow.h"
#include "ggc.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "timevar.h"
#include "tree-scalar-evolution.h"
#include "cfgloop.h"

/* Real constants: 0, 1, 1-1/REG_BR_PROB_BASE, REG_BR_PROB_BASE,
   1/REG_BR_PROB_BASE, 0.5, BB_FREQ_MAX.  */
static sreal real_zero, real_one, real_almost_one, real_br_prob_base,
             real_inv_br_prob_base, real_one_half, real_bb_freq_max;

/* Names for some commonly used probability guesses.  */
#define PROB_VERY_UNLIKELY  (REG_BR_PROB_BASE / 10 - 1)
#define PROB_EVEN           (REG_BR_PROB_BASE / 2)
#define PROB_VERY_LIKELY    (REG_BR_PROB_BASE - PROB_VERY_UNLIKELY)
#define PROB_ALWAYS         (REG_BR_PROB_BASE)

static void combine_predictions_for_insn (rtx, basic_block);
static void dump_prediction (FILE *, enum br_predictor, int, basic_block, int);
static void estimate_loops_at_level (struct loop *loop);
static void propagate_freq (struct loop *);
static void estimate_bb_frequencies (struct loops *);
static int counts_to_freqs (void);
static bool last_basic_block_p (basic_block);
static void compute_function_frequency (void);
static void choose_function_section (void);
static bool can_predict_insn_p (rtx);

/* Information we hold about each branch predictor.
   Filled using information from predict.def.  */

struct predictor_info
{
  const char *const name;   /* Name used in the debugging dumps.  */
  const int hitrate;        /* Expected hitrate used by
                               predict_insn_def call.  */
  const int flags;
};

/* Use given predictor without Dempster-Shafer theory if it matches
   using first_match heuristics.  */
#define PRED_FLAG_FIRST_MATCH 1

/* Recompute hitrate in percent to our representation.  */

#define HITRATE(VAL) ((int) ((VAL) * REG_BR_PROB_BASE + 50) / 100)
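
/* For instance, assuming REG_BR_PROB_BASE is 10000 (its usual value),
   HITRATE (79) yields (79 * 10000 + 50) / 100 == 7900, i.e. a predictor
   expected to be right about 79% of the time.  */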

#define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) {NAME, HITRATE, FLAGS},
static const struct predictor_info predictor_info[] = {
#include "predict.def"

  /* Upper bound on predictors.  */
  {NULL, 0, 0}
};
#undef DEF_PREDICTOR
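
/* Each line of predict.def supplies one entry via the DEF_PREDICTOR macro
   above; an entry has the shape (the hit rate shown here is illustrative
   only, not the value actually used in predict.def)

     DEF_PREDICTOR (PRED_CALL, "call", HITRATE (70), 0)

   pairing the enum value with its dump name, expected hit rate and flags.  */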

/* Return true in case BB can be CPU intensive and should be optimized
   for maximal performance.  */
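/* (As the code below shows, a block is considered hot unless profile
   feedback puts its count below sum_max / HOT_BB_COUNT_FRACTION, or its
   estimated frequency falls below BB_FREQ_MAX / HOT_BB_FREQUENCY_FRACTION;
   both fractions are --param knobs.)  */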

bool
maybe_hot_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities
      && (bb->count
          < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
    return false;
  if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
    return false;
  return true;
}

/* Return true in case BB is cold and should be optimized for size.  */

bool
probably_cold_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities
      && (bb->count
          < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
    return true;
  if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
    return true;
  return false;
}

/* Return true in case BB is probably never executed.  */
bool
probably_never_executed_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities)
    return ((bb->count + profile_info->runs / 2) / profile_info->runs) == 0;
  return false;
}

/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

bool
rtl_predicted_by_p (basic_block bb, enum br_predictor predictor)
{
  rtx note;
  if (!INSN_P (BB_END (bb)))
    return false;
  for (note = REG_NOTES (BB_END (bb)); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED
        && INTVAL (XEXP (XEXP (note, 0), 0)) == (int) predictor)
      return true;
  return false;
}

/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

bool
tree_predicted_by_p (basic_block bb, enum br_predictor predictor)
{
  struct edge_prediction *i = bb_ann (bb)->predictions;
  for (i = bb_ann (bb)->predictions; i; i = i->next)
    if (i->predictor == predictor)
      return true;
  return false;
}

void
predict_insn (rtx insn, enum br_predictor predictor, int probability)
{
  if (!any_condjump_p (insn))
    abort ();
  if (!flag_guess_branch_prob)
    return;

  REG_NOTES (insn)
    = gen_rtx_EXPR_LIST (REG_BR_PRED,
                         gen_rtx_CONCAT (VOIDmode,
                                         GEN_INT ((int) predictor),
                                         GEN_INT ((int) probability)),
                         REG_NOTES (insn));
}

/* Predict insn by given predictor.  */

void
predict_insn_def (rtx insn, enum br_predictor predictor,
                  enum prediction taken)
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (insn, predictor, probability);
}

/* Predict edge E with given probability if possible.  */

void
rtl_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  rtx last_insn;
  last_insn = BB_END (e->src);

  /* We can store the branch prediction information only about
     conditional jumps.  */
  if (!any_condjump_p (last_insn))
    return;

  /* We always store probability of branching.  */
  if (e->flags & EDGE_FALLTHRU)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (last_insn, predictor, probability);
}

/* Predict edge E with the given PROBABILITY.  */
void
tree_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  struct edge_prediction *i = ggc_alloc (sizeof (struct edge_prediction));

  i->next = bb_ann (e->src)->predictions;
  bb_ann (e->src)->predictions = i;
  i->probability = probability;
  i->predictor = predictor;
  i->edge = e;
}

/* Return true when we can store prediction on insn INSN.
   At the moment we represent predictions only on conditional
   jumps, not at computed jump or other complicated cases.  */
static bool
can_predict_insn_p (rtx insn)
{
  return (JUMP_P (insn)
          && any_condjump_p (insn)
          && BLOCK_FOR_INSN (insn)->succ->succ_next);
}

/* Predict edge E by given predictor if possible.  */

void
predict_edge_def (edge e, enum br_predictor predictor,
                  enum prediction taken)
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_edge (e, predictor, probability);
}

/* Invert all branch predictions or probability notes in the INSN.  This needs
   to be done each time we invert the condition used by the jump.  */

void
invert_br_probabilities (rtx insn)
{
  rtx note;

  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PROB)
      XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
    else if (REG_NOTE_KIND (note) == REG_BR_PRED)
      XEXP (XEXP (note, 0), 1)
        = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (XEXP (note, 0), 1)));
}

/* Dump information about the branch prediction to the output file.  */

static void
dump_prediction (FILE *file, enum br_predictor predictor, int probability,
                 basic_block bb, int used)
{
  edge e = bb->succ;

  if (!file)
    return;

  while (e && (e->flags & EDGE_FALLTHRU))
    e = e->succ_next;

  fprintf (file, "  %s heuristics%s: %.1f%%",
           predictor_info[predictor].name,
           used ? "" : " (ignored)", probability * 100.0 / REG_BR_PROB_BASE);

  if (bb->count)
    {
      fprintf (file, "  exec ");
      fprintf (file, HOST_WIDEST_INT_PRINT_DEC, bb->count);
      if (e)
        {
          fprintf (file, " hit ");
          fprintf (file, HOST_WIDEST_INT_PRINT_DEC, e->count);
          fprintf (file, " (%.1f%%)", e->count * 100.0 / bb->count);
        }
    }

  fprintf (file, "\n");
}

/* We cannot predict the probabilities of outgoing edges of bb.  Set them
   evenly and hope for the best.  */
static void
set_even_probabilities (basic_block bb)
{
  int nedges = 0;
  edge e;

  for (e = bb->succ; e; e = e->succ_next)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      nedges++;
  for (e = bb->succ; e; e = e->succ_next)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      e->probability = (REG_BR_PROB_BASE + nedges / 2) / nedges;
    else
      e->probability = 0;
}

/* Combine all REG_BR_PRED notes into single probability and attach REG_BR_PROB
   note if not already present.  Remove now useless REG_BR_PRED notes.  */

static void
combine_predictions_for_insn (rtx insn, basic_block bb)
{
  rtx prob_note;
  rtx *pnote;
  rtx note;
  int best_probability = PROB_EVEN;
  int best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;

  if (!can_predict_insn_p (insn))
    {
      set_even_probabilities (bb);
      return;
    }

  prob_note = find_reg_note (insn, REG_BR_PROB, 0);
  pnote = &REG_NOTES (insn);
  if (dump_file)
    fprintf (dump_file, "Predictions for insn %i bb %i\n", INSN_UID (insn),
             bb->index);

  /* We implement "first match" heuristics and use probability guessed
     by the predictor with the smallest index.  */
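  /* The loop below also folds all matching predictions into one value using
     the Dempster-Shafer rule: two independent predictions p1 and p2 combine
     to p1*p2 / (p1*p2 + (1-p1)*(1-p2)).  Roughly, two 80% predictions
     combine to about 94%, while 80% and 20% cancel out to an even 50%.  */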
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED)
      {
        int predictor = INTVAL (XEXP (XEXP (note, 0), 0));
        int probability = INTVAL (XEXP (XEXP (note, 0), 1));

        found = true;
        if (best_predictor > predictor)
          best_probability = probability, best_predictor = predictor;

        d = (combined_probability * probability
             + (REG_BR_PROB_BASE - combined_probability)
             * (REG_BR_PROB_BASE - probability));

        /* Use FP math to avoid overflows of 32-bit integers.  */
        if (d == 0)
          /* If one probability is 0% and one 100%, avoid division by zero.  */
          combined_probability = REG_BR_PROB_BASE / 2;
        else
          combined_probability = (((double) combined_probability) * probability
                                  * REG_BR_PROB_BASE / d + 0.5);
      }

  /* Decide which heuristic to use.  In case we didn't match anything,
     use no_prediction heuristic, in case we did match, use either
     first match or Dempster-Shafer theory depending on the flags.  */

  if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
    first_match = true;

  if (!found)
    dump_prediction (dump_file, PRED_NO_PREDICTION,
                     combined_probability, bb, true);
  else
    {
      dump_prediction (dump_file, PRED_DS_THEORY, combined_probability,
                       bb, !first_match);
      dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability,
                       bb, first_match);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb, true);

  while (*pnote)
    {
      if (REG_NOTE_KIND (*pnote) == REG_BR_PRED)
        {
          int predictor = INTVAL (XEXP (XEXP (*pnote, 0), 0));
          int probability = INTVAL (XEXP (XEXP (*pnote, 0), 1));

          dump_prediction (dump_file, predictor, probability, bb,
                           !first_match || best_predictor == predictor);
          *pnote = XEXP (*pnote, 1);
        }
      else
        pnote = &XEXP (*pnote, 1);
    }

  if (!prob_note)
    {
      REG_NOTES (insn)
        = gen_rtx_EXPR_LIST (REG_BR_PROB,
                             GEN_INT (combined_probability), REG_NOTES (insn));

      /* Save the prediction into the CFG in case we are seeing a
         non-degenerate conditional jump.  */
      if (bb->succ->succ_next)
        {
          BRANCH_EDGE (bb)->probability = combined_probability;
          FALLTHRU_EDGE (bb)->probability
            = REG_BR_PROB_BASE - combined_probability;
        }
    }
}

/* Combine predictions into single probability and store them into CFG.
   Remove now useless prediction entries.  */

static void
combine_predictions_for_bb (FILE *file, basic_block bb)
{
  int best_probability = PROB_EVEN;
  int best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;
  struct edge_prediction *pred;
  int nedges = 0;
  edge e, first = NULL, second = NULL;

  for (e = bb->succ; e; e = e->succ_next)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      {
        nedges++;
        if (first && !second)
          second = e;
        if (!first)
          first = e;
      }

  /* When there is no successor or only one choice, prediction is easy.

     We are lazy for now and predict only basic blocks with two outgoing
     edges.  It is possible to predict the generic case too, but we have to
     ignore first match heuristics and do more involved combining.  Implement
     this later.  */
  if (nedges != 2)
    {
      if (!bb->count)
        set_even_probabilities (bb);
      bb_ann (bb)->predictions = NULL;
      if (file)
        fprintf (file, "%i edges in bb %i predicted to even probabilities\n",
                 nedges, bb->index);
      return;
    }

  if (file)
    fprintf (file, "Predictions for bb %i\n", bb->index);

  /* We implement "first match" heuristics and use probability guessed
     by the predictor with the smallest index.  */
  for (pred = bb_ann (bb)->predictions; pred; pred = pred->next)
    {
      int predictor = pred->predictor;
      int probability = pred->probability;

      if (pred->edge != first)
        probability = REG_BR_PROB_BASE - probability;

      found = true;
      if (best_predictor > predictor)
        best_probability = probability, best_predictor = predictor;

      d = (combined_probability * probability
           + (REG_BR_PROB_BASE - combined_probability)
           * (REG_BR_PROB_BASE - probability));

      /* Use FP math to avoid overflows of 32-bit integers.  */
      if (d == 0)
        /* If one probability is 0% and one 100%, avoid division by zero.  */
        combined_probability = REG_BR_PROB_BASE / 2;
      else
        combined_probability = (((double) combined_probability) * probability
                                * REG_BR_PROB_BASE / d + 0.5);
    }

  /* Decide which heuristic to use.  In case we didn't match anything,
     use no_prediction heuristic, in case we did match, use either
     first match or Dempster-Shafer theory depending on the flags.  */

  if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
    first_match = true;

  if (!found)
    dump_prediction (file, PRED_NO_PREDICTION, combined_probability, bb, true);
  else
    {
      dump_prediction (file, PRED_DS_THEORY, combined_probability, bb,
                       !first_match);
      dump_prediction (file, PRED_FIRST_MATCH, best_probability, bb,
                       first_match);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (file, PRED_COMBINED, combined_probability, bb, true);

  for (pred = bb_ann (bb)->predictions; pred; pred = pred->next)
    {
      int predictor = pred->predictor;
      int probability = pred->probability;

      if (pred->edge != bb->succ)
        probability = REG_BR_PROB_BASE - probability;
      dump_prediction (file, predictor, probability, bb,
                       !first_match || best_predictor == predictor);
    }
  bb_ann (bb)->predictions = NULL;

  if (!bb->count)
    {
      first->probability = combined_probability;
      second->probability = REG_BR_PROB_BASE - combined_probability;
    }
}

/* Predict edge probabilities by exploiting loop structure.
   When RTLSIMPLELOOPS is set, attempt to count the number of iterations by
   analyzing the RTL; otherwise use the tree based approach.  */
static void
predict_loops (struct loops *loops_info, bool rtlsimpleloops)
{
  unsigned i;

  if (!rtlsimpleloops)
    scev_initialize (loops_info);

  /* Try to predict out blocks in a loop that are not part of a
     natural loop.  */
  for (i = 1; i < loops_info->num; i++)
    {
      basic_block bb, *bbs;
      unsigned j;
      int exits;
      struct loop *loop = loops_info->parray[i];
      struct niter_desc desc;
      unsigned HOST_WIDE_INT niter;

      flow_loop_scan (loop, LOOP_EXIT_EDGES);
      exits = loop->num_exits;

      if (rtlsimpleloops)
        {
          iv_analysis_loop_init (loop);
          find_simple_exit (loop, &desc);

          if (desc.simple_p && desc.const_iter)
            {
              int prob;
              niter = desc.niter + 1;
              if (niter == 0)        /* We might overflow here.  */
                niter = desc.niter;

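              /* Below, prob is roughly REG_BR_PROB_BASE * (1 - 1/niter), so
                 for example a loop known to iterate 100 times keeps about
                 99% of the probability mass on continuing the loop each
                 time around.  */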
              prob = (REG_BR_PROB_BASE
                      - (REG_BR_PROB_BASE + niter / 2) / niter);
              /* Branch prediction algorithm gives 0 frequency for everything
                 after the end of a loop that has 0 probability of
                 finishing.  */
              if (prob == REG_BR_PROB_BASE)
                prob = REG_BR_PROB_BASE - 1;
              predict_edge (desc.in_edge, PRED_LOOP_ITERATIONS,
                            prob);
            }
        }
      else
        {
          edge *exits;
          unsigned j, n_exits;
          struct tree_niter_desc niter_desc;

          exits = get_loop_exit_edges (loop, &n_exits);
          for (j = 0; j < n_exits; j++)
            {
              tree niter = NULL;

              if (number_of_iterations_exit (loop, exits[j], &niter_desc))
                niter = niter_desc.niter;
              if (!niter || TREE_CODE (niter_desc.niter) != INTEGER_CST)
                niter = loop_niter_by_eval (loop, exits[j]);

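              /* With a compile-time constant iteration count N, each exit
                 edge is predicted taken with probability of roughly
                 REG_BR_PROB_BASE / N (at least 1), mirroring the RTL path
                 above.  */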
              if (TREE_CODE (niter) == INTEGER_CST)
                {
                  int probability;
                  if (host_integerp (niter, 1)
                      && tree_int_cst_lt (niter,
                                          build_int_cstu (NULL_TREE,
                                                          REG_BR_PROB_BASE - 1)))
                    {
                      HOST_WIDE_INT nitercst = tree_low_cst (niter, 1) + 1;
                      probability
                        = (REG_BR_PROB_BASE + nitercst / 2) / nitercst;
                    }
                  else
                    probability = 1;

                  predict_edge (exits[j], PRED_LOOP_ITERATIONS, probability);
                }
            }

          free (exits);
        }

      bbs = get_loop_body (loop);

      for (j = 0; j < loop->num_nodes; j++)
        {
          int header_found = 0;
          edge e;

          bb = bbs[j];

          /* Bypass loop heuristics on continue statement.  These
             statements construct loops via "non-loop" constructs
             in the source language and are better handled
             separately.  */
          if ((rtlsimpleloops && !can_predict_insn_p (BB_END (bb)))
              || predicted_by_p (bb, PRED_CONTINUE))
            continue;

          /* Loop branch heuristics - predict an edge back to a
             loop's head as taken.  */
          for (e = bb->succ; e; e = e->succ_next)
            if (e->dest == loop->header
                && e->src == loop->latch)
              {
                header_found = 1;
                predict_edge_def (e, PRED_LOOP_BRANCH, TAKEN);
              }

          /* Loop exit heuristics - predict an edge exiting the loop if the
             conditional has no loop header successors as not taken.  */
          if (!header_found)
            for (e = bb->succ; e; e = e->succ_next)
              if (e->dest->index < 0
                  || !flow_bb_inside_loop_p (loop, e->dest))
                predict_edge
                  (e, PRED_LOOP_EXIT,
                   (REG_BR_PROB_BASE
                    - predictor_info [(int) PRED_LOOP_EXIT].hitrate)
                   / exits);
        }

      /* Free basic blocks from get_loop_body.  */
      free (bbs);
    }

  if (!rtlsimpleloops)
    scev_reset ();
}

/* Attempt to predict probabilities of BB outgoing edges using local
   properties.  */
static void
bb_estimate_probability_locally (basic_block bb)
{
  rtx last_insn = BB_END (bb);
  rtx cond;

  if (! can_predict_insn_p (last_insn))
    return;
  cond = get_condition (last_insn, NULL, false, false);
  if (! cond)
    return;

  /* Try "pointer heuristic."
     A comparison ptr == 0 is predicted as false.
     Similarly, a comparison ptr1 == ptr2 is predicted as false.  */
  if (COMPARISON_P (cond)
      && ((REG_P (XEXP (cond, 0)) && REG_POINTER (XEXP (cond, 0)))
          || (REG_P (XEXP (cond, 1)) && REG_POINTER (XEXP (cond, 1)))))
    {
      if (GET_CODE (cond) == EQ)
        predict_insn_def (last_insn, PRED_POINTER, NOT_TAKEN);
      else if (GET_CODE (cond) == NE)
        predict_insn_def (last_insn, PRED_POINTER, TAKEN);
    }
  else

  /* Try "opcode heuristic."
     EQ tests are usually false and NE tests are usually true.  Also,
     most quantities are positive, so we can make the appropriate guesses
     about signed comparisons against zero.  */
    switch (GET_CODE (cond))
      {
      case CONST_INT:
        /* Unconditional branch.  */
        predict_insn_def (last_insn, PRED_UNCONDITIONAL,
                          cond == const0_rtx ? NOT_TAKEN : TAKEN);
        break;

      case EQ:
      case UNEQ:
        /* Floating point comparisons appear to behave in a very
           unpredictable way because of the special role of = tests in
           FP code.  */
        if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
          ;
        /* Comparisons with 0 are often used for booleans and there is
           nothing useful to predict about them.  */
        else if (XEXP (cond, 1) == const0_rtx
                 || XEXP (cond, 0) == const0_rtx)
          ;
        else
          predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, NOT_TAKEN);
        break;

      case NE:
      case LTGT:
        /* Floating point comparisons appear to behave in a very
           unpredictable way because of the special role of = tests in
           FP code.  */
        if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
          ;
        /* Comparisons with 0 are often used for booleans and there is
           nothing useful to predict about them.  */
        else if (XEXP (cond, 1) == const0_rtx
                 || XEXP (cond, 0) == const0_rtx)
          ;
        else
          predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, TAKEN);
        break;

      case ORDERED:
        predict_insn_def (last_insn, PRED_FPOPCODE, TAKEN);
        break;

      case UNORDERED:
        predict_insn_def (last_insn, PRED_FPOPCODE, NOT_TAKEN);
        break;

      case LE:
      case LT:
        if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
            || XEXP (cond, 1) == constm1_rtx)
          predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, NOT_TAKEN);
        break;

      case GE:
      case GT:
        if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
            || XEXP (cond, 1) == constm1_rtx)
          predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, TAKEN);
        break;

      default:
        break;
      }
}

/* Statically estimate the probability that a branch will be taken and produce
   estimated profile.  When profile feedback is present, never executed
   portions of the function get estimated.  */

void
estimate_probability (struct loops *loops_info)
{
  basic_block bb;

  connect_infinite_loops_to_exit ();
  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  predict_loops (loops_info, true);

  iv_analysis_done ();

  /* Attempt to predict conditional jumps using a number of heuristics.  */
  FOR_EACH_BB (bb)
    {
      rtx last_insn = BB_END (bb);
      edge e;

      if (! can_predict_insn_p (last_insn))
        continue;

      for (e = bb->succ; e; e = e->succ_next)
        {
          /* Predict early returns to be probable, as we've already taken
             care of error returns and others are often used for fast paths
             through the function.  */
          if ((e->dest == EXIT_BLOCK_PTR
               || (e->dest->succ && !e->dest->succ->succ_next
                   && e->dest->succ->dest == EXIT_BLOCK_PTR))
              && !predicted_by_p (bb, PRED_NULL_RETURN)
              && !predicted_by_p (bb, PRED_CONST_RETURN)
              && !predicted_by_p (bb, PRED_NEGATIVE_RETURN)
              && !last_basic_block_p (e->dest))
            predict_edge_def (e, PRED_EARLY_RETURN, TAKEN);

          /* Look for a block we are guarding (i.e. we dominate it,
             but it doesn't postdominate us).  */
          if (e->dest != EXIT_BLOCK_PTR && e->dest != bb
              && dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
              && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
            {
              rtx insn;

              /* The call heuristic claims that a guarded function call
                 is improbable.  This is because such calls are often used
                 to signal exceptional situations such as printing error
                 messages.  */
              for (insn = BB_HEAD (e->dest);
                   insn != NEXT_INSN (BB_END (e->dest));
                   insn = NEXT_INSN (insn))
                if (CALL_P (insn)
                    /* Constant and pure calls are hardly used to signal
                       something exceptional.  */
                    && ! CONST_OR_PURE_CALL_P (insn))
                  {
                    predict_edge_def (e, PRED_CALL, NOT_TAKEN);
                    break;
                  }
            }
        }
      bb_estimate_probability_locally (bb);
    }

  /* Attach the combined probability to each conditional jump.  */
  FOR_EACH_BB (bb)
    combine_predictions_for_insn (BB_END (bb), bb);

  remove_fake_edges ();
  estimate_bb_frequencies (loops_info);
  free_dominance_info (CDI_POST_DOMINATORS);
  if (profile_status == PROFILE_ABSENT)
    profile_status = PROFILE_GUESSED;
}

/* Set edge->probability for each successor edge of BB.  */
void
guess_outgoing_edge_probabilities (basic_block bb)
{
  bb_estimate_probability_locally (bb);
  combine_predictions_for_insn (BB_END (bb), bb);
}

/* Return the constant EXPR will likely have at execution time, NULL if
   unknown.  The function is used by the builtin_expect branch predictor, so
   the evidence must come from this construct and additional possible
   constant folding.

   We may want to implement a more involved value guess (such as value range
   propagation based prediction), but such tricks shall go to a new
   implementation.  */
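/* For example, in code like

     if (__builtin_expect (ptr != NULL, 1))
       ...

   the SSA chain feeding the condition folds to the constant 1 here, so the
   branch ends up predicted taken by PRED_BUILTIN_EXPECT.  (This is a sketch
   of the intent; the exact GIMPLE shape depends on earlier folding.)  */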

static tree
expr_expected_value (tree expr, bitmap visited)
{
  if (TREE_CONSTANT (expr))
    return expr;
  else if (TREE_CODE (expr) == SSA_NAME)
    {
      tree def = SSA_NAME_DEF_STMT (expr);

      /* If we were already here, break the infinite cycle.  */
      if (bitmap_bit_p (visited, SSA_NAME_VERSION (expr)))
        return NULL;
      bitmap_set_bit (visited, SSA_NAME_VERSION (expr));

      if (TREE_CODE (def) == PHI_NODE)
        {
          /* All the arguments of the PHI node must have the same expected
             constant value.  */
          int i;
          tree val = NULL, new_val;

          for (i = 0; i < PHI_NUM_ARGS (def); i++)
            {
              tree arg = PHI_ARG_DEF (def, i);

              /* If this PHI has itself as an argument, we cannot
                 determine the expected value of this argument.  However,
                 if we can find an expected constant value for the other
                 PHI args then we can still be sure that this is
                 likely a constant.  So be optimistic and just
                 continue with the next argument.  */
              if (arg == PHI_RESULT (def))
                continue;

              new_val = expr_expected_value (arg, visited);
              if (!new_val)
                return NULL;
              if (!val)
                val = new_val;
              else if (!operand_equal_p (val, new_val, false))
                return NULL;
            }
          return val;
        }
      if (TREE_CODE (def) != MODIFY_EXPR || TREE_OPERAND (def, 0) != expr)
        return NULL;
      return expr_expected_value (TREE_OPERAND (def, 1), visited);
    }
  else if (TREE_CODE (expr) == CALL_EXPR)
    {
      tree decl = get_callee_fndecl (expr);
      if (!decl)
        return NULL;
      if (DECL_BUILT_IN (decl) && DECL_FUNCTION_CODE (decl) == BUILT_IN_EXPECT)
        {
          tree arglist = TREE_OPERAND (expr, 1);
          tree val;

          if (arglist == NULL_TREE
              || TREE_CHAIN (arglist) == NULL_TREE)
            return NULL;
          val = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (expr, 1)));
          if (TREE_CONSTANT (val))
            return val;
          return TREE_VALUE (TREE_CHAIN (TREE_OPERAND (expr, 1)));
        }
    }
  if (BINARY_CLASS_P (expr) || COMPARISON_CLASS_P (expr))
    {
      tree op0, op1, res;
      op0 = expr_expected_value (TREE_OPERAND (expr, 0), visited);
      if (!op0)
        return NULL;
      op1 = expr_expected_value (TREE_OPERAND (expr, 1), visited);
      if (!op1)
        return NULL;
      res = fold (build (TREE_CODE (expr), TREE_TYPE (expr), op0, op1));
      if (TREE_CONSTANT (res))
        return res;
      return NULL;
    }
  if (UNARY_CLASS_P (expr))
    {
      tree op0, res;
      op0 = expr_expected_value (TREE_OPERAND (expr, 0), visited);
      if (!op0)
        return NULL;
      res = fold (build1 (TREE_CODE (expr), TREE_TYPE (expr), op0));
      if (TREE_CONSTANT (res))
        return res;
      return NULL;
    }
  return NULL;
}

/* Get rid of all builtin_expect calls we no longer need.  */
static void
strip_builtin_expect (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bi;
      for (bi = bsi_start (bb); !bsi_end_p (bi); bsi_next (&bi))
        {
          tree stmt = bsi_stmt (bi);
          tree fndecl;
          tree arglist;

          if (TREE_CODE (stmt) == MODIFY_EXPR
              && TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR
              && (fndecl = get_callee_fndecl (TREE_OPERAND (stmt, 1)))
              && DECL_BUILT_IN (fndecl)
              && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
              && (arglist = TREE_OPERAND (TREE_OPERAND (stmt, 1), 1))
              && TREE_CHAIN (arglist))
            {
              TREE_OPERAND (stmt, 1) = TREE_VALUE (arglist);
              modify_stmt (stmt);
            }
        }
    }
}

/* Predict using opcode of the last statement in basic block.  */
static void
tree_predict_by_opcode (basic_block bb)
{
  tree stmt = last_stmt (bb);
  edge then_edge;
  tree cond;
  tree op0;
  tree type;
  tree val;
  bitmap visited;

  if (!stmt || TREE_CODE (stmt) != COND_EXPR)
    return;
  for (then_edge = bb->succ; then_edge; then_edge = then_edge->succ_next)
    if (then_edge->flags & EDGE_TRUE_VALUE)
      break;
  cond = TREE_OPERAND (stmt, 0);
  if (!COMPARISON_CLASS_P (cond))
    return;
  op0 = TREE_OPERAND (cond, 0);
  type = TREE_TYPE (op0);
  visited = BITMAP_XMALLOC ();
  val = expr_expected_value (cond, visited);
  BITMAP_XFREE (visited);
  if (val)
    {
      if (integer_zerop (val))
        predict_edge_def (then_edge, PRED_BUILTIN_EXPECT, NOT_TAKEN);
      else
        predict_edge_def (then_edge, PRED_BUILTIN_EXPECT, TAKEN);
      return;
    }
  /* Try "pointer heuristic."
     A comparison ptr == 0 is predicted as false.
     Similarly, a comparison ptr1 == ptr2 is predicted as false.  */
  if (POINTER_TYPE_P (type))
    {
      if (TREE_CODE (cond) == EQ_EXPR)
        predict_edge_def (then_edge, PRED_TREE_POINTER, NOT_TAKEN);
      else if (TREE_CODE (cond) == NE_EXPR)
        predict_edge_def (then_edge, PRED_TREE_POINTER, TAKEN);
    }
  else

  /* Try "opcode heuristic."
     EQ tests are usually false and NE tests are usually true.  Also,
     most quantities are positive, so we can make the appropriate guesses
     about signed comparisons against zero.  */
    switch (TREE_CODE (cond))
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
        /* Floating point comparisons appear to behave in a very
           unpredictable way because of the special role of = tests in
           FP code.  */
        if (FLOAT_TYPE_P (type))
          ;
        /* Comparisons with 0 are often used for booleans and there is
           nothing useful to predict about them.  */
        else if (integer_zerop (op0)
                 || integer_zerop (TREE_OPERAND (cond, 1)))
          ;
        else
          predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, NOT_TAKEN);
        break;

      case NE_EXPR:
      case LTGT_EXPR:
        /* Floating point comparisons appear to behave in a very
           unpredictable way because of the special role of = tests in
           FP code.  */
        if (FLOAT_TYPE_P (type))
          ;
        /* Comparisons with 0 are often used for booleans and there is
           nothing useful to predict about them.  */
        else if (integer_zerop (op0)
                 || integer_zerop (TREE_OPERAND (cond, 1)))
          ;
        else
          predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, TAKEN);
        break;

      case ORDERED_EXPR:
        predict_edge_def (then_edge, PRED_TREE_FPOPCODE, TAKEN);
        break;

      case UNORDERED_EXPR:
        predict_edge_def (then_edge, PRED_TREE_FPOPCODE, NOT_TAKEN);
        break;

      case LE_EXPR:
      case LT_EXPR:
        if (integer_zerop (TREE_OPERAND (cond, 1))
            || integer_onep (TREE_OPERAND (cond, 1))
            || integer_all_onesp (TREE_OPERAND (cond, 1))
            || real_zerop (TREE_OPERAND (cond, 1))
            || real_onep (TREE_OPERAND (cond, 1))
            || real_minus_onep (TREE_OPERAND (cond, 1)))
          predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, NOT_TAKEN);
        break;

      case GE_EXPR:
      case GT_EXPR:
        if (integer_zerop (TREE_OPERAND (cond, 1))
            || integer_onep (TREE_OPERAND (cond, 1))
            || integer_all_onesp (TREE_OPERAND (cond, 1))
            || real_zerop (TREE_OPERAND (cond, 1))
            || real_onep (TREE_OPERAND (cond, 1))
            || real_minus_onep (TREE_OPERAND (cond, 1)))
          predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, TAKEN);
        break;

      default:
        break;
      }
}

/* Predict branch probabilities and estimate profile of the tree CFG.  */
static void
tree_estimate_probability (void)
{
  basic_block bb;
  struct loops loops_info;

  flow_loops_find (&loops_info, LOOP_TREE);
  if (dump_file && (dump_flags & TDF_DETAILS))
    flow_loops_dump (&loops_info, dump_file, NULL, 0);

  connect_infinite_loops_to_exit ();
  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  predict_loops (&loops_info, false);

  FOR_EACH_BB (bb)
    {
      edge e;

      for (e = bb->succ; e; e = e->succ_next)
        {
          /* Predict early returns to be probable, as we've already taken
             care of error returns and others are often used for fast paths
             through the function.  */
          if ((e->dest == EXIT_BLOCK_PTR
               || (e->dest->succ && !e->dest->succ->succ_next
                   && e->dest->succ->dest == EXIT_BLOCK_PTR))
              && !predicted_by_p (bb, PRED_NULL_RETURN)
              && !predicted_by_p (bb, PRED_CONST_RETURN)
              && !predicted_by_p (bb, PRED_NEGATIVE_RETURN)
              && !last_basic_block_p (e->dest))
            predict_edge_def (e, PRED_EARLY_RETURN, TAKEN);

          /* Look for a block we are guarding (i.e. we dominate it,
             but it doesn't postdominate us).  */
          if (e->dest != EXIT_BLOCK_PTR && e->dest != bb
              && dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
              && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
            {
              block_stmt_iterator bi;

              /* The call heuristic claims that a guarded function call
                 is improbable.  This is because such calls are often used
                 to signal exceptional situations such as printing error
                 messages.  */
              for (bi = bsi_start (e->dest); !bsi_end_p (bi);
                   bsi_next (&bi))
                {
                  tree stmt = bsi_stmt (bi);
                  if ((TREE_CODE (stmt) == CALL_EXPR
                       || (TREE_CODE (stmt) == MODIFY_EXPR
                           && TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR))
                      /* Constant and pure calls are hardly used to signal
                         something exceptional.  */
                      && TREE_SIDE_EFFECTS (stmt))
                    {
                      predict_edge_def (e, PRED_CALL, NOT_TAKEN);
                      break;
                    }
                }
            }
        }
      tree_predict_by_opcode (bb);
    }
  FOR_EACH_BB (bb)
    combine_predictions_for_bb (dump_file, bb);

  if (0)  /* FIXME: Enable once we pass down the profile to the RTL level.  */
    strip_builtin_expect ();
  estimate_bb_frequencies (&loops_info);
  free_dominance_info (CDI_POST_DOMINATORS);
  remove_fake_exit_edges ();
  flow_loops_free (&loops_info);
  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_tree_cfg (dump_file, dump_flags);
  if (profile_status == PROFILE_ABSENT)
    profile_status = PROFILE_GUESSED;
}

/* __builtin_expect dropped tokens into the insn stream describing expected
   values of registers.  Generate branch probabilities based off these
   values.  */

void
expected_value_to_br_prob (void)
{
  rtx insn, cond, ev = NULL_RTX, ev_reg = NULL_RTX;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
        {
        case NOTE:
          /* Look for expected value notes.  */
          if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EXPECTED_VALUE)
            {
              ev = NOTE_EXPECTED_VALUE (insn);
              ev_reg = XEXP (ev, 0);
              delete_insn (insn);
            }
          continue;

        case CODE_LABEL:
          /* Never propagate across labels.  */
          ev = NULL_RTX;
          continue;

        case JUMP_INSN:
          /* Look for simple conditional branches.  If we haven't got an
             expected value yet, no point going further.  */
          if (!JUMP_P (insn) || ev == NULL_RTX
              || ! any_condjump_p (insn))
            continue;
          break;

        default:
          /* Look for insns that clobber the EV register.  */
          if (ev && reg_set_p (ev_reg, insn))
            ev = NULL_RTX;
          continue;
        }

      /* Collect the branch condition, hopefully relative to EV_REG.  */
      /* ???  At present we'll miss things like
                (expected_value (eq r70 0))
                (set r71 -1)
                (set r80 (lt r70 r71))
                (set pc (if_then_else (ne r80 0) ...))
         as canonicalize_condition will render this to us as
                (lt r70, r71)
         Could use cselib to try and reduce this further.  */
      cond = XEXP (SET_SRC (pc_set (insn)), 0);
      cond = canonicalize_condition (insn, cond, 0, NULL, ev_reg,
                                     false, false);
      if (! cond || XEXP (cond, 0) != ev_reg
          || GET_CODE (XEXP (cond, 1)) != CONST_INT)
        continue;

      /* Substitute and simplify.  Given that the expression we're
         building involves two constants, we should wind up with either
         true or false.  */
      cond = gen_rtx_fmt_ee (GET_CODE (cond), VOIDmode,
                             XEXP (ev, 1), XEXP (cond, 1));
      cond = simplify_rtx (cond);

      /* Turn the condition into a scaled branch probability.  */
      if (cond != const_true_rtx && cond != const0_rtx)
        abort ();
      predict_insn_def (insn, PRED_BUILTIN_EXPECT,
                        cond == const_true_rtx ? TAKEN : NOT_TAKEN);
    }
}

/* Check whether this is the last basic block of function.  Commonly
   there is one extra common cleanup block.  */
static bool
last_basic_block_p (basic_block bb)
{
  if (bb == EXIT_BLOCK_PTR)
    return false;

  return (bb->next_bb == EXIT_BLOCK_PTR
          || (bb->next_bb->next_bb == EXIT_BLOCK_PTR
              && bb->succ && !bb->succ->succ_next
              && bb->succ->dest->next_bb == EXIT_BLOCK_PTR));
}

/* This is used to carry information about basic blocks.  It is
   attached to the AUX field of the standard CFG block.  */

typedef struct block_info_def
{
  /* Estimated frequency of execution of basic_block.  */
  sreal frequency;

  /* To keep queue of basic blocks to process.  */
  basic_block next;

  /* True if block needs to be visited in propagate_freq.  */
  unsigned int tovisit:1;

  /* Number of predecessors we need to visit first.  */
  int npredecessors;
} *block_info;

/* Similar information for edges.  */
typedef struct edge_info_def
{
  /* In case the edge is a loopback edge, the probability that the edge will
     be reached in case the header is.  The estimated number of iterations of
     the loop can then be computed as 1 / (1 - back_edge_prob).  */
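  /* (For instance, back_edge_prob == 0.9 corresponds to an expected trip
     count of about 10.)  */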
  sreal back_edge_prob;
  /* True if the edge is a loopback edge in the natural loop.  */
  unsigned int back_edge:1;
} *edge_info;

#define BLOCK_INFO(B)  ((block_info) (B)->aux)
#define EDGE_INFO(E)   ((edge_info) (E)->aux)

/* Helper function for estimate_bb_frequencies.
   Propagate the frequencies for LOOP.  */

static void
propagate_freq (struct loop *loop)
{
  basic_block head = loop->header;
  basic_block bb;
  basic_block last;
  edge e;
  basic_block nextbb;

  /* For each basic block we need to visit, count the number of its
     predecessors we need to visit first.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      if (BLOCK_INFO (bb)->tovisit)
        {
          int count = 0;

          for (e = bb->pred; e; e = e->pred_next)
            if (BLOCK_INFO (e->src)->tovisit && !(e->flags & EDGE_DFS_BACK))
              count++;
            else if (BLOCK_INFO (e->src)->tovisit
                     && dump_file && !EDGE_INFO (e)->back_edge)
              fprintf (dump_file,
                       "Irreducible region hit, ignoring edge to %i->%i\n",
                       e->src->index, bb->index);
          BLOCK_INFO (bb)->npredecessors = count;
        }
    }

  memcpy (&BLOCK_INFO (head)->frequency, &real_one, sizeof (real_one));
  last = head;
  for (bb = head; bb; bb = nextbb)
    {
      sreal cyclic_probability, frequency;

      memcpy (&cyclic_probability, &real_zero, sizeof (real_zero));
      memcpy (&frequency, &real_zero, sizeof (real_zero));

      nextbb = BLOCK_INFO (bb)->next;
      BLOCK_INFO (bb)->next = NULL;

      /* Compute frequency of basic block.  */
      if (bb != head)
        {
#ifdef ENABLE_CHECKING
          for (e = bb->pred; e; e = e->pred_next)
            if (BLOCK_INFO (e->src)->tovisit && !(e->flags & EDGE_DFS_BACK))
              abort ();
#endif

          for (e = bb->pred; e; e = e->pred_next)
            if (EDGE_INFO (e)->back_edge)
              {
                sreal_add (&cyclic_probability, &cyclic_probability,
                           &EDGE_INFO (e)->back_edge_prob);
              }
            else if (!(e->flags & EDGE_DFS_BACK))
              {
                sreal tmp;

                /* frequency += (e->probability
                                 * BLOCK_INFO (e->src)->frequency /
                                 REG_BR_PROB_BASE);  */

                sreal_init (&tmp, e->probability, 0);
                sreal_mul (&tmp, &tmp, &BLOCK_INFO (e->src)->frequency);
                sreal_mul (&tmp, &tmp, &real_inv_br_prob_base);
                sreal_add (&frequency, &frequency, &tmp);
              }

          if (sreal_compare (&cyclic_probability, &real_zero) == 0)
            {
              memcpy (&BLOCK_INFO (bb)->frequency, &frequency,
                      sizeof (frequency));
            }
          else
            {
              if (sreal_compare (&cyclic_probability, &real_almost_one) > 0)
                {
                  memcpy (&cyclic_probability, &real_almost_one,
                          sizeof (real_almost_one));
                }

              /* BLOCK_INFO (bb)->frequency = frequency
                                              / (1 - cyclic_probability) */

              sreal_sub (&cyclic_probability, &real_one, &cyclic_probability);
              sreal_div (&BLOCK_INFO (bb)->frequency,
                         &frequency, &cyclic_probability);
            }
        }

      BLOCK_INFO (bb)->tovisit = 0;

      /* Compute back edge frequencies.  */
      for (e = bb->succ; e; e = e->succ_next)
        if (e->dest == head)
          {
            sreal tmp;

            /* EDGE_INFO (e)->back_edge_prob
                 = ((e->probability * BLOCK_INFO (bb)->frequency)
                    / REG_BR_PROB_BASE);  */

            sreal_init (&tmp, e->probability, 0);
            sreal_mul (&tmp, &tmp, &BLOCK_INFO (bb)->frequency);
            sreal_mul (&EDGE_INFO (e)->back_edge_prob,
                       &tmp, &real_inv_br_prob_base);
          }

      /* Propagate to successor blocks.  */
      for (e = bb->succ; e; e = e->succ_next)
        if (!(e->flags & EDGE_DFS_BACK)
            && BLOCK_INFO (e->dest)->npredecessors)
          {
            BLOCK_INFO (e->dest)->npredecessors--;
            if (!BLOCK_INFO (e->dest)->npredecessors)
              {
                if (!nextbb)
                  nextbb = e->dest;
                else
                  BLOCK_INFO (last)->next = e->dest;

                last = e->dest;
              }
          }
    }
}

/* Estimate probabilities of loopback edges in loops at same nest level.  */

static void
estimate_loops_at_level (struct loop *first_loop)
{
  struct loop *loop;

  for (loop = first_loop; loop; loop = loop->next)
    {
      edge e;
      basic_block *bbs;
      unsigned i;

      estimate_loops_at_level (loop->inner);

      if (loop->latch->succ)  /* Do not do this for dummy function loop.  */
        {
          /* Find current loop back edge and mark it.  */
          e = loop_latch_edge (loop);
          EDGE_INFO (e)->back_edge = 1;
        }

      bbs = get_loop_body (loop);
      for (i = 0; i < loop->num_nodes; i++)
        BLOCK_INFO (bbs[i])->tovisit = 1;
      free (bbs);
      propagate_freq (loop);
    }
}

/* Convert counts measured by profile driven feedback to frequencies.
   Return nonzero iff there was any nonzero execution count.  */
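/* (Counts are rescaled so that the most frequently executed block gets
   frequency BB_FREQ_MAX; e.g. a block executed half as often as the hottest
   one ends up with roughly BB_FREQ_MAX / 2.)  */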

static int
counts_to_freqs (void)
{
  gcov_type count_max, true_count_max = 0;
  basic_block bb;

  FOR_EACH_BB (bb)
    true_count_max = MAX (bb->count, true_count_max);

  count_max = MAX (true_count_max, 1);
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->frequency = (bb->count * BB_FREQ_MAX + count_max / 2) / count_max;
  return true_count_max;
}

/* Return true if the function is likely to be expensive, so there is no point
   in optimizing performance of the prologue, epilogue or doing inlining at
   the expense of code size growth.  THRESHOLD is the limit of number of
   instructions the function can execute on average to still be considered
   not expensive.  */

bool
expensive_function_p (int threshold)
{
  unsigned int sum = 0;
  basic_block bb;
  unsigned int limit;

  /* We cannot compute accurately for large thresholds due to scaled
     frequencies.  */
  if (threshold > BB_FREQ_MAX)
    abort ();

  /* Frequencies are out of range.  This either means that the function
     contains an internal loop executing more than BB_FREQ_MAX times or
     profile feedback is available and the function has not been executed
     at all.  */
  if (ENTRY_BLOCK_PTR->frequency == 0)
    return true;

  /* Maximally BB_FREQ_MAX^2 so overflow won't happen.  */
  limit = ENTRY_BLOCK_PTR->frequency * threshold;
  FOR_EACH_BB (bb)
    {
      rtx insn;

      for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
           insn = NEXT_INSN (insn))
        if (active_insn_p (insn))
          {
            sum += bb->frequency;
            if (sum > limit)
              return true;
          }
    }

  return false;
}

/* Estimate basic block frequencies from the given branch probabilities.  */
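/* (Roughly: convert edge probabilities to sreal form, propagate frequencies
   from innermost loops outwards via estimate_loops_at_level/propagate_freq,
   then scale the results into the integer range 0..BB_FREQ_MAX.)  */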

static void
estimate_bb_frequencies (struct loops *loops)
{
  basic_block bb;
  sreal freq_max;

  if (!flag_branch_probabilities || !counts_to_freqs ())
    {
      static int real_values_initialized = 0;

      if (!real_values_initialized)
        {
          real_values_initialized = 1;
          sreal_init (&real_zero, 0, 0);
          sreal_init (&real_one, 1, 0);
          sreal_init (&real_br_prob_base, REG_BR_PROB_BASE, 0);
          sreal_init (&real_bb_freq_max, BB_FREQ_MAX, 0);
          sreal_init (&real_one_half, 1, -1);
          sreal_div (&real_inv_br_prob_base, &real_one, &real_br_prob_base);
          sreal_sub (&real_almost_one, &real_one, &real_inv_br_prob_base);
        }

      mark_dfs_back_edges ();

      ENTRY_BLOCK_PTR->succ->probability = REG_BR_PROB_BASE;

      /* Set up block info for each basic block.  */
      alloc_aux_for_blocks (sizeof (struct block_info_def));
      alloc_aux_for_edges (sizeof (struct edge_info_def));
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
        {
          edge e;

          BLOCK_INFO (bb)->tovisit = 0;
          for (e = bb->succ; e; e = e->succ_next)
            {
              sreal_init (&EDGE_INFO (e)->back_edge_prob, e->probability, 0);
              sreal_mul (&EDGE_INFO (e)->back_edge_prob,
                         &EDGE_INFO (e)->back_edge_prob,
                         &real_inv_br_prob_base);
            }
        }

      /* First compute probabilities locally for each loop from innermost
         to outermost to examine probabilities for back edges.  */
      estimate_loops_at_level (loops->tree_root);

      memcpy (&freq_max, &real_zero, sizeof (real_zero));
      FOR_EACH_BB (bb)
        if (sreal_compare (&freq_max, &BLOCK_INFO (bb)->frequency) < 0)
          memcpy (&freq_max, &BLOCK_INFO (bb)->frequency, sizeof (freq_max));

      sreal_div (&freq_max, &real_bb_freq_max, &freq_max);
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
        {
          sreal tmp;

          sreal_mul (&tmp, &BLOCK_INFO (bb)->frequency, &freq_max);
          sreal_add (&tmp, &tmp, &real_one_half);
          bb->frequency = sreal_to_int (&tmp);
        }

      free_aux_for_blocks ();
      free_aux_for_edges ();
    }
  compute_function_frequency ();
  if (flag_reorder_functions)
    choose_function_section ();
}

/* Decide whether function is hot, cold or unlikely executed.  */
static void
compute_function_frequency (void)
{
  basic_block bb;

  if (!profile_info || !flag_branch_probabilities)
    return;
  cfun->function_frequency = FUNCTION_FREQUENCY_UNLIKELY_EXECUTED;
  FOR_EACH_BB (bb)
    {
      if (maybe_hot_bb_p (bb))
        {
          cfun->function_frequency = FUNCTION_FREQUENCY_HOT;
          return;
        }
      if (!probably_never_executed_bb_p (bb))
        cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
    }
}

/* Choose appropriate section for the function.  */
static void
choose_function_section (void)
{
  if (DECL_SECTION_NAME (current_function_decl)
      || !targetm.have_named_sections
      /* Theoretically we can split the gnu.linkonce text section too,
         but this requires more work as the frequency needs to match
         for all generated objects so we need to merge the frequency
         of all instances.  For now just never set frequency for these.  */
      || DECL_ONE_ONLY (current_function_decl))
    return;

  /* If we are doing the partitioning optimization, let the optimization
     choose the correct section into which to put things.  */

  if (flag_reorder_blocks_and_partition)
    return;

  if (cfun->function_frequency == FUNCTION_FREQUENCY_HOT)
    DECL_SECTION_NAME (current_function_decl) =
      build_string (strlen (HOT_TEXT_SECTION_NAME), HOT_TEXT_SECTION_NAME);
  if (cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED)
    DECL_SECTION_NAME (current_function_decl) =
      build_string (strlen (UNLIKELY_EXECUTED_TEXT_SECTION_NAME),
                    UNLIKELY_EXECUTED_TEXT_SECTION_NAME);
}


struct tree_opt_pass pass_profile =
{
  "profile",                            /* name */
  NULL,                                 /* gate */
  tree_estimate_probability,            /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_BRANCH_PROB,                       /* tv_id */
  PROP_cfg,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_ggc_collect | TODO_verify_ssa,   /* todo_flags_finish */
  0                                     /* letter */
};