]> gcc.gnu.org Git - gcc.git/blame - gcc/tree-ssa-sccvn.c
Remove a layer of indirection from hash_table
[gcc.git] / gcc / tree-ssa-sccvn.c
CommitLineData
89fb70a3 1/* SCC value numbering for trees
23a5b65a 2 Copyright (C) 2006-2014 Free Software Foundation, Inc.
89fb70a3
DB
3 Contributed by Daniel Berlin <dan@dberlin.org>
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
9dcd6f09 9the Free Software Foundation; either version 3, or (at your option)
89fb70a3
DB
10any later version.
11
12GCC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
9dcd6f09
NC
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
89fb70a3
DB
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
89fb70a3 25#include "tree.h"
d8a2d370 26#include "stor-layout.h"
89fb70a3 27#include "basic-block.h"
cf835838 28#include "gimple-pretty-print.h"
89fb70a3 29#include "tree-inline.h"
2fb9a547
AM
30#include "hash-table.h"
31#include "tree-ssa-alias.h"
32#include "internal-fn.h"
33#include "gimple-fold.h"
34#include "tree-eh.h"
35#include "gimple-expr.h"
36#include "is-a.h"
18f429e2 37#include "gimple.h"
45b0be94 38#include "gimplify.h"
442b4905
AM
39#include "gimple-ssa.h"
40#include "tree-phinodes.h"
41#include "ssa-iterators.h"
d8a2d370 42#include "stringpool.h"
442b4905 43#include "tree-ssanames.h"
d8a2d370 44#include "expr.h"
442b4905
AM
45#include "tree-dfa.h"
46#include "tree-ssa.h"
7ee2468b 47#include "dumpfile.h"
89fb70a3 48#include "alloc-pool.h"
89fb70a3 49#include "flags.h"
89fb70a3 50#include "cfgloop.h"
863d2a57 51#include "params.h"
c2979eaf 52#include "tree-ssa-propagate.h"
89fb70a3 53#include "tree-ssa-sccvn.h"
a764d660
RB
54#include "tree-cfg.h"
55#include "domwalk.h"
89fb70a3
DB
56
57/* This algorithm is based on the SCC algorithm presented by Keith
58 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
59 (http://citeseer.ist.psu.edu/41805.html). In
60 straight line code, it is equivalent to a regular hash based value
61 numbering that is performed in reverse postorder.
62
63 For code with cycles, there are two alternatives, both of which
64 require keeping the hashtables separate from the actual list of
65 value numbers for SSA names.
66
67 1. Iterate value numbering in an RPO walk of the blocks, removing
68 all the entries from the hashtable after each iteration (but
69 keeping the SSA name->value number mapping between iterations).
70 Iterate until it does not change.
71
72 2. Perform value numbering as part of an SCC walk on the SSA graph,
73 iterating only the cycles in the SSA graph until they do not change
74 (using a separate, optimistic hashtable for value numbering the SCC
75 operands).
76
77 The second is not just faster in practice (because most SSA graph
78 cycles do not involve all the variables in the graph), it also has
79 some nice properties.
80
81 One of these nice properties is that when we pop an SCC off the
82 stack, we are guaranteed to have processed all the operands coming from
83 *outside of that SCC*, so we do not need to do anything special to
84 ensure they have value numbers.
85
86 Another nice property is that the SCC walk is done as part of a DFS
87 of the SSA graph, which makes it easy to perform combining and
88 simplifying operations at the same time.
89
90 The code below is deliberately written in a way that makes it easy
91 to separate the SCC walk from the other work it does.
92
93 In order to propagate constants through the code, we track which
94 expressions contain constants, and use those while folding. In
95 theory, we could also track expressions whose value numbers are
96 replaced, in case we end up folding based on expression
97 identities.
98
99 In order to value number memory, we assign value numbers to vuses.
100 This enables us to note that, for example, stores to the same
101 address of the same value from the same starting memory states are
070b797d 102 equivalent.
89fb70a3
DB
103 TODO:
104
105 1. We can iterate only the changing portions of the SCC's, but
106 I have not seen an SCC big enough for this to be a win.
107 2. If you differentiate between phi nodes for loops and phi nodes
108 for if-then-else, you can properly consider phi nodes in different
109 blocks for equivalence.
110 3. We could value number vuses in more cases, particularly, whole
111 structure copies.
112*/
113
bf190e8d
LC
114
115/* vn_nary_op hashtable helpers. */
116
117struct vn_nary_op_hasher : typed_noop_remove <vn_nary_op_s>
118{
119 typedef vn_nary_op_s value_type;
120 typedef vn_nary_op_s compare_type;
121 static inline hashval_t hash (const value_type *);
122 static inline bool equal (const value_type *, const compare_type *);
123};
124
125/* Return the computed hashcode for nary operation P1. */
126
127inline hashval_t
128vn_nary_op_hasher::hash (const value_type *vno1)
129{
130 return vno1->hashcode;
131}
132
133/* Compare nary operations P1 and P2 and return true if they are
134 equivalent. */
135
136inline bool
137vn_nary_op_hasher::equal (const value_type *vno1, const compare_type *vno2)
138{
139 return vn_nary_op_eq (vno1, vno2);
140}
141
c203e8a7 142typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
bf190e8d
LC
143typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
144
145
146/* vn_phi hashtable helpers. */
147
148static int
149vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
150
151struct vn_phi_hasher
152{
153 typedef vn_phi_s value_type;
154 typedef vn_phi_s compare_type;
155 static inline hashval_t hash (const value_type *);
156 static inline bool equal (const value_type *, const compare_type *);
157 static inline void remove (value_type *);
158};
159
160/* Return the computed hashcode for phi operation P1. */
161
162inline hashval_t
163vn_phi_hasher::hash (const value_type *vp1)
164{
165 return vp1->hashcode;
166}
167
168/* Compare two phi entries for equality, ignoring VN_TOP arguments. */
169
170inline bool
171vn_phi_hasher::equal (const value_type *vp1, const compare_type *vp2)
172{
173 return vn_phi_eq (vp1, vp2);
174}
175
176/* Free a phi operation structure VP. */
177
178inline void
179vn_phi_hasher::remove (value_type *phi)
180{
181 phi->phiargs.release ();
182}
183
c203e8a7 184typedef hash_table<vn_phi_hasher> vn_phi_table_type;
bf190e8d
LC
185typedef vn_phi_table_type::iterator vn_phi_iterator_type;
186
187
188/* Compare two reference operands P1 and P2 for equality. Return true if
189 they are equal, and false otherwise. */
190
191static int
192vn_reference_op_eq (const void *p1, const void *p2)
193{
194 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
195 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
196
197 return (vro1->opcode == vro2->opcode
198 /* We do not care for differences in type qualification. */
199 && (vro1->type == vro2->type
200 || (vro1->type && vro2->type
201 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
202 TYPE_MAIN_VARIANT (vro2->type))))
203 && expressions_equal_p (vro1->op0, vro2->op0)
204 && expressions_equal_p (vro1->op1, vro2->op1)
205 && expressions_equal_p (vro1->op2, vro2->op2));
206}
207
208/* Free a reference operation structure VP. */
209
210static inline void
211free_reference (vn_reference_s *vr)
212{
213 vr->operands.release ();
214}
215
216
217/* vn_reference hashtable helpers. */
218
219struct vn_reference_hasher
220{
221 typedef vn_reference_s value_type;
222 typedef vn_reference_s compare_type;
223 static inline hashval_t hash (const value_type *);
224 static inline bool equal (const value_type *, const compare_type *);
225 static inline void remove (value_type *);
226};
227
228/* Return the hashcode for a given reference operation P1. */
229
230inline hashval_t
231vn_reference_hasher::hash (const value_type *vr1)
232{
233 return vr1->hashcode;
234}
235
236inline bool
237vn_reference_hasher::equal (const value_type *v, const compare_type *c)
238{
239 return vn_reference_eq (v, c);
240}
241
242inline void
243vn_reference_hasher::remove (value_type *v)
244{
245 free_reference (v);
246}
247
c203e8a7 248typedef hash_table<vn_reference_hasher> vn_reference_table_type;
bf190e8d
LC
249typedef vn_reference_table_type::iterator vn_reference_iterator_type;
250
251
89fb70a3
DB
252/* The set of hashtables and alloc_pool's for their items. */
253
254typedef struct vn_tables_s
255{
c203e8a7
TS
256 vn_nary_op_table_type *nary;
257 vn_phi_table_type *phis;
258 vn_reference_table_type *references;
49a1fb2d 259 struct obstack nary_obstack;
89fb70a3
DB
260 alloc_pool phis_pool;
261 alloc_pool references_pool;
262} *vn_tables_t;
263
bf190e8d
LC
264
265/* vn_constant hashtable helpers. */
266
267struct vn_constant_hasher : typed_free_remove <vn_constant_s>
268{
269 typedef vn_constant_s value_type;
270 typedef vn_constant_s compare_type;
271 static inline hashval_t hash (const value_type *);
272 static inline bool equal (const value_type *, const compare_type *);
273};
274
275/* Hash table hash function for vn_constant_t. */
276
277inline hashval_t
278vn_constant_hasher::hash (const value_type *vc1)
279{
280 return vc1->hashcode;
281}
282
283/* Hash table equality function for vn_constant_t. */
284
285inline bool
286vn_constant_hasher::equal (const value_type *vc1, const compare_type *vc2)
287{
288 if (vc1->hashcode != vc2->hashcode)
289 return false;
290
291 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
292}
293
c203e8a7 294static hash_table<vn_constant_hasher> *constant_to_value_id;
c9145754 295static bitmap constant_value_ids;
89fb70a3 296
89fb70a3
DB
297
298/* Valid hashtables storing information we have proven to be
299 correct. */
300
301static vn_tables_t valid_info;
302
303/* Optimistic hashtables storing information we are making assumptions about
304 during iterations. */
305
306static vn_tables_t optimistic_info;
307
89fb70a3
DB
308/* Pointer to the set of hashtables that is currently being used.
309 Should always point to either the optimistic_info, or the
310 valid_info. */
311
312static vn_tables_t current_info;
313
314
315/* Reverse post order index for each basic block. */
316
317static int *rpo_numbers;
318
319#define SSA_VAL(x) (VN_INFO ((x))->valnum)
320
d1c0308e
RB
321/* Return the SSA value of the VUSE x, supporting released VDEFs
322 during elimination which will value-number the VDEF to the
323 associated VUSE (but not substitute in the whole lattice). */
324
325static inline tree
326vuse_ssa_val (tree x)
327{
328 if (!x)
329 return NULL_TREE;
330
331 do
332 {
333 x = SSA_VAL (x);
334 }
335 while (SSA_NAME_IN_FREE_LIST (x));
336
337 return x;
338}
339
89fb70a3
DB
340/* This represents the top of the VN lattice, which is the universal
341 value. */
342
343tree VN_TOP;
344
c9145754
DB
345/* Unique counter for our value ids. */
346
347static unsigned int next_value_id;
348
89fb70a3
DB
349/* Next DFS number and the stack for strongly connected component
350 detection. */
351
352static unsigned int next_dfs_num;
9771b263 353static vec<tree> sccstack;
89fb70a3 354
3d45dd59 355
89fb70a3 356
cbfb21c1
SB
357/* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
358 are allocated on an obstack for locality reasons, and to free them
9771b263 359 without looping over the vec. */
89fb70a3 360
9771b263 361static vec<vn_ssa_aux_t> vn_ssa_aux_table;
cbfb21c1 362static struct obstack vn_ssa_aux_obstack;
89fb70a3
DB
363
364/* Return the value numbering information for a given SSA name. */
365
366vn_ssa_aux_t
367VN_INFO (tree name)
368{
9771b263 369 vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
7a40b8b1 370 gcc_checking_assert (res);
c9145754 371 return res;
89fb70a3
DB
372}
373
374/* Set the value numbering info for a given SSA name to a given
375 value. */
376
377static inline void
378VN_INFO_SET (tree name, vn_ssa_aux_t value)
379{
9771b263 380 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
89fb70a3
DB
381}
382
cbfb21c1
SB
383/* Initialize the value numbering info for a given SSA name.
384 This should be called just once for every SSA name. */
89fb70a3
DB
385
386vn_ssa_aux_t
387VN_INFO_GET (tree name)
388{
cbfb21c1
SB
389 vn_ssa_aux_t newinfo;
390
3d9a9f94 391 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
cbfb21c1 392 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
9771b263
DN
393 if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
394 vn_ssa_aux_table.safe_grow (SSA_NAME_VERSION (name) + 1);
395 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
89fb70a3
DB
396 return newinfo;
397}
398
399
726a989a
RB
400/* Get the representative expression for the SSA_NAME NAME. Returns
401 the representative SSA_NAME if there is no expression associated with it. */
402
403tree
404vn_get_expr_for (tree name)
405{
406 vn_ssa_aux_t vn = VN_INFO (name);
407 gimple def_stmt;
408 tree expr = NULL_TREE;
1a60c352 409 enum tree_code code;
726a989a
RB
410
411 if (vn->valnum == VN_TOP)
412 return name;
413
414 /* If the value-number is a constant it is the representative
415 expression. */
416 if (TREE_CODE (vn->valnum) != SSA_NAME)
417 return vn->valnum;
418
419 /* Get to the information of the value of this SSA_NAME. */
420 vn = VN_INFO (vn->valnum);
421
422 /* If the value-number is a constant it is the representative
423 expression. */
424 if (TREE_CODE (vn->valnum) != SSA_NAME)
425 return vn->valnum;
426
427 /* Else if we have an expression, return it. */
428 if (vn->expr != NULL_TREE)
429 return vn->expr;
430
431 /* Otherwise use the defining statement to build the expression. */
432 def_stmt = SSA_NAME_DEF_STMT (vn->valnum);
433
1a60c352 434 /* If the value number is not an assignment use it directly. */
726a989a
RB
435 if (!is_gimple_assign (def_stmt))
436 return vn->valnum;
437
a5eaec42
RB
438 /* Note that we can valueize here because we clear the cached
439 simplified expressions after each optimistic iteration. */
1a60c352
RG
440 code = gimple_assign_rhs_code (def_stmt);
441 switch (TREE_CODE_CLASS (code))
726a989a
RB
442 {
443 case tcc_reference:
1a60c352
RG
444 if ((code == REALPART_EXPR
445 || code == IMAGPART_EXPR
446 || code == VIEW_CONVERT_EXPR)
447 && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
448 0)) == SSA_NAME)
449 expr = fold_build1 (code,
726a989a 450 gimple_expr_type (def_stmt),
a5eaec42
RB
451 vn_valueize (TREE_OPERAND
452 (gimple_assign_rhs1 (def_stmt), 0)));
726a989a
RB
453 break;
454
455 case tcc_unary:
1a60c352 456 expr = fold_build1 (code,
726a989a 457 gimple_expr_type (def_stmt),
a5eaec42 458 vn_valueize (gimple_assign_rhs1 (def_stmt)));
726a989a
RB
459 break;
460
461 case tcc_binary:
1a60c352 462 expr = fold_build2 (code,
726a989a 463 gimple_expr_type (def_stmt),
a5eaec42
RB
464 vn_valueize (gimple_assign_rhs1 (def_stmt)),
465 vn_valueize (gimple_assign_rhs2 (def_stmt)));
726a989a
RB
466 break;
467
18474649
RG
468 case tcc_exceptional:
469 if (code == CONSTRUCTOR
470 && TREE_CODE
471 (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE)
472 expr = gimple_assign_rhs1 (def_stmt);
473 break;
474
726a989a
RB
475 default:;
476 }
477 if (expr == NULL_TREE)
478 return vn->valnum;
479
480 /* Cache the expression. */
481 vn->expr = expr;
482
483 return expr;
484}
485
17742d62
RG
486/* Return the vn_kind the expression computed by the stmt should be
487 associated with. */
488
489enum vn_kind
490vn_get_stmt_kind (gimple stmt)
491{
492 switch (gimple_code (stmt))
493 {
494 case GIMPLE_CALL:
495 return VN_REFERENCE;
496 case GIMPLE_PHI:
497 return VN_PHI;
498 case GIMPLE_ASSIGN:
499 {
500 enum tree_code code = gimple_assign_rhs_code (stmt);
501 tree rhs1 = gimple_assign_rhs1 (stmt);
502 switch (get_gimple_rhs_class (code))
503 {
504 case GIMPLE_UNARY_RHS:
505 case GIMPLE_BINARY_RHS:
506 case GIMPLE_TERNARY_RHS:
507 return VN_NARY;
508 case GIMPLE_SINGLE_RHS:
509 switch (TREE_CODE_CLASS (code))
510 {
511 case tcc_reference:
512 /* VOP-less references can go through unary case. */
513 if ((code == REALPART_EXPR
514 || code == IMAGPART_EXPR
515 || code == VIEW_CONVERT_EXPR
516 || code == BIT_FIELD_REF)
517 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
518 return VN_NARY;
519
520 /* Fallthrough. */
521 case tcc_declaration:
522 return VN_REFERENCE;
523
524 case tcc_constant:
525 return VN_CONSTANT;
526
527 default:
528 if (code == ADDR_EXPR)
529 return (is_gimple_min_invariant (rhs1)
530 ? VN_CONSTANT : VN_REFERENCE);
531 else if (code == CONSTRUCTOR)
532 return VN_NARY;
533 return VN_NONE;
534 }
535 default:
536 return VN_NONE;
537 }
538 }
539 default:
540 return VN_NONE;
541 }
542}
726a989a 543
bb9e4199
RG
544/* Lookup a value id for CONSTANT and return it. If it does not
545 exist returns 0. */
546
547unsigned int
548get_constant_value_id (tree constant)
549{
bf190e8d 550 vn_constant_s **slot;
bb9e4199 551 struct vn_constant_s vc;
726a989a
RB
552
553 vc.hashcode = vn_hash_constant_with_type (constant);
bb9e4199 554 vc.constant = constant;
c203e8a7 555 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
bb9e4199 556 if (slot)
bf190e8d 557 return (*slot)->value_id;
bb9e4199
RG
558 return 0;
559}
560
c9145754
DB
561/* Lookup a value id for CONSTANT, and if it does not exist, create a
562 new one and return it. If it does exist, return it. */
563
564unsigned int
565get_or_alloc_constant_value_id (tree constant)
566{
bf190e8d 567 vn_constant_s **slot;
a7d04a53
RG
568 struct vn_constant_s vc;
569 vn_constant_t vcp;
b8698a0f 570
a7d04a53
RG
571 vc.hashcode = vn_hash_constant_with_type (constant);
572 vc.constant = constant;
c203e8a7 573 slot = constant_to_value_id->find_slot (&vc, INSERT);
c9145754 574 if (*slot)
bf190e8d 575 return (*slot)->value_id;
a7d04a53
RG
576
577 vcp = XNEW (struct vn_constant_s);
578 vcp->hashcode = vc.hashcode;
579 vcp->constant = constant;
580 vcp->value_id = get_next_value_id ();
bf190e8d 581 *slot = vcp;
a7d04a53
RG
582 bitmap_set_bit (constant_value_ids, vcp->value_id);
583 return vcp->value_id;
c9145754
DB
584}
585
586/* Return true if V is a value id for a constant. */
587
588bool
589value_id_constant_p (unsigned int v)
590{
b8698a0f 591 return bitmap_bit_p (constant_value_ids, v);
c9145754
DB
592}
593
b40bf772 594/* Compute the hash for a reference operand VRO1. */
89fb70a3
DB
595
596static hashval_t
9708c51d 597vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
89fb70a3 598{
9708c51d 599 result = iterative_hash_hashval_t (vro1->opcode, result);
85169114 600 if (vro1->op0)
9708c51d 601 result = iterative_hash_expr (vro1->op0, result);
85169114 602 if (vro1->op1)
9708c51d 603 result = iterative_hash_expr (vro1->op1, result);
85169114 604 if (vro1->op2)
9708c51d 605 result = iterative_hash_expr (vro1->op2, result);
85169114 606 return result;
89fb70a3
DB
607}
608
89fb70a3
DB
609/* Compute a hash for the reference operation VR1 and return it. */
610
c9145754 611hashval_t
89fb70a3
DB
612vn_reference_compute_hash (const vn_reference_t vr1)
613{
9708c51d 614 hashval_t result = 0;
89fb70a3
DB
615 int i;
616 vn_reference_op_t vro;
70f34814
RG
617 HOST_WIDE_INT off = -1;
618 bool deref = false;
89fb70a3 619
9771b263 620 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
70f34814
RG
621 {
622 if (vro->opcode == MEM_REF)
623 deref = true;
624 else if (vro->opcode != ADDR_EXPR)
625 deref = false;
626 if (vro->off != -1)
627 {
628 if (off == -1)
629 off = 0;
630 off += vro->off;
631 }
632 else
633 {
634 if (off != -1
635 && off != 0)
636 result = iterative_hash_hashval_t (off, result);
637 off = -1;
638 if (deref
639 && vro->opcode == ADDR_EXPR)
640 {
641 if (vro->op0)
642 {
643 tree op = TREE_OPERAND (vro->op0, 0);
644 result = iterative_hash_hashval_t (TREE_CODE (op), result);
645 result = iterative_hash_expr (op, result);
646 }
647 }
648 else
649 result = vn_reference_op_compute_hash (vro, result);
650 }
651 }
9708c51d
RG
652 if (vr1->vuse)
653 result += SSA_NAME_VERSION (vr1->vuse);
89fb70a3
DB
654
655 return result;
656}
657
bf190e8d 658/* Return true if reference operations VR1 and VR2 are equivalent. This
89fb70a3
DB
659 means they have the same set of operands and vuses. */
660
bf190e8d
LC
661bool
662vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
89fb70a3 663{
70f34814 664 unsigned i, j;
89fb70a3 665
5006671f
RG
666 /* Early out if this is not a hash collision. */
667 if (vr1->hashcode != vr2->hashcode)
668 return false;
89fb70a3 669
5006671f
RG
670 /* The VOP needs to be the same. */
671 if (vr1->vuse != vr2->vuse)
89fb70a3
DB
672 return false;
673
5006671f
RG
674 /* If the operands are the same we are done. */
675 if (vr1->operands == vr2->operands)
676 return true;
677
70f34814 678 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
89fb70a3
DB
679 return false;
680
5ccbfc1f
RG
681 if (INTEGRAL_TYPE_P (vr1->type)
682 && INTEGRAL_TYPE_P (vr2->type))
683 {
684 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
685 return false;
686 }
687 else if (INTEGRAL_TYPE_P (vr1->type)
688 && (TYPE_PRECISION (vr1->type)
689 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
690 return false;
691 else if (INTEGRAL_TYPE_P (vr2->type)
692 && (TYPE_PRECISION (vr2->type)
693 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
694 return false;
695
70f34814
RG
696 i = 0;
697 j = 0;
698 do
699 {
700 HOST_WIDE_INT off1 = 0, off2 = 0;
701 vn_reference_op_t vro1, vro2;
702 vn_reference_op_s tem1, tem2;
703 bool deref1 = false, deref2 = false;
9771b263 704 for (; vr1->operands.iterate (i, &vro1); i++)
70f34814
RG
705 {
706 if (vro1->opcode == MEM_REF)
707 deref1 = true;
708 if (vro1->off == -1)
709 break;
710 off1 += vro1->off;
711 }
9771b263 712 for (; vr2->operands.iterate (j, &vro2); j++)
70f34814
RG
713 {
714 if (vro2->opcode == MEM_REF)
715 deref2 = true;
716 if (vro2->off == -1)
717 break;
718 off2 += vro2->off;
719 }
720 if (off1 != off2)
721 return false;
722 if (deref1 && vro1->opcode == ADDR_EXPR)
723 {
724 memset (&tem1, 0, sizeof (tem1));
725 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
726 tem1.type = TREE_TYPE (tem1.op0);
727 tem1.opcode = TREE_CODE (tem1.op0);
728 vro1 = &tem1;
8bf43909 729 deref1 = false;
70f34814
RG
730 }
731 if (deref2 && vro2->opcode == ADDR_EXPR)
732 {
733 memset (&tem2, 0, sizeof (tem2));
734 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
735 tem2.type = TREE_TYPE (tem2.op0);
736 tem2.opcode = TREE_CODE (tem2.op0);
737 vro2 = &tem2;
8bf43909 738 deref2 = false;
70f34814 739 }
8bf43909
RG
740 if (deref1 != deref2)
741 return false;
70f34814
RG
742 if (!vn_reference_op_eq (vro1, vro2))
743 return false;
744 ++j;
745 ++i;
746 }
9771b263
DN
747 while (vr1->operands.length () != i
748 || vr2->operands.length () != j);
89fb70a3 749
5006671f 750 return true;
89fb70a3
DB
751}
752
726a989a 753/* Copy the operations present in load/store REF into RESULT, a vector of
89fb70a3
DB
754 vn_reference_op_s's. */
755
ce94d354 756void
9771b263 757copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
89fb70a3 758{
9f59420d
RG
759 if (TREE_CODE (ref) == TARGET_MEM_REF)
760 {
761 vn_reference_op_s temp;
762
39e843e8
RB
763 result->reserve (3);
764
9f59420d 765 memset (&temp, 0, sizeof (temp));
6d6c9525 766 temp.type = TREE_TYPE (ref);
9f59420d 767 temp.opcode = TREE_CODE (ref);
150e3929
RG
768 temp.op0 = TMR_INDEX (ref);
769 temp.op1 = TMR_STEP (ref);
770 temp.op2 = TMR_OFFSET (ref);
70f34814 771 temp.off = -1;
39e843e8 772 result->quick_push (temp);
9f59420d
RG
773
774 memset (&temp, 0, sizeof (temp));
775 temp.type = NULL_TREE;
4d948885
RG
776 temp.opcode = ERROR_MARK;
777 temp.op0 = TMR_INDEX2 (ref);
778 temp.off = -1;
39e843e8 779 result->quick_push (temp);
4d948885
RG
780
781 memset (&temp, 0, sizeof (temp));
782 temp.type = NULL_TREE;
783 temp.opcode = TREE_CODE (TMR_BASE (ref));
784 temp.op0 = TMR_BASE (ref);
70f34814 785 temp.off = -1;
39e843e8 786 result->quick_push (temp);
9f59420d
RG
787 return;
788 }
789
89fb70a3 790 /* For non-calls, store the information that makes up the address. */
1eadb567 791 tree orig = ref;
89fb70a3
DB
792 while (ref)
793 {
794 vn_reference_op_s temp;
795
796 memset (&temp, 0, sizeof (temp));
6d6c9525 797 temp.type = TREE_TYPE (ref);
89fb70a3 798 temp.opcode = TREE_CODE (ref);
70f34814 799 temp.off = -1;
89fb70a3
DB
800
801 switch (temp.opcode)
802 {
4ec0a198
TV
803 case MODIFY_EXPR:
804 temp.op0 = TREE_OPERAND (ref, 1);
805 break;
842679dc
TV
806 case WITH_SIZE_EXPR:
807 temp.op0 = TREE_OPERAND (ref, 1);
808 temp.off = 0;
809 break;
70f34814
RG
810 case MEM_REF:
811 /* The base address gets its own vn_reference_op_s structure. */
812 temp.op0 = TREE_OPERAND (ref, 1);
9541ffee 813 if (tree_fits_shwi_p (TREE_OPERAND (ref, 1)))
eb1ce453 814 temp.off = tree_to_shwi (TREE_OPERAND (ref, 1));
70f34814 815 break;
89fb70a3
DB
816 case BIT_FIELD_REF:
817 /* Record bits and position. */
818 temp.op0 = TREE_OPERAND (ref, 1);
819 temp.op1 = TREE_OPERAND (ref, 2);
820 break;
821 case COMPONENT_REF:
ba2e1892
RG
822 /* The field decl is enough to unambiguously specify the field,
823 a matching type is not necessary and a mismatching type
824 is always a spurious difference. */
825 temp.type = NULL_TREE;
b45d2719
RG
826 temp.op0 = TREE_OPERAND (ref, 1);
827 temp.op1 = TREE_OPERAND (ref, 2);
70f34814
RG
828 {
829 tree this_offset = component_ref_field_offset (ref);
830 if (this_offset
831 && TREE_CODE (this_offset) == INTEGER_CST)
832 {
833 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
834 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
835 {
807e902e
KZ
836 offset_int off
837 = (wi::to_offset (this_offset)
838 + wi::lrshift (wi::to_offset (bit_offset),
839 LOG2_BITS_PER_UNIT));
840 if (wi::fits_shwi_p (off)
1eadb567
RB
841 /* Probibit value-numbering zero offset components
842 of addresses the same before the pass folding
843 __builtin_object_size had a chance to run
844 (checking cfun->after_inlining does the
845 trick here). */
846 && (TREE_CODE (orig) != ADDR_EXPR
807e902e 847 || off != 0
1eadb567 848 || cfun->after_inlining))
807e902e 849 temp.off = off.to_shwi ();
70f34814
RG
850 }
851 }
852 }
89fb70a3
DB
853 break;
854 case ARRAY_RANGE_REF:
855 case ARRAY_REF:
856 /* Record index as operand. */
857 temp.op0 = TREE_OPERAND (ref, 1);
e52201b6
RG
858 /* Always record lower bounds and element size. */
859 temp.op1 = array_ref_low_bound (ref);
860 temp.op2 = array_ref_element_size (ref);
70f34814
RG
861 if (TREE_CODE (temp.op0) == INTEGER_CST
862 && TREE_CODE (temp.op1) == INTEGER_CST
863 && TREE_CODE (temp.op2) == INTEGER_CST)
864 {
807e902e
KZ
865 offset_int off = ((wi::to_offset (temp.op0)
866 - wi::to_offset (temp.op1))
867 * wi::to_offset (temp.op2));
868 if (wi::fits_shwi_p (off))
869 temp.off = off.to_shwi();
70f34814 870 }
89fb70a3 871 break;
6d6c9525
RG
872 case VAR_DECL:
873 if (DECL_HARD_REGISTER (ref))
874 {
875 temp.op0 = ref;
876 break;
877 }
878 /* Fallthru. */
879 case PARM_DECL:
880 case CONST_DECL:
881 case RESULT_DECL:
882 /* Canonicalize decls to MEM[&decl] which is what we end up with
883 when valueizing MEM[ptr] with ptr = &decl. */
884 temp.opcode = MEM_REF;
885 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
886 temp.off = 0;
9771b263 887 result->safe_push (temp);
6d6c9525 888 temp.opcode = ADDR_EXPR;
39e843e8 889 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
6d6c9525
RG
890 temp.type = TREE_TYPE (temp.op0);
891 temp.off = -1;
892 break;
4794afa5
DB
893 case STRING_CST:
894 case INTEGER_CST:
895 case COMPLEX_CST:
896 case VECTOR_CST:
26b70b9f 897 case REAL_CST:
0747aae4 898 case FIXED_CST:
bb0c55f6 899 case CONSTRUCTOR:
89fb70a3
DB
900 case SSA_NAME:
901 temp.op0 = ref;
902 break;
ce94d354
RG
903 case ADDR_EXPR:
904 if (is_gimple_min_invariant (ref))
905 {
906 temp.op0 = ref;
907 break;
908 }
909 /* Fallthrough. */
4794afa5
DB
910 /* These are only interesting for their operands, their
911 existence, and their type. They will never be the last
912 ref in the chain of references (IE they require an
913 operand), so we don't have to put anything
914 for op* as it will be handled by the iteration */
4794afa5
DB
915 case REALPART_EXPR:
916 case VIEW_CONVERT_EXPR:
70f34814
RG
917 temp.off = 0;
918 break;
919 case IMAGPART_EXPR:
920 /* This is only interesting for its constant offset. */
921 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
89fb70a3 922 break;
4794afa5
DB
923 default:
924 gcc_unreachable ();
89fb70a3 925 }
9771b263 926 result->safe_push (temp);
89fb70a3 927
ce94d354 928 if (REFERENCE_CLASS_P (ref)
4ec0a198 929 || TREE_CODE (ref) == MODIFY_EXPR
842679dc 930 || TREE_CODE (ref) == WITH_SIZE_EXPR
ce94d354
RG
931 || (TREE_CODE (ref) == ADDR_EXPR
932 && !is_gimple_min_invariant (ref)))
89fb70a3
DB
933 ref = TREE_OPERAND (ref, 0);
934 else
935 ref = NULL_TREE;
936 }
937}
938
b45d2719
RG
939/* Build a alias-oracle reference abstraction in *REF from the vn_reference
940 operands in *OPS, the reference alias set SET and the reference type TYPE.
941 Return true if something useful was produced. */
53f3815c 942
b45d2719
RG
943bool
944ao_ref_init_from_vn_reference (ao_ref *ref,
945 alias_set_type set, tree type,
9771b263 946 vec<vn_reference_op_s> ops)
53f3815c
RG
947{
948 vn_reference_op_t op;
949 unsigned i;
b45d2719
RG
950 tree base = NULL_TREE;
951 tree *op0_p = &base;
952 HOST_WIDE_INT offset = 0;
953 HOST_WIDE_INT max_size;
954 HOST_WIDE_INT size = -1;
955 tree size_tree = NULL_TREE;
70f34814 956 alias_set_type base_alias_set = -1;
b45d2719
RG
957
958 /* First get the final access size from just the outermost expression. */
9771b263 959 op = &ops[0];
b45d2719 960 if (op->opcode == COMPONENT_REF)
70f34814 961 size_tree = DECL_SIZE (op->op0);
b45d2719
RG
962 else if (op->opcode == BIT_FIELD_REF)
963 size_tree = op->op0;
964 else
965 {
966 enum machine_mode mode = TYPE_MODE (type);
967 if (mode == BLKmode)
968 size_tree = TYPE_SIZE (type);
969 else
970 size = GET_MODE_BITSIZE (mode);
971 }
972 if (size_tree != NULL_TREE)
973 {
cc269bb6 974 if (!tree_fits_uhwi_p (size_tree))
b45d2719
RG
975 size = -1;
976 else
eb1ce453 977 size = tree_to_uhwi (size_tree);
b45d2719
RG
978 }
979
980 /* Initially, maxsize is the same as the accessed element size.
981 In the following it will only grow (or become -1). */
982 max_size = size;
53f3815c 983
b45d2719
RG
984 /* Compute cumulative bit-offset for nested component-refs and array-refs,
985 and find the ultimate containing object. */
9771b263 986 FOR_EACH_VEC_ELT (ops, i, op)
53f3815c
RG
987 {
988 switch (op->opcode)
989 {
b45d2719
RG
990 /* These may be in the reference ops, but we cannot do anything
991 sensible with them here. */
b45d2719 992 case ADDR_EXPR:
70f34814
RG
993 /* Apart from ADDR_EXPR arguments to MEM_REF. */
994 if (base != NULL_TREE
995 && TREE_CODE (base) == MEM_REF
996 && op->op0
997 && DECL_P (TREE_OPERAND (op->op0, 0)))
998 {
9771b263 999 vn_reference_op_t pop = &ops[i-1];
70f34814
RG
1000 base = TREE_OPERAND (op->op0, 0);
1001 if (pop->off == -1)
1002 {
1003 max_size = -1;
1004 offset = 0;
1005 }
1006 else
1007 offset += pop->off * BITS_PER_UNIT;
1008 op0_p = NULL;
1009 break;
1010 }
1011 /* Fallthru. */
1012 case CALL_EXPR:
b45d2719 1013 return false;
53f3815c 1014
b45d2719 1015 /* Record the base objects. */
70f34814
RG
1016 case MEM_REF:
1017 base_alias_set = get_deref_alias_set (op->op0);
1018 *op0_p = build2 (MEM_REF, op->type,
1019 NULL_TREE, op->op0);
1020 op0_p = &TREE_OPERAND (*op0_p, 0);
1021 break;
1022
b45d2719
RG
1023 case VAR_DECL:
1024 case PARM_DECL:
1025 case RESULT_DECL:
1026 case SSA_NAME:
b45d2719 1027 *op0_p = op->op0;
70f34814 1028 op0_p = NULL;
b45d2719
RG
1029 break;
1030
1031 /* And now the usual component-reference style ops. */
53f3815c 1032 case BIT_FIELD_REF:
9439e9a1 1033 offset += tree_to_shwi (op->op1);
53f3815c
RG
1034 break;
1035
1036 case COMPONENT_REF:
b45d2719
RG
1037 {
1038 tree field = op->op0;
1039 /* We do not have a complete COMPONENT_REF tree here so we
1040 cannot use component_ref_field_offset. Do the interesting
1041 parts manually. */
1042
70f34814 1043 if (op->op1
cc269bb6 1044 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (field)))
b45d2719
RG
1045 max_size = -1;
1046 else
1047 {
eb1ce453 1048 offset += (tree_to_uhwi (DECL_FIELD_OFFSET (field))
b45d2719
RG
1049 * BITS_PER_UNIT);
1050 offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
1051 }
1052 break;
1053 }
53f3815c
RG
1054
1055 case ARRAY_RANGE_REF:
1056 case ARRAY_REF:
e52201b6 1057 /* We recorded the lower bound and the element size. */
9541ffee
RS
1058 if (!tree_fits_shwi_p (op->op0)
1059 || !tree_fits_shwi_p (op->op1)
1060 || !tree_fits_shwi_p (op->op2))
b45d2719
RG
1061 max_size = -1;
1062 else
1063 {
eb1ce453
KZ
1064 HOST_WIDE_INT hindex = tree_to_shwi (op->op0);
1065 hindex -= tree_to_shwi (op->op1);
1066 hindex *= tree_to_shwi (op->op2);
e52201b6 1067 hindex *= BITS_PER_UNIT;
b45d2719
RG
1068 offset += hindex;
1069 }
1070 break;
1071
1072 case REALPART_EXPR:
1073 break;
1074
1075 case IMAGPART_EXPR:
1076 offset += size;
1077 break;
1078
1079 case VIEW_CONVERT_EXPR:
53f3815c
RG
1080 break;
1081
1082 case STRING_CST:
1083 case INTEGER_CST:
1084 case COMPLEX_CST:
1085 case VECTOR_CST:
1086 case REAL_CST:
1087 case CONSTRUCTOR:
53f3815c 1088 case CONST_DECL:
b45d2719 1089 return false;
53f3815c
RG
1090
1091 default:
b45d2719 1092 return false;
53f3815c
RG
1093 }
1094 }
1095
b45d2719
RG
1096 if (base == NULL_TREE)
1097 return false;
1098
1099 ref->ref = NULL_TREE;
1100 ref->base = base;
1101 ref->offset = offset;
1102 ref->size = size;
1103 ref->max_size = max_size;
1104 ref->ref_alias_set = set;
70f34814
RG
1105 if (base_alias_set != -1)
1106 ref->base_alias_set = base_alias_set;
1107 else
1108 ref->base_alias_set = get_alias_set (base);
14cd91f9
RG
1109 /* We discount volatiles from value-numbering elsewhere. */
1110 ref->volatile_p = false;
b45d2719
RG
1111
1112 return true;
53f3815c
RG
1113}
1114
726a989a
RB
/* Copy the operations present in load/store/call REF into RESULT, a
   vector of vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
			      vec<vn_reference_op_s> *result)
{
  vn_reference_op_s temp;
  unsigned i;
  tree lhs = gimple_call_lhs (call);
  int lr;

  /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
     different.  By adding the lhs here in the vector, we ensure that the
     hashcode is different, guaranteeing a different value number.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    {
      memset (&temp, 0, sizeof (temp));
      temp.opcode = MODIFY_EXPR;
      temp.type = TREE_TYPE (lhs);
      temp.op0 = lhs;
      /* No constant byte offset is associated with this operand.  */
      temp.off = -1;
      result->safe_push (temp);
    }

  /* Copy the type, opcode, function, static chain and EH region, if any.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  /* Encode the EH landing-pad number as op2 so that throwing calls in
     different EH regions never hash/compare equal.  */
  if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
    temp.op2 = size_int (lr);
  temp.off = -1;
  result->safe_push (temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}
1159
726a989a
RB
1160/* Create a vector of vn_reference_op_s structures from CALL, a
1161 call statement. The vector is not shared. */
1162
9771b263 1163static vec<vn_reference_op_s>
726a989a
RB
1164create_reference_ops_from_call (gimple call)
1165{
6e1aa848 1166 vec<vn_reference_op_s> result = vNULL;
726a989a
RB
1167
1168 copy_reference_ops_from_call (call, &result);
1169 return result;
1170}
1171
aa7069aa
RG
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
void
vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
			    unsigned int *i_p)
{
  unsigned int i = *i_p;
  /* OP is the ADDR_EXPR operand, MEM_OP the MEM_REF just before it.  */
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  tree addr_base;
  HOST_WIDE_INT addr_offset = 0;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
					     &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != TREE_OPERAND (op->op0, 0))
    {
      /* Fold the stripped components' byte offset into the MEM_REF
	 constant offset operand.  */
      offset_int off = offset_int::from (mem_op->op0, SIGNED);
      off += addr_offset;
      mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      /* Cache the offset in OFF when it fits a signed HWI; -1 marks
	 it as not representable.  */
      if (tree_fits_shwi_p (mem_op->op0))
	mem_op->off = tree_to_shwi (mem_op->op0);
      else
	mem_op->off = -1;
    }
}
89fb70a3 1202
a03a9774
RG
/* Forward-propagate the address computation feeding the MEM_REF address
   at position *I_P in a vn_reference_op_s vector *OPS: if the SSA name
   address is defined by an ADDR_EXPR or a POINTER_PLUS_EXPR with a
   constant increment, fold that definition into the preceding MEM_REF
   offset.  Updates *I_P to point to the last element of the
   replacement.  */
static void
vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
				     unsigned int *i_p)
{
  unsigned int i = *i_p;
  /* OP is the SSA_NAME address operand, MEM_OP the MEM_REF before it.  */
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  gimple def_stmt;
  enum tree_code code;
  offset_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return;

  off = offset_int::from (mem_op->op0, SIGNED);

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      HOST_WIDE_INT addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						 &addr_offset);
      /* Only a base that is itself a MEM_REF can be merged here.  */
      if (!addr_base
	  || TREE_CODE (addr_base) != MEM_REF)
	return;

      off += addr_offset;
      off += mem_ref_offset (addr_base);
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      /* POINTER_PLUS_EXPR: fold a constant pointer increment into the
	 MEM_REF offset and retarget the address at the incremented
	 pointer.  */
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
	  || TREE_CODE (ptroff) != INTEGER_CST)
	return;

      off += wi::to_offset (ptroff);
      op->op0 = ptr;
    }

  mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (tree_fits_shwi_p (mem_op->op0))
    mem_op->off = tree_to_shwi (mem_op->op0);
  else
    mem_op->off = -1;
  /* Valueize the new address and keep the opcode in sync when it is
     no longer an SSA name.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
}
1275
12bd5a1e
RG
/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  vec<vn_reference_op_s> operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = &operands[0];
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && operands.length () >= 2
      && operands.length () <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = &operands[1];
      if (operands.length () > 2)
	arg1 = &operands[2];
      /* Only attempt folding when at least one argument is a constant
	 or the address of an invariant.  */
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
	  || (arg0->opcode == ADDR_EXPR
	      && is_gimple_min_invariant (arg0->op0)))
	anyconst = true;
      if (arg1
	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
	      || (arg1->opcode == ADDR_EXPR
		  && is_gimple_min_invariant (arg1->op0))))
	anyconst = true;
      if (anyconst)
	{
	  tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
					 arg1 ? 2 : 1,
					 arg0->op0,
					 arg1 ? arg1->op0 : NULL);
	  /* Strip a conversion wrapped around the folded result.  */
	  if (folded
	      && TREE_CODE (folded) == NOP_EXPR)
	    folded = TREE_OPERAND (folded, 0);
	  /* Only an invariant result counts as fully constant.  */
	  if (folded
	      && is_gimple_min_invariant (folded))
	    return folded;
	}
    }

  /* Simplify reads from constant strings.  */
  else if (op->opcode == ARRAY_REF
	   && TREE_CODE (op->op0) == INTEGER_CST
	   && integer_zerop (op->op1)
	   && operands.length () == 2)
    {
      vn_reference_op_t arg0;
      arg0 = &operands[1];
      /* A single-byte, non-negative, in-bounds index into a STRING_CST
	 of matching mode folds to the addressed character.  */
      if (arg0->opcode == STRING_CST
	  && (TYPE_MODE (op->type)
	      == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
	  && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
	  && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
	  && tree_int_cst_sgn (op->op0) >= 0
	  && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
	return build_int_cst_type (op->type,
				   (TREE_STRING_POINTER (arg0->op0)
				    [TREE_INT_CST_LOW (op->op0)]));
    }

  return NULL_TREE;
}
1346
89fb70a3
DB
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static vec<vn_reference_op_s>
valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
{
  vn_reference_op_t vro;
  unsigned int i;

  *valueized_anything = false;

  FOR_EACH_VEC_ELT (orig, i, vro)
    {
      /* Valueize op0 for plain SSA_NAME ops and for ops carrying an
	 SSA name in their first operand slot.  */
      if (vro->opcode == SSA_NAME
	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
	{
	  tree tem = SSA_VAL (vro->op0);
	  if (tem != vro->op0)
	    {
	      *valueized_anything = true;
	      vro->op0 = tem;
	    }
	  /* If it transforms from an SSA_NAME to a constant, update
	     the opcode.  */
	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
	    vro->opcode = TREE_CODE (vro->op0);
	}
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
	{
	  tree tem = SSA_VAL (vro->op1);
	  if (tem != vro->op1)
	    {
	      *valueized_anything = true;
	      vro->op1 = tem;
	    }
	}
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
	{
	  tree tem = SSA_VAL (vro->op2);
	  if (tem != vro->op2)
	    {
	      *valueized_anything = true;
	      vro->op2 = tem;
	    }
	}
      /* If it transforms from an SSA_NAME to an address, fold with
	 a preceding indirect reference.  Note both callees may rewrite
	 the vector and update I through the pointer passed.  */
      if (i > 0
	  && vro->op0
	  && TREE_CODE (vro->op0) == ADDR_EXPR
	  && orig[i - 1].opcode == MEM_REF)
	vn_reference_fold_indirect (&orig, &i);
      else if (i > 0
	       && vro->opcode == SSA_NAME
	       && orig[i - 1].opcode == MEM_REF)
	vn_reference_maybe_forwprop_address (&orig, &i);
      /* If it transforms a non-constant ARRAY_REF into a constant
	 one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
	       && vro->off == -1
	       && TREE_CODE (vro->op0) == INTEGER_CST
	       && TREE_CODE (vro->op1) == INTEGER_CST
	       && TREE_CODE (vro->op2) == INTEGER_CST)
	{
	  /* off = (index - low bound) * element size.  */
	  offset_int off = ((wi::to_offset (vro->op0)
			     - wi::to_offset (vro->op1))
			    * wi::to_offset (vro->op2));
	  if (wi::fits_shwi_p (off))
	    vro->off = off.to_shwi ();
	}
    }

  return orig;
}
1423
9771b263
DN
1424static vec<vn_reference_op_s>
1425valueize_refs (vec<vn_reference_op_s> orig)
3ceaf2f5
RG
1426{
1427 bool tem;
1428 return valueize_refs_1 (orig, &tem);
1429}
1430
/* Scratch vector shared by valueize_shared_reference_ops_from_ref and
   valueize_shared_reference_ops_from_call below; it is truncated and
   refilled on every call, so its contents are only valid until the
   next lookup.  */
static vec<vn_reference_op_s> shared_lookup_references;
aa7069aa
RG
1432
/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return vNULL;
  /* Reuse the shared scratch vector to avoid per-lookup allocation;
     the returned vector is invalidated by the next call.  */
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs_1 (shared_lookup_references,
					      valueized_anything);
  return shared_lookup_references;
}
1449
1450/* Create a vector of vn_reference_op_s structures from CALL, a
1451 call statement. The vector is shared among all callers of
1452 this function. */
1453
9771b263 1454static vec<vn_reference_op_s>
aa7069aa
RG
1455valueize_shared_reference_ops_from_call (gimple call)
1456{
1457 if (!call)
6e1aa848 1458 return vNULL;
9771b263 1459 shared_lookup_references.truncate (0);
aa7069aa
RG
1460 copy_reference_ops_from_call (call, &shared_lookup_references);
1461 shared_lookup_references = valueize_refs (shared_lookup_references);
1462 return shared_lookup_references;
1463}
1464
896c8b96
RG
1465/* Lookup a SCCVN reference operation VR in the current hash table.
1466 Returns the resulting value number if it exists in the hash table,
c9145754
DB
1467 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1468 vn_reference_t stored in the hashtable if something is found. */
896c8b96
RG
1469
1470static tree
c9145754 1471vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
896c8b96 1472{
bf190e8d 1473 vn_reference_s **slot;
896c8b96
RG
1474 hashval_t hash;
1475
1476 hash = vr->hashcode;
c203e8a7 1477 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
896c8b96 1478 if (!slot && current_info == optimistic_info)
c203e8a7 1479 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
896c8b96 1480 if (slot)
c9145754
DB
1481 {
1482 if (vnresult)
1483 *vnresult = (vn_reference_t)*slot;
1484 return ((vn_reference_t)*slot)->result;
1485 }
b8698a0f 1486
896c8b96
RG
1487 return NULL_TREE;
1488}
1489
/* When non-NULL, receives every VUSE visited during a reference lookup
   walk (written by vn_reference_lookup_2 below).  */
static tree *last_vuse_ptr;
/* Kind of the walk currently being performed; e.g. checked against
   VN_WALKREWRITE in vn_reference_lookup_3.  */
static vn_lookup_kind vn_walk_kind;
/* Default walk kind; presumably installed into vn_walk_kind by the
   lookup entry points -- users are not visible here, confirm before
   relying on this.  */
static vn_lookup_kind default_vn_walk_kind;
d0ca0bcb 1493
5006671f
RG
/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
		       unsigned int cnt, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  vn_reference_s **slot;
  hashval_t hash;

  /* This bounds the stmt walks we perform on reference lookups
     to O(1) instead of O(N) where N is the number of dominating
     stores.  Returning (void *)-1 aborts the walk.  */
  if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
    return (void *)-1;

  /* Let the caller track the last VUSE visited, if requested.  */
  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  The hashcode incorporates the SSA version
     of the vuse, so subtract the old contribution and add the new.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = vuse_ssa_val (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  /* Look up the adjusted reference, falling back from the optimistic
     to the valid table.  */
  hash = vr->hashcode;
  slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}
c9145754 1530
b55eb410
RG
/* Lookup an existing or insert a new vn_reference entry into the
   value table for the VUSE, SET, TYPE, OPERANDS reference which
   has the value VALUE which is either a constant or an SSA name.  */

static vn_reference_t
vn_reference_lookup_or_insert_for_pieces (tree vuse,
					  alias_set_type set,
					  tree type,
					  vec<vn_reference_op_s,
					      va_heap> operands,
					  tree value)
{
  struct vn_reference_s vr1;
  vn_reference_t result;
  unsigned value_id;

  /* First try to find an existing entry for these pieces.  */
  vr1.vuse = vuse;
  vr1.operands = operands;
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if (vn_reference_lookup_1 (&vr1, &result))
    return result;

  /* Not found: compute a value id for VALUE and insert a new entry.
     Note the operands vector is copied for the inserted entry.  */
  if (TREE_CODE (value) == SSA_NAME)
    value_id = VN_INFO (value)->value_id;
  else
    value_id = get_or_alloc_constant_value_id (value);
  return vn_reference_insert_pieces (vuse, set, type,
				     operands.copy (), value, value_id);
}
1560
01df5c8a
RG
1561/* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1562 from the statement defining VUSE and if not successful tries to
073a8998 1563 translate *REFP and VR_ through an aggregate copy at the definition
01df5c8a
RG
1564 of VUSE. */
1565
1566static void *
50f0aa20
RB
1567vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1568 bool disambiguate_only)
01df5c8a
RG
1569{
1570 vn_reference_t vr = (vn_reference_t)vr_;
1571 gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
01df5c8a 1572 tree base;
0f900dfa 1573 HOST_WIDE_INT offset, maxsize;
9771b263 1574 static vec<vn_reference_op_s>
6e1aa848 1575 lhs_ops = vNULL;
8ea34dab
RG
1576 ao_ref lhs_ref;
1577 bool lhs_ref_ok = false;
01df5c8a 1578
4fa4929e
RG
1579 /* First try to disambiguate after value-replacing in the definitions LHS. */
1580 if (is_gimple_assign (def_stmt))
1581 {
9771b263 1582 vec<vn_reference_op_s> tem;
4fa4929e 1583 tree lhs = gimple_assign_lhs (def_stmt);
25aa059e 1584 bool valueized_anything = false;
8ea34dab 1585 /* Avoid re-allocation overhead. */
9771b263 1586 lhs_ops.truncate (0);
8ea34dab
RG
1587 copy_reference_ops_from_ref (lhs, &lhs_ops);
1588 tem = lhs_ops;
25aa059e 1589 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
8ea34dab 1590 gcc_assert (lhs_ops == tem);
25aa059e
RG
1591 if (valueized_anything)
1592 {
1593 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1594 get_alias_set (lhs),
1595 TREE_TYPE (lhs), lhs_ops);
1596 if (lhs_ref_ok
1597 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1598 return NULL;
1599 }
1600 else
1601 {
1602 ao_ref_init (&lhs_ref, lhs);
1603 lhs_ref_ok = true;
1604 }
4fa4929e 1605 }
50f0aa20
RB
1606 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1607 && gimple_call_num_args (def_stmt) <= 4)
1608 {
1609 /* For builtin calls valueize its arguments and call the
1610 alias oracle again. Valueization may improve points-to
1611 info of pointers and constify size and position arguments.
1612 Originally this was motivated by PR61034 which has
1613 conditional calls to free falsely clobbering ref because
1614 of imprecise points-to info of the argument. */
1615 tree oldargs[4];
61dd7fbc 1616 bool valueized_anything = false;
50f0aa20
RB
1617 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1618 {
1619 oldargs[i] = gimple_call_arg (def_stmt, i);
1620 if (TREE_CODE (oldargs[i]) == SSA_NAME
1621 && VN_INFO (oldargs[i])->valnum != oldargs[i])
1622 {
1623 gimple_call_set_arg (def_stmt, i, VN_INFO (oldargs[i])->valnum);
1624 valueized_anything = true;
1625 }
1626 }
1627 if (valueized_anything)
1628 {
1629 bool res = call_may_clobber_ref_p_1 (def_stmt, ref);
1630 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1631 gimple_call_set_arg (def_stmt, i, oldargs[i]);
1632 if (!res)
1633 return NULL;
1634 }
1635 }
1636
1637 if (disambiguate_only)
1638 return (void *)-1;
4fa4929e 1639
b45d2719
RG
1640 base = ao_ref_base (ref);
1641 offset = ref->offset;
b45d2719 1642 maxsize = ref->max_size;
01df5c8a
RG
1643
1644 /* If we cannot constrain the size of the reference we cannot
1645 test if anything kills it. */
1646 if (maxsize == -1)
1647 return (void *)-1;
1648
47598145
MM
1649 /* We can't deduce anything useful from clobbers. */
1650 if (gimple_clobber_p (def_stmt))
1651 return (void *)-1;
1652
01df5c8a 1653 /* def_stmt may-defs *ref. See if we can derive a value for *ref
47598145 1654 from that definition.
01df5c8a 1655 1) Memset. */
b45d2719 1656 if (is_gimple_reg_type (vr->type)
c7ee7b45 1657 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
01df5c8a 1658 && integer_zerop (gimple_call_arg (def_stmt, 1))
cc269bb6 1659 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
01df5c8a
RG
1660 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1661 {
1662 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1663 tree base2;
1664 HOST_WIDE_INT offset2, size2, maxsize2;
1665 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
eb1ce453 1666 size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
01df5c8a 1667 if ((unsigned HOST_WIDE_INT)size2 / 8
eb1ce453 1668 == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
9c8cbc74 1669 && maxsize2 != -1
01df5c8a
RG
1670 && operand_equal_p (base, base2, 0)
1671 && offset2 <= offset
1672 && offset2 + size2 >= offset + maxsize)
b45d2719 1673 {
e8160c9a 1674 tree val = build_zero_cst (vr->type);
9179ed9d 1675 return vn_reference_lookup_or_insert_for_pieces
b55eb410 1676 (vuse, vr->set, vr->type, vr->operands, val);
b45d2719 1677 }
01df5c8a
RG
1678 }
1679
1680 /* 2) Assignment from an empty CONSTRUCTOR. */
b45d2719 1681 else if (is_gimple_reg_type (vr->type)
01df5c8a
RG
1682 && gimple_assign_single_p (def_stmt)
1683 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1684 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1685 {
1686 tree base2;
1687 HOST_WIDE_INT offset2, size2, maxsize2;
1688 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1689 &offset2, &size2, &maxsize2);
9c8cbc74
EB
1690 if (maxsize2 != -1
1691 && operand_equal_p (base, base2, 0)
01df5c8a
RG
1692 && offset2 <= offset
1693 && offset2 + size2 >= offset + maxsize)
b45d2719 1694 {
e8160c9a 1695 tree val = build_zero_cst (vr->type);
9179ed9d 1696 return vn_reference_lookup_or_insert_for_pieces
b55eb410 1697 (vuse, vr->set, vr->type, vr->operands, val);
b45d2719 1698 }
01df5c8a
RG
1699 }
1700
c867aba0
RG
1701 /* 3) Assignment from a constant. We can use folds native encode/interpret
1702 routines to extract the assigned bits. */
b2e51979
RG
1703 else if (vn_walk_kind == VN_WALKREWRITE
1704 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
c867aba0
RG
1705 && ref->size == maxsize
1706 && maxsize % BITS_PER_UNIT == 0
1707 && offset % BITS_PER_UNIT == 0
1708 && is_gimple_reg_type (vr->type)
1709 && gimple_assign_single_p (def_stmt)
1710 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
1711 {
1712 tree base2;
1713 HOST_WIDE_INT offset2, size2, maxsize2;
1714 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1715 &offset2, &size2, &maxsize2);
1716 if (maxsize2 != -1
1717 && maxsize2 == size2
1718 && size2 % BITS_PER_UNIT == 0
1719 && offset2 % BITS_PER_UNIT == 0
1720 && operand_equal_p (base, base2, 0)
1721 && offset2 <= offset
1722 && offset2 + size2 >= offset + maxsize)
1723 {
1724 /* We support up to 512-bit values (for V8DFmode). */
1725 unsigned char buffer[64];
1726 int len;
1727
1728 len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
1729 buffer, sizeof (buffer));
1730 if (len > 0)
1731 {
1732 tree val = native_interpret_expr (vr->type,
1733 buffer
1734 + ((offset - offset2)
1735 / BITS_PER_UNIT),
1736 ref->size / BITS_PER_UNIT);
1737 if (val)
9179ed9d 1738 return vn_reference_lookup_or_insert_for_pieces
b55eb410 1739 (vuse, vr->set, vr->type, vr->operands, val);
c867aba0
RG
1740 }
1741 }
1742 }
1743
0147184e
RG
1744 /* 4) Assignment from an SSA name which definition we may be able
1745 to access pieces from. */
1746 else if (ref->size == maxsize
1747 && is_gimple_reg_type (vr->type)
1748 && gimple_assign_single_p (def_stmt)
1749 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
1750 {
1751 tree rhs1 = gimple_assign_rhs1 (def_stmt);
1752 gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
1753 if (is_gimple_assign (def_stmt2)
1754 && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
1755 || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
1756 && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
1757 {
1758 tree base2;
1759 HOST_WIDE_INT offset2, size2, maxsize2, off;
1760 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1761 &offset2, &size2, &maxsize2);
1762 off = offset - offset2;
1763 if (maxsize2 != -1
1764 && maxsize2 == size2
1765 && operand_equal_p (base, base2, 0)
1766 && offset2 <= offset
1767 && offset2 + size2 >= offset + maxsize)
1768 {
1769 tree val = NULL_TREE;
1770 HOST_WIDE_INT elsz
1771 = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
1772 if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
1773 {
1774 if (off == 0)
1775 val = gimple_assign_rhs1 (def_stmt2);
1776 else if (off == elsz)
1777 val = gimple_assign_rhs2 (def_stmt2);
1778 }
1779 else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
1780 && off % elsz == 0)
1781 {
1782 tree ctor = gimple_assign_rhs1 (def_stmt2);
1783 unsigned i = off / elsz;
1784 if (i < CONSTRUCTOR_NELTS (ctor))
1785 {
1786 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
13396b6e
JJ
1787 if (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
1788 {
1789 if (TREE_CODE (TREE_TYPE (elt->value))
1790 != VECTOR_TYPE)
1791 val = elt->value;
1792 }
0147184e
RG
1793 }
1794 }
1795 if (val)
9179ed9d 1796 return vn_reference_lookup_or_insert_for_pieces
b55eb410 1797 (vuse, vr->set, vr->type, vr->operands, val);
0147184e
RG
1798 }
1799 }
1800 }
1801
1802 /* 5) For aggregate copies translate the reference through them if
01df5c8a 1803 the copy kills ref. */
3bc27de7
RG
1804 else if (vn_walk_kind == VN_WALKREWRITE
1805 && gimple_assign_single_p (def_stmt)
01df5c8a 1806 && (DECL_P (gimple_assign_rhs1 (def_stmt))
70f34814 1807 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
01df5c8a
RG
1808 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
1809 {
1810 tree base2;
9c8cbc74 1811 HOST_WIDE_INT offset2, size2, maxsize2;
01df5c8a 1812 int i, j;
ef062b13 1813 auto_vec<vn_reference_op_s> rhs;
01df5c8a 1814 vn_reference_op_t vro;
b45d2719 1815 ao_ref r;
01df5c8a 1816
8ea34dab
RG
1817 if (!lhs_ref_ok)
1818 return (void *)-1;
1819
01df5c8a 1820 /* See if the assignment kills REF. */
8ea34dab
RG
1821 base2 = ao_ref_base (&lhs_ref);
1822 offset2 = lhs_ref.offset;
1823 size2 = lhs_ref.size;
9c8cbc74
EB
1824 maxsize2 = lhs_ref.max_size;
1825 if (maxsize2 == -1
1826 || (base != base2 && !operand_equal_p (base, base2, 0))
01df5c8a
RG
1827 || offset2 > offset
1828 || offset2 + size2 < offset + maxsize)
1829 return (void *)-1;
1830
8ea34dab
RG
1831 /* Find the common base of ref and the lhs. lhs_ops already
1832 contains valueized operands for the lhs. */
9771b263
DN
1833 i = vr->operands.length () - 1;
1834 j = lhs_ops.length () - 1;
35ecd408 1835 while (j >= 0 && i >= 0
9771b263 1836 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
01df5c8a
RG
1837 {
1838 i--;
1839 j--;
1840 }
35ecd408 1841
25aa059e
RG
1842 /* ??? The innermost op should always be a MEM_REF and we already
1843 checked that the assignment to the lhs kills vr. Thus for
1844 aggregate copies using char[] types the vn_reference_op_eq
1845 may fail when comparing types for compatibility. But we really
1846 don't care here - further lookups with the rewritten operands
1847 will simply fail if we messed up types too badly. */
4f9dbaaa 1848 if (j == 0 && i >= 0
9771b263
DN
1849 && lhs_ops[0].opcode == MEM_REF
1850 && lhs_ops[0].off != -1
1851 && (lhs_ops[0].off == vr->operands[i].off))
25aa059e
RG
1852 i--, j--;
1853
01df5c8a
RG
1854 /* i now points to the first additional op.
1855 ??? LHS may not be completely contained in VR, one or more
1856 VIEW_CONVERT_EXPRs could be in its way. We could at least
1857 try handling outermost VIEW_CONVERT_EXPRs. */
1858 if (j != -1)
1859 return (void *)-1;
01df5c8a
RG
1860
1861 /* Now re-write REF to be based on the rhs of the assignment. */
1862 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
1863 /* We need to pre-pend vr->operands[0..i] to rhs. */
9771b263 1864 if (i + 1 + rhs.length () > vr->operands.length ())
01df5c8a 1865 {
9771b263
DN
1866 vec<vn_reference_op_s> old = vr->operands;
1867 vr->operands.safe_grow (i + 1 + rhs.length ());
01df5c8a
RG
1868 if (old == shared_lookup_references
1869 && vr->operands != old)
6e1aa848 1870 shared_lookup_references = vNULL;
01df5c8a
RG
1871 }
1872 else
9771b263
DN
1873 vr->operands.truncate (i + 1 + rhs.length ());
1874 FOR_EACH_VEC_ELT (rhs, j, vro)
1875 vr->operands[i + 1 + j] = *vro;
b55eb410 1876 vr->operands = valueize_refs (vr->operands);
01df5c8a 1877 vr->hashcode = vn_reference_compute_hash (vr);
c7ee7b45
RG
1878
1879 /* Adjust *ref from the new operands. */
1880 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
1881 return (void *)-1;
1882 /* This can happen with bitfields. */
1883 if (ref->size != r.size)
1884 return (void *)-1;
1885 *ref = r;
1886
1887 /* Do not update last seen VUSE after translating. */
1888 last_vuse_ptr = NULL;
1889
1890 /* Keep looking for the adjusted *REF / VR pair. */
1891 return NULL;
1892 }
1893
0147184e 1894 /* 6) For memcpy copies translate the reference through them if
c7ee7b45
RG
1895 the copy kills ref. */
1896 else if (vn_walk_kind == VN_WALKREWRITE
1897 && is_gimple_reg_type (vr->type)
1898 /* ??? Handle BCOPY as well. */
1899 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
1900 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
1901 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
1902 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
1903 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
1904 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
1905 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
cc269bb6 1906 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
c7ee7b45
RG
1907 {
1908 tree lhs, rhs;
1909 ao_ref r;
1910 HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
1911 vn_reference_op_s op;
1912 HOST_WIDE_INT at;
1913
1914
1915 /* Only handle non-variable, addressable refs. */
1916 if (ref->size != maxsize
1917 || offset % BITS_PER_UNIT != 0
1918 || ref->size % BITS_PER_UNIT != 0)
1919 return (void *)-1;
1920
1921 /* Extract a pointer base and an offset for the destination. */
1922 lhs = gimple_call_arg (def_stmt, 0);
1923 lhs_offset = 0;
1924 if (TREE_CODE (lhs) == SSA_NAME)
1925 lhs = SSA_VAL (lhs);
1926 if (TREE_CODE (lhs) == ADDR_EXPR)
1927 {
1928 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
1929 &lhs_offset);
1930 if (!tem)
1931 return (void *)-1;
1932 if (TREE_CODE (tem) == MEM_REF
cc269bb6 1933 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
c7ee7b45
RG
1934 {
1935 lhs = TREE_OPERAND (tem, 0);
eb1ce453 1936 lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
c7ee7b45
RG
1937 }
1938 else if (DECL_P (tem))
1939 lhs = build_fold_addr_expr (tem);
1940 else
1941 return (void *)-1;
1942 }
1943 if (TREE_CODE (lhs) != SSA_NAME
1944 && TREE_CODE (lhs) != ADDR_EXPR)
1945 return (void *)-1;
1946
1947 /* Extract a pointer base and an offset for the source. */
1948 rhs = gimple_call_arg (def_stmt, 1);
1949 rhs_offset = 0;
1950 if (TREE_CODE (rhs) == SSA_NAME)
1951 rhs = SSA_VAL (rhs);
1952 if (TREE_CODE (rhs) == ADDR_EXPR)
1953 {
1954 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
1955 &rhs_offset);
1956 if (!tem)
1957 return (void *)-1;
1958 if (TREE_CODE (tem) == MEM_REF
cc269bb6 1959 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
c7ee7b45
RG
1960 {
1961 rhs = TREE_OPERAND (tem, 0);
eb1ce453 1962 rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
c7ee7b45
RG
1963 }
1964 else if (DECL_P (tem))
1965 rhs = build_fold_addr_expr (tem);
1966 else
1967 return (void *)-1;
1968 }
1969 if (TREE_CODE (rhs) != SSA_NAME
1970 && TREE_CODE (rhs) != ADDR_EXPR)
1971 return (void *)-1;
1972
eb1ce453 1973 copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));
c7ee7b45
RG
1974
1975 /* The bases of the destination and the references have to agree. */
1976 if ((TREE_CODE (base) != MEM_REF
1977 && !DECL_P (base))
1978 || (TREE_CODE (base) == MEM_REF
1979 && (TREE_OPERAND (base, 0) != lhs
cc269bb6 1980 || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
c7ee7b45
RG
1981 || (DECL_P (base)
1982 && (TREE_CODE (lhs) != ADDR_EXPR
1983 || TREE_OPERAND (lhs, 0) != base)))
1984 return (void *)-1;
1985
1986 /* And the access has to be contained within the memcpy destination. */
1987 at = offset / BITS_PER_UNIT;
1988 if (TREE_CODE (base) == MEM_REF)
eb1ce453 1989 at += tree_to_uhwi (TREE_OPERAND (base, 1));
c7ee7b45
RG
1990 if (lhs_offset > at
1991 || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
1992 return (void *)-1;
1993
1994 /* Make room for 2 operands in the new reference. */
9771b263 1995 if (vr->operands.length () < 2)
c7ee7b45 1996 {
9771b263
DN
1997 vec<vn_reference_op_s> old = vr->operands;
1998 vr->operands.safe_grow_cleared (2);
c7ee7b45
RG
1999 if (old == shared_lookup_references
2000 && vr->operands != old)
9771b263 2001 shared_lookup_references.create (0);
c7ee7b45
RG
2002 }
2003 else
9771b263 2004 vr->operands.truncate (2);
c7ee7b45
RG
2005
2006 /* The looked-through reference is a simple MEM_REF. */
2007 memset (&op, 0, sizeof (op));
2008 op.type = vr->type;
2009 op.opcode = MEM_REF;
2010 op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
2011 op.off = at - lhs_offset + rhs_offset;
9771b263 2012 vr->operands[0] = op;
6d6c9525 2013 op.type = TREE_TYPE (rhs);
c7ee7b45
RG
2014 op.opcode = TREE_CODE (rhs);
2015 op.op0 = rhs;
2016 op.off = -1;
9771b263 2017 vr->operands[1] = op;
c7ee7b45 2018 vr->hashcode = vn_reference_compute_hash (vr);
b45d2719
RG
2019
2020 /* Adjust *ref from the new operands. */
2021 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
01df5c8a 2022 return (void *)-1;
03472fdd
EB
2023 /* This can happen with bitfields. */
2024 if (ref->size != r.size)
2025 return (void *)-1;
b45d2719 2026 *ref = r;
01df5c8a 2027
d0ca0bcb
RG
2028 /* Do not update last seen VUSE after translating. */
2029 last_vuse_ptr = NULL;
2030
01df5c8a
RG
2031 /* Keep looking for the adjusted *REF / VR pair. */
2032 return NULL;
2033 }
2034
2035 /* Bail out and stop walking. */
2036 return (void *)-1;
2037}
2038
/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
			    vec<vn_reference_op_s> operands,
			    vn_reference_t *vnresult, vn_lookup_kind kind)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;
  tree cst;

  /* Allow callers that do not care about the hashtable entry.  */
  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse_ssa_val (vuse);
  /* Copy OPERANDS into the shared scratch vector so valueization does
     not modify the caller's vector.  */
  shared_lookup_references.truncate (0);
  shared_lookup_references.safe_grow (operands.length ());
  memcpy (shared_lookup_references.address (),
	  operands.address (),
	  sizeof (vn_reference_op_s)
	  * operands.length ());
  vr1.operands = operands = shared_lookup_references
    = valueize_refs (shared_lookup_references);
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  /* A reference that folds to a constant needs no hashtable lookup.  */
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  vn_reference_lookup_1 (&vr1, vnresult);
  if (!*vnresult
      && kind != VN_NOWALK
      && vr1.vuse)
    {
      /* Not found directly: walk the virtual use-def chain, looking
	 through non-aliasing intervening stores.  */
      ao_ref r;
      vn_walk_kind = kind;
      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
	*vnresult =
	  (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
						  vn_reference_lookup_2,
						  vn_reference_lookup_3, &vr1);
      /* The walker may have replaced the operand vector (see
	 vn_reference_lookup_3); release that copy if so.  */
      if (vr1.operands != operands)
	vr1.operands.release ();
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}
2093
2094/* Lookup OP in the current hash table, and return the resulting value
2095 number if it exists in the hash table. Return NULL_TREE if it does
2096 not exist in the hash table or if the result field of the structure
2097 was NULL.. VNRESULT will be filled in with the vn_reference_t
2098 stored in the hashtable if one exists. */
2099
2100tree
3bc27de7 2101vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
c9145754 2102 vn_reference_t *vnresult)
89fb70a3 2103{
9771b263 2104 vec<vn_reference_op_s> operands;
89fb70a3 2105 struct vn_reference_s vr1;
12bd5a1e 2106 tree cst;
3ceaf2f5 2107 bool valuezied_anything;
5006671f 2108
c9145754
DB
2109 if (vnresult)
2110 *vnresult = NULL;
89fb70a3 2111
d1c0308e 2112 vr1.vuse = vuse_ssa_val (vuse);
3ceaf2f5
RG
2113 vr1.operands = operands
2114 = valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
b45d2719
RG
2115 vr1.type = TREE_TYPE (op);
2116 vr1.set = get_alias_set (op);
89fb70a3 2117 vr1.hashcode = vn_reference_compute_hash (&vr1);
12bd5a1e
RG
2118 if ((cst = fully_constant_vn_reference_p (&vr1)))
2119 return cst;
896c8b96 2120
3bc27de7 2121 if (kind != VN_NOWALK
5006671f
RG
2122 && vr1.vuse)
2123 {
2124 vn_reference_t wvnresult;
b45d2719 2125 ao_ref r;
3ceaf2f5
RG
2126 /* Make sure to use a valueized reference if we valueized anything.
2127 Otherwise preserve the full reference for advanced TBAA. */
2128 if (!valuezied_anything
2129 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2130 vr1.operands))
6d6c9525 2131 ao_ref_init (&r, op);
3bc27de7 2132 vn_walk_kind = kind;
5006671f 2133 wvnresult =
b45d2719 2134 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
01df5c8a
RG
2135 vn_reference_lookup_2,
2136 vn_reference_lookup_3, &vr1);
2137 if (vr1.operands != operands)
9771b263 2138 vr1.operands.release ();
5006671f
RG
2139 if (wvnresult)
2140 {
2141 if (vnresult)
2142 *vnresult = wvnresult;
2143 return wvnresult->result;
2144 }
2145
2146 return NULL_TREE;
896c8b96 2147 }
89fb70a3 2148
5006671f 2149 return vn_reference_lookup_1 (&vr1, vnresult);
89fb70a3
DB
2150}
2151
/* Insert OP into the current hash table with a value number of
   RESULT, and return the resulting reference structure we created.  */

vn_reference_t
vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
{
  vn_reference_s **slot;
  vn_reference_t vr1;
  bool tem;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  /* SSA results carry their own value id; constants get a shared one.  */
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  /* Copy the shared operand vector — this entry outlives the lookup.  */
  vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
  vr1->type = TREE_TYPE (op);
  vr1->set = get_alias_set (op);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
  vr1->result_vdef = vdef;

  slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
							INSERT);

  /* Because we lookup stores using vuses, and value number failures
     using the vdefs (see visit_reference_op_store for how and why),
     it's possible that on failure we may try to insert an already
     inserted store.  This is not wrong, there is no ssa name for a
     store that we could use as a differentiator anyway.  Thus, unlike
     the other lookup functions, you cannot gcc_assert (!*slot)
     here.  */

  /* But free the old slot in case of a collision.  */
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
2194
/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
			    vec<vn_reference_op_s> operands,
			    tree result, unsigned int value_id)

{
  vn_reference_s **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  vr1->value_id = value_id;
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  /* Takes ownership of OPERANDS (valueized in place).  */
  vr1->operands = valueize_refs (operands);
  vr1->type = type;
  vr1->set = set;
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
							INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);
  /* NOTE(review): this branch is unreachable once the assert above
     has passed — kept as defensive belt-and-braces from upstream.  */
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
2232
/* Compute and return the hash value for nary operation VNO1.
   As a side effect, SSA_NAME operands are replaced by their current
   value numbers and commutative operands are put into canonical
   order, so equal-valued operations hash (and compare) equal.  */

hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  hashval_t hash;
  unsigned i;

  /* Valueize SSA operands first ...  */
  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  /* ... then canonicalize operand order for commutative binaries.  */
  if (vno1->length == 2
      && commutative_tree_code (vno1->opcode)
      && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
    {
      tree temp = vno1->op[0];
      vno1->op[0] = vno1->op[1];
      vno1->op[1] = temp;
    }

  hash = iterative_hash_hashval_t (vno1->opcode, 0);
  for (i = 0; i < vno1->length; ++i)
    hash = iterative_hash_expr (vno1->op[i], hash);

  return hash;
}
2260
bf190e8d 2261/* Compare nary operations VNO1 and VNO2 and return true if they are
89fb70a3
DB
2262 equivalent. */
2263
bf190e8d
LC
2264bool
2265vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
89fb70a3 2266{
49a1fb2d
RG
2267 unsigned i;
2268
85169114
PB
2269 if (vno1->hashcode != vno2->hashcode)
2270 return false;
2271
5a7d7f9c
RG
2272 if (vno1->length != vno2->length)
2273 return false;
2274
49a1fb2d 2275 if (vno1->opcode != vno2->opcode
63a14fa3 2276 || !types_compatible_p (vno1->type, vno2->type))
49a1fb2d
RG
2277 return false;
2278
2279 for (i = 0; i < vno1->length; ++i)
2280 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2281 return false;
2282
2283 return true;
89fb70a3
DB
2284}
2285
9ad6bebe 2286/* Initialize VNO from the pieces provided. */
89fb70a3 2287
9ad6bebe
NF
2288static void
2289init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
5a7d7f9c 2290 enum tree_code code, tree type, tree *ops)
9ad6bebe
NF
2291{
2292 vno->opcode = code;
2293 vno->length = length;
2294 vno->type = type;
5a7d7f9c 2295 memcpy (&vno->op[0], ops, sizeof (tree) * length);
9ad6bebe
NF
2296}
2297
2298/* Initialize VNO from OP. */
2299
2300static void
2301init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2302{
2303 unsigned i;
2304
2305 vno->opcode = TREE_CODE (op);
2306 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2307 vno->type = TREE_TYPE (op);
2308 for (i = 0; i < vno->length; ++i)
2309 vno->op[i] = TREE_OPERAND (op, i);
2310}
2311
5a7d7f9c
RG
2312/* Return the number of operands for a vn_nary ops structure from STMT. */
2313
2314static unsigned int
2315vn_nary_length_from_stmt (gimple stmt)
2316{
2317 switch (gimple_assign_rhs_code (stmt))
2318 {
2319 case REALPART_EXPR:
2320 case IMAGPART_EXPR:
2321 case VIEW_CONVERT_EXPR:
2322 return 1;
2323
91af9dc9
RG
2324 case BIT_FIELD_REF:
2325 return 3;
2326
5a7d7f9c
RG
2327 case CONSTRUCTOR:
2328 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2329
2330 default:
2331 return gimple_num_ops (stmt) - 1;
2332 }
2333}
2334
/* Initialize VNO from STMT.  The layout chosen per opcode must agree
   with vn_nary_length_from_stmt, which sizes the allocation.  */

static void
init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
{
  unsigned i;

  vno->opcode = gimple_assign_rhs_code (stmt);
  vno->type = gimple_expr_type (stmt);
  switch (vno->opcode)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      /* Strip the wrapper; record the wrapped operand only.  */
      vno->length = 1;
      vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      break;

    case BIT_FIELD_REF:
      /* Object, bit size and bit position.  */
      vno->length = 3;
      vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
      vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
      break;

    case CONSTRUCTOR:
      /* One operand per constructor element value.  */
      vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
      for (i = 0; i < vno->length; ++i)
	vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
      break;

    default:
      gcc_checking_assert (!gimple_assign_single_p (stmt));
      vno->length = gimple_num_ops (stmt) - 1;
      for (i = 0; i < vno->length; ++i)
	vno->op[i] = gimple_op (stmt, i + 1);
    }
}
2373
/* Compute the hashcode for VNO and look for it in the hash table;
   return the resulting value number if it exists in the hash table.
   Return NULL_TREE if it does not exist in the hash table or if the
   result field of the operation is NULL.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

static tree
vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
{
  vn_nary_op_s **slot;

  if (vnresult)
    *vnresult = NULL;

  vno->hashcode = vn_nary_op_compute_hash (vno);
  slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
						  NO_INSERT);
  /* During the optimistic iteration fall back to the valid table for
     entries that were proven in earlier iterations.  */
  if (!slot && current_info == optimistic_info)
    slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
						  NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = *slot;
  return (*slot)->result;
}
2400
9ad6bebe
NF
2401/* Lookup a n-ary operation by its pieces and return the resulting value
2402 number if it exists in the hash table. Return NULL_TREE if it does
2403 not exist in the hash table or if the result field of the operation
2404 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2405 if it exists. */
2406
2407tree
2408vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
5a7d7f9c 2409 tree type, tree *ops, vn_nary_op_t *vnresult)
9ad6bebe 2410{
5a7d7f9c
RG
2411 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2412 sizeof_vn_nary_op (length));
2413 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2414 return vn_nary_op_lookup_1 (vno1, vnresult);
9ad6bebe
NF
2415}
2416
c9145754
DB
2417/* Lookup OP in the current hash table, and return the resulting value
2418 number if it exists in the hash table. Return NULL_TREE if it does
2419 not exist in the hash table or if the result field of the operation
2420 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2421 if it exists. */
2422
2423tree
2424vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
89fb70a3 2425{
5a7d7f9c
RG
2426 vn_nary_op_t vno1
2427 = XALLOCAVAR (struct vn_nary_op_s,
2428 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2429 init_vn_nary_op_from_op (vno1, op);
2430 return vn_nary_op_lookup_1 (vno1, vnresult);
89fb70a3
DB
2431}
2432
726a989a
RB
2433/* Lookup the rhs of STMT in the current hash table, and return the resulting
2434 value number if it exists in the hash table. Return NULL_TREE if
2435 it does not exist in the hash table. VNRESULT will contain the
2436 vn_nary_op_t from the hashtable if it exists. */
2437
2438tree
2439vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
2440{
5a7d7f9c
RG
2441 vn_nary_op_t vno1
2442 = XALLOCAVAR (struct vn_nary_op_s,
2443 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2444 init_vn_nary_op_from_stmt (vno1, stmt);
2445 return vn_nary_op_lookup_1 (vno1, vnresult);
9ad6bebe
NF
2446}
2447
2448/* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2449
2450static vn_nary_op_t
2451alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2452{
2453 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2454}
2455
2456/* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2457 obstack. */
2458
2459static vn_nary_op_t
2460alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2461{
2462 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2463 &current_info->nary_obstack);
2464
2465 vno1->value_id = value_id;
2466 vno1->length = length;
2467 vno1->result = result;
2468
2469 return vno1;
2470}
2471
/* Insert VNO into TABLE.  If COMPUTE_HASH is true, then compute
   VNO->HASHCODE first.  The entry must not already exist.  */

static vn_nary_op_t
vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
			bool compute_hash)
{
  vn_nary_op_s **slot;

  if (compute_hash)
    vno->hashcode = vn_nary_op_compute_hash (vno);

  slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
  /* Unlike references, n-ary operations are only ever inserted once.  */
  gcc_assert (!*slot);

  *slot = vno;
  return vno;
}
2490
c9145754
DB
2491/* Insert a n-ary operation into the current hash table using it's
2492 pieces. Return the vn_nary_op_t structure we created and put in
2493 the hashtable. */
2494
2495vn_nary_op_t
2496vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
5a7d7f9c
RG
2497 tree type, tree *ops,
2498 tree result, unsigned int value_id)
c9145754 2499{
5a7d7f9c
RG
2500 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2501 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
9ad6bebe 2502 return vn_nary_op_insert_into (vno1, current_info->nary, true);
c9145754
DB
2503}
2504
89fb70a3 2505/* Insert OP into the current hash table with a value number of
c9145754
DB
2506 RESULT. Return the vn_nary_op_t structure we created and put in
2507 the hashtable. */
89fb70a3 2508
c9145754 2509vn_nary_op_t
49a1fb2d 2510vn_nary_op_insert (tree op, tree result)
89fb70a3 2511{
49a1fb2d 2512 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
49a1fb2d 2513 vn_nary_op_t vno1;
49a1fb2d 2514
9ad6bebe
NF
2515 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2516 init_vn_nary_op_from_op (vno1, op);
2517 return vn_nary_op_insert_into (vno1, current_info->nary, true);
89fb70a3
DB
2518}
2519
726a989a
RB
2520/* Insert the rhs of STMT into the current hash table with a value number of
2521 RESULT. */
2522
2523vn_nary_op_t
2524vn_nary_op_insert_stmt (gimple stmt, tree result)
2525{
5a7d7f9c
RG
2526 vn_nary_op_t vno1
2527 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2528 result, VN_INFO (result)->value_id);
9ad6bebe
NF
2529 init_vn_nary_op_from_stmt (vno1, stmt);
2530 return vn_nary_op_insert_into (vno1, current_info->nary, true);
726a989a
RB
2531}
2532
/* Compute a hashcode for PHI operation VP1 and return it.  Mixes the
   block index, the result type and all non-VN_TOP arguments.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
  hashval_t result;
  int i;
  tree phi1op;
  tree type;

  result = vp1->block->index;

  /* If all PHI arguments are constants we need to distinguish
     the PHI node via its type.  */
  type = vp1->type;
  result += vn_hash_type (type);

  FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
    {
      /* VN_TOP arguments are ignored so hash and equality agree
	 (see vn_phi_eq).  */
      if (phi1op == VN_TOP)
	continue;
      result = iterative_hash_expr (phi1op, result);
    }

  return result;
}
2559
/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
{
  if (vp1->hashcode != vp2->hashcode)
    return false;

  if (vp1->block == vp2->block)
    {
      int i;
      tree phi1op;

      /* If the PHI nodes do not have compatible types
	 they are not the same.  */
      if (!types_compatible_p (vp1->type, vp2->type))
	return false;

      /* Any phi in the same block will have its arguments in the
	 same edge order, because of how we store phi nodes.  */
      FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
	{
	  tree phi2op = vp2->phiargs[i];
	  /* VN_TOP is compatible with anything (optimistically).  */
	  if (phi1op == VN_TOP || phi2op == VN_TOP)
	    continue;
	  if (!expressions_equal_p (phi1op, phi2op))
	    return false;
	}
      return true;
    }
  return false;
}
2592
/* Scratch vector reused by every vn_phi_lookup call to avoid
   repeated allocation.  */
static vec<tree> shared_lookup_phiargs;

/* Lookup PHI in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

static tree
vn_phi_lookup (gimple phi)
{
  vn_phi_s **slot;
  struct vn_phi_s vp1;
  unsigned i;

  shared_lookup_phiargs.truncate (0);

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      shared_lookup_phiargs.safe_push (def);
    }
  vp1.type = TREE_TYPE (gimple_phi_result (phi));
  vp1.phiargs = shared_lookup_phiargs;
  vp1.block = gimple_bb (phi);
  vp1.hashcode = vn_phi_compute_hash (&vp1);
  slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
						  NO_INSERT);
  /* During the optimistic iteration also consult the valid table.  */
  if (!slot && current_info == optimistic_info)
    slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
						  NO_INSERT);
  if (!slot)
    return NULL_TREE;
  return (*slot)->result;
}
2628
/* Insert PHI into the current hash table with a value number of
   RESULT.  */

static vn_phi_t
vn_phi_insert (gimple phi, tree result)
{
  vn_phi_s **slot;
  vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
  unsigned i;
  /* Freshly allocated argument vector; ownership moves to VP1.  */
  vec<tree> args = vNULL;

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      args.safe_push (def);
    }
  vp1->value_id = VN_INFO (result)->value_id;
  vp1->type = TREE_TYPE (gimple_phi_result (phi));
  vp1->phiargs = args;
  vp1->block = gimple_bb (phi);
  vp1->result = result;
  vp1->hashcode = vn_phi_compute_hash (vp1);

  slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);

  /* Because we iterate over phi operations more than once, it's
     possible the slot might already exist here, hence no assert.  */
  *slot = vp1;
  return vp1;
}
2661
2662
2663/* Print set of components in strongly connected component SCC to OUT. */
2664
2665static void
9771b263 2666print_scc (FILE *out, vec<tree> scc)
89fb70a3
DB
2667{
2668 tree var;
2669 unsigned int i;
2670
0eb09f31 2671 fprintf (out, "SCC consists of:");
9771b263 2672 FOR_EACH_VEC_ELT (scc, i, var)
89fb70a3 2673 {
89fb70a3 2674 fprintf (out, " ");
0eb09f31 2675 print_generic_expr (out, var, 0);
89fb70a3
DB
2676 }
2677 fprintf (out, "\n");
2678}
2679
/* Set the value number of FROM to TO, return true if it has changed
   as a result.  */

static inline bool
set_ssa_val_to (tree from, tree to)
{
  tree currval = SSA_VAL (from);
  HOST_WIDE_INT toff, coff;

  /* The only thing we allow as value numbers are ssa_names
     and invariants.  So assert that here.  We don't allow VN_TOP
     as visiting a stmt should produce a value-number other than
     that.
     ??? Still VN_TOP can happen for unreachable code, so force
     it to varying in that case.  Not all code is prepared to
     get VN_TOP on valueization.  */
  if (to == VN_TOP)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Forcing value number to varying on "
		 "receiving VN_TOP\n");
      /* Value-numbering FROM to itself means "varying".  */
      to = from;
    }

  gcc_assert (to != NULL_TREE
	      && (TREE_CODE (to) == SSA_NAME
		  || is_gimple_min_invariant (to)));

  if (from != to)
    {
      /* Once varying (valnum == the name itself), stay varying —
	 lattice values may only go downward.  */
      if (currval == from)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Not changing value number of ");
	      print_generic_expr (dump_file, from, 0);
	      fprintf (dump_file, " from VARYING to ");
	      print_generic_expr (dump_file, to, 0);
	      fprintf (dump_file, "\n");
	    }
	  return false;
	}
      /* Do not value-number to names occurring in abnormal PHIs;
	 substituting those is not valid.  */
      else if (TREE_CODE (to) == SSA_NAME
	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
	to = from;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from, 0);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to, 0);
    }

  if (currval != to
      && !operand_equal_p (currval, to, 0)
      /* ??? For addresses involving volatile objects or types operand_equal_p
	 does not reliably detect ADDR_EXPRs as equal.  We know we are only
	 getting invariant gimple addresses here, so can use
	 get_addr_base_and_unit_offset to do this comparison.  */
      && !(TREE_CODE (currval) == ADDR_EXPR
	   && TREE_CODE (to) == ADDR_EXPR
	   && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
	       == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
	   && coff == toff))
    {
      VN_INFO (from)->valnum = to;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " (changed)\n");
      return true;
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n");
  return false;
}
2756
00115921
TV
2757/* Mark as processed all the definitions in the defining stmt of USE, or
2758 the USE itself. */
2759
2760static void
2761mark_use_processed (tree use)
2762{
2763 ssa_op_iter iter;
2764 def_operand_p defp;
2765 gimple stmt = SSA_NAME_DEF_STMT (use);
2766
2767 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
2768 {
2769 VN_INFO (use)->use_processed = true;
2770 return;
2771 }
2772
2773 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2774 {
2775 tree def = DEF_FROM_PTR (defp);
2776
2777 VN_INFO (def)->use_processed = true;
2778 }
2779}
2780
89fb70a3
DB
2781/* Set all definitions in STMT to value number to themselves.
2782 Return true if a value number changed. */
2783
2784static bool
726a989a 2785defs_to_varying (gimple stmt)
89fb70a3
DB
2786{
2787 bool changed = false;
2788 ssa_op_iter iter;
2789 def_operand_p defp;
2790
2791 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2792 {
2793 tree def = DEF_FROM_PTR (defp);
89fb70a3
DB
2794 changed |= set_ssa_val_to (def, def);
2795 }
2796 return changed;
2797}
2798
26fa9076 2799static bool expr_has_constants (tree expr);
3d45dd59 2800
89fb70a3
DB
2801/* Visit a copy between LHS and RHS, return true if the value number
2802 changed. */
2803
2804static bool
2805visit_copy (tree lhs, tree rhs)
2806{
89fb70a3
DB
2807 /* The copy may have a more interesting constant filled expression
2808 (we don't, since we know our RHS is just an SSA name). */
0d5a1b56
RB
2809 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2810 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2811
2812 /* And finally valueize. */
2813 rhs = SSA_VAL (rhs);
89fb70a3
DB
2814
2815 return set_ssa_val_to (lhs, rhs);
2816}
2817
2262707f 2818/* Visit a nary operator RHS, value number it, and return true if the
89fb70a3
DB
2819 value number of LHS has changed as a result. */
2820
2821static bool
2262707f 2822visit_nary_op (tree lhs, gimple stmt)
89fb70a3
DB
2823{
2824 bool changed = false;
726a989a 2825 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
89fb70a3
DB
2826
2827 if (result)
2262707f 2828 changed = set_ssa_val_to (lhs, result);
726a989a
RB
2829 else
2830 {
2831 changed = set_ssa_val_to (lhs, lhs);
2832 vn_nary_op_insert_stmt (stmt, lhs);
2833 }
2834
2835 return changed;
2836}
2837
/* Visit a call STMT storing into LHS.  Return true if the value number
   of the LHS has changed as a result.  */

static bool
visit_reference_op_call (tree lhs, gimple stmt)
{
  bool changed = false;
  struct vn_reference_s vr1;
  vn_reference_t vnresult = NULL;
  tree vuse = gimple_vuse (stmt);
  tree vdef = gimple_vdef (stmt);

  /* Non-ssa lhs is handled in copy_reference_ops_from_call.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    lhs = NULL_TREE;

  /* Build the lookup key from the valueized call.  */
  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = valueize_shared_reference_ops_from_call (stmt);
  vr1.type = gimple_expr_type (stmt);
  vr1.set = 0;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  vn_reference_lookup_1 (&vr1, &vnresult);

  if (vnresult)
    {
      /* An equivalent call was seen before: reuse its vdef and
	 result value numbers.  */
      if (vnresult->result_vdef && vdef)
	changed |= set_ssa_val_to (vdef, vnresult->result_vdef);

      if (!vnresult->result && lhs)
	vnresult->result = lhs;

      if (vnresult->result && lhs)
	{
	  changed |= set_ssa_val_to (lhs, vnresult->result);

	  if (VN_INFO (vnresult->result)->has_constants)
	    VN_INFO (lhs)->has_constants = true;
	}
    }
  else
    {
      /* First occurrence: value-number defs to themselves and record
	 the call in the hashtable.  */
      vn_reference_s **slot;
      vn_reference_t vr2;
      if (vdef)
	changed |= set_ssa_val_to (vdef, vdef);
      if (lhs)
	changed |= set_ssa_val_to (lhs, lhs);
      vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
      vr2->vuse = vr1.vuse;
      /* Create a private operand vector — the shared one is scratch.  */
      vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
      vr2->type = vr1.type;
      vr2->set = vr1.set;
      vr2->hashcode = vr1.hashcode;
      vr2->result = lhs;
      vr2->result_vdef = vdef;
      slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
							    INSERT);
      /* Collisions are possible (see vn_reference_insert); free the
	 old entry.  */
      if (*slot)
	free_reference (*slot);
      *slot = vr2;
    }

  return changed;
}
2902
/* Visit a load from a reference operator RHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_load (tree lhs, tree op, gimple stmt)
{
  bool changed = false;
  tree last_vuse;
  tree result;

  /* Expose LAST_VUSE to the reference walker via last_vuse_ptr so it
     can record the vuse the successful walk ended at; the insertion at
     the bottom then uses that (possibly older) memory state.  */
  last_vuse = gimple_vuse (stmt);
  last_vuse_ptr = &last_vuse;
  result = vn_reference_lookup (op, gimple_vuse (stmt),
				default_vn_walk_kind, NULL);
  last_vuse_ptr = NULL;

  /* If we have a VCE, try looking up its operand as it might be stored in
     a different type.  */
  if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
    result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
				  default_vn_walk_kind, NULL);

  /* We handle type-punning through unions by value-numbering based
     on offset and size of the access.  Be prepared to handle a
     type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
  if (result
      && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
    {
      /* We will be setting the value number of lhs to the value number
	 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
	 So first simplify and lookup this expression to see if it
	 is already available.  */
      tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
      /* Try to collapse a conversion of a conversion by folding the
	 outer code applied to the inner operand's expression.  */
      if ((CONVERT_EXPR_P (val)
	   || TREE_CODE (val) == VIEW_CONVERT_EXPR)
	  && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
	{
	  tree tem = vn_get_expr_for (TREE_OPERAND (val, 0));
	  if ((CONVERT_EXPR_P (tem)
	       || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
	      && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
						    TREE_TYPE (val), tem)))
	    val = tem;
	}
      result = val;
      if (!is_gimple_min_invariant (val)
	  && TREE_CODE (val) != SSA_NAME)
	result = vn_nary_op_lookup (val, NULL);
      /* If the expression is not yet available, value-number lhs to
	 a new SSA_NAME we create.  */
      if (!result)
	{
	  result = make_temp_ssa_name (TREE_TYPE (lhs), gimple_build_nop (),
				       "vntemp");
	  /* Initialize value-number information properly.  */
	  VN_INFO_GET (result)->valnum = result;
	  VN_INFO (result)->value_id = get_next_value_id ();
	  VN_INFO (result)->expr = val;
	  VN_INFO (result)->has_constants = expr_has_constants (val);
	  VN_INFO (result)->needs_insertion = true;
	  /* As all "inserted" statements are singleton SCCs, insert
	     to the valid table.  This is strictly needed to
	     avoid re-generating new value SSA_NAMEs for the same
	     expression during SCC iteration over and over (the
	     optimistic table gets cleared after each iteration).
	     We do not need to insert into the optimistic table, as
	     lookups there will fall back to the valid table.  */
	  if (current_info == optimistic_info)
	    {
	      current_info = valid_info;
	      vn_nary_op_insert (val, result);
	      current_info = optimistic_info;
	    }
	  else
	    vn_nary_op_insert (val, result);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Inserting name ");
	      print_generic_expr (dump_file, result, 0);
	      fprintf (dump_file, " for expression ");
	      print_generic_expr (dump_file, val, 0);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  if (result)
    {
      /* Found (or created) a value: LHS gets it, and inherits constant
	 expression info if the value carries any.  */
      changed = set_ssa_val_to (lhs, result);
      if (TREE_CODE (result) == SSA_NAME
	  && VN_INFO (result)->has_constants)
	{
	  VN_INFO (lhs)->expr = VN_INFO (result)->expr;
	  VN_INFO (lhs)->has_constants = true;
	}
    }
  else
    {
      /* No equivalent load known: LHS is its own value; record this
	 load under the (possibly walked-back) LAST_VUSE.  */
      changed = set_ssa_val_to (lhs, lhs);
      vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
    }

  return changed;
}
3007
3008
/* Visit a store to a reference operator LHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_store (tree lhs, tree op, gimple stmt)
{
  bool changed = false;
  vn_reference_t vnresult = NULL;
  tree result, assign;
  bool resultsame = false;
  tree vuse = gimple_vuse (stmt);
  tree vdef = gimple_vdef (stmt);

  /* First we want to lookup using the *vuses* from the store and see
     if there the last store to this location with the same address
     had the same value.

     The vuses represent the memory state before the store.  If the
     memory state, address, and value of the store is the same as the
     last store to this location, then this store will produce the
     same memory state as that store.

     In this case the vdef versions for this store are value numbered to those
     vuse versions, since they represent the same memory state after
     this store.

     Otherwise, the vdefs for the store are used when inserting into
     the table, since the store generates a new memory state.  */

  result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL);

  if (result)
    {
      /* Compare the valueized previous contents against the valueized
	 stored value.  */
      if (TREE_CODE (result) == SSA_NAME)
	result = SSA_VAL (result);
      if (TREE_CODE (op) == SSA_NAME)
	op = SSA_VAL (op);
      resultsame = expressions_equal_p (result, op);
    }

  if (!result || !resultsame)
    {
      /* Check whether an identical store (same address, value and
	 incoming memory state) was already value-numbered; if so the
	 vdef can reuse its recorded vdef value.  */
      assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
      vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult);
      if (vnresult)
	{
	  VN_INFO (vdef)->use_processed = true;
	  return set_ssa_val_to (vdef, vnresult->result_vdef);
	}
    }

  if (!result || !resultsame)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "No store match\n");
	  fprintf (dump_file, "Value numbering store ");
	  print_generic_expr (dump_file, lhs, 0);
	  fprintf (dump_file, " to ");
	  print_generic_expr (dump_file, op, 0);
	  fprintf (dump_file, "\n");
	}
      /* Have to set value numbers before insert, since insert is
	 going to valueize the references in-place.  */
      if (vdef)
	{
	  changed |= set_ssa_val_to (vdef, vdef);
	}

      /* Do not insert structure copies into the tables.  */
      if (is_gimple_min_invariant (op)
	  || is_gimple_reg (op))
	vn_reference_insert (lhs, op, vdef, NULL);

      /* Record the store itself so future identical stores can reuse
	 this vdef (see the lookup above).  */
      assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
      vn_reference_insert (assign, lhs, vuse, vdef);
    }
  else
    {
      /* We had a match, so value number the vdef to have the value
	 number of the vuse it came from.  */

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Store matched earlier value,"
		 "value numbering store vdefs to matching vuses.\n");

      changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
    }

  return changed;
}
3100
3101/* Visit and value number PHI, return true if the value number
3102 changed. */
3103
3104static bool
726a989a 3105visit_phi (gimple phi)
89fb70a3
DB
3106{
3107 bool changed = false;
3108 tree result;
3109 tree sameval = VN_TOP;
3110 bool allsame = true;
89fb70a3 3111
62b0d9ec
DJ
3112 /* TODO: We could check for this in init_sccvn, and replace this
3113 with a gcc_assert. */
3114 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3115 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3116
89fb70a3
DB
3117 /* See if all non-TOP arguments have the same value. TOP is
3118 equivalent to everything, so we can ignore it. */
a764d660
RB
3119 edge_iterator ei;
3120 edge e;
3121 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3122 if (e->flags & EDGE_EXECUTABLE)
3123 {
3124 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
89fb70a3 3125
a764d660
RB
3126 if (TREE_CODE (def) == SSA_NAME)
3127 def = SSA_VAL (def);
3128 if (def == VN_TOP)
3129 continue;
3130 if (sameval == VN_TOP)
3131 {
3132 sameval = def;
3133 }
3134 else
3135 {
3136 if (!expressions_equal_p (def, sameval))
3137 {
3138 allsame = false;
3139 break;
3140 }
3141 }
3142 }
89fb70a3
DB
3143
3144 /* If all value numbered to the same value, the phi node has that
3145 value. */
3146 if (allsame)
c1604254 3147 return set_ssa_val_to (PHI_RESULT (phi), sameval);
89fb70a3
DB
3148
3149 /* Otherwise, see if it is equivalent to a phi node in this block. */
3150 result = vn_phi_lookup (phi);
3151 if (result)
c1604254 3152 changed = set_ssa_val_to (PHI_RESULT (phi), result);
89fb70a3
DB
3153 else
3154 {
3155 vn_phi_insert (phi, PHI_RESULT (phi));
3156 VN_INFO (PHI_RESULT (phi))->has_constants = false;
3157 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
3158 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3159 }
3160
3161 return changed;
3162}
3163
3164/* Return true if EXPR contains constants. */
3165
3166static bool
3167expr_has_constants (tree expr)
3168{
3169 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
3170 {
3171 case tcc_unary:
3172 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
3173
3174 case tcc_binary:
3175 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
3176 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
3177 /* Constants inside reference ops are rarely interesting, but
3178 it can take a lot of looking to find them. */
3179 case tcc_reference:
e9bd9cf3 3180 case tcc_declaration:
89fb70a3
DB
3181 return false;
3182 default:
3183 return is_gimple_min_invariant (expr);
3184 }
3185 return false;
3186}
3187
726a989a
RB
3188/* Return true if STMT contains constants. */
3189
3190static bool
3191stmt_has_constants (gimple stmt)
3192{
0d5a1b56
RB
3193 tree tem;
3194
726a989a
RB
3195 if (gimple_code (stmt) != GIMPLE_ASSIGN)
3196 return false;
3197
3198 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
3199 {
0d5a1b56
RB
3200 case GIMPLE_TERNARY_RHS:
3201 tem = gimple_assign_rhs3 (stmt);
3202 if (TREE_CODE (tem) == SSA_NAME)
3203 tem = SSA_VAL (tem);
3204 if (is_gimple_min_invariant (tem))
3205 return true;
3206 /* Fallthru. */
726a989a
RB
3207
3208 case GIMPLE_BINARY_RHS:
0d5a1b56
RB
3209 tem = gimple_assign_rhs2 (stmt);
3210 if (TREE_CODE (tem) == SSA_NAME)
3211 tem = SSA_VAL (tem);
3212 if (is_gimple_min_invariant (tem))
3213 return true;
3214 /* Fallthru. */
3215
726a989a
RB
3216 case GIMPLE_SINGLE_RHS:
3217 /* Constants inside reference ops are rarely interesting, but
3218 it can take a lot of looking to find them. */
0d5a1b56
RB
3219 case GIMPLE_UNARY_RHS:
3220 tem = gimple_assign_rhs1 (stmt);
3221 if (TREE_CODE (tem) == SSA_NAME)
3222 tem = SSA_VAL (tem);
3223 return is_gimple_min_invariant (tem);
3224
726a989a
RB
3225 default:
3226 gcc_unreachable ();
3227 }
3228 return false;
3229}
3230
89fb70a3
DB
/* Simplify the binary expression RHS, and return the result if
   simplified.  */

static tree
simplify_binary_expression (gimple stmt)
{
  tree result = NULL_TREE;
  tree op0 = gimple_assign_rhs1 (stmt);
  tree op1 = gimple_assign_rhs2 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);

  /* This will not catch every single case we could combine, but will
     catch those with constants.  The goal here is to simultaneously
     combine constants between expressions, but avoid infinite
     expansion of expressions during simplification.  */
  op0 = vn_valueize (op0);
  if (TREE_CODE (op0) == SSA_NAME
      && (VN_INFO (op0)->has_constants
	  || TREE_CODE_CLASS (code) == tcc_comparison
	  || code == COMPLEX_EXPR))
    op0 = vn_get_expr_for (op0);

  op1 = vn_valueize (op1);
  if (TREE_CODE (op1) == SSA_NAME
      && (VN_INFO (op1)->has_constants
	  || code == COMPLEX_EXPR))
    op1 = vn_get_expr_for (op1);

  /* Pointer plus constant can be represented as invariant address.
     Do so to allow further propagation, see also tree forwprop.  */
  if (code == POINTER_PLUS_EXPR
      && tree_fits_uhwi_p (op1)
      && TREE_CODE (op0) == ADDR_EXPR
      && is_gimple_min_invariant (op0))
    return build_invariant_address (TREE_TYPE (op0),
				    TREE_OPERAND (op0, 0),
				    tree_to_uhwi (op1));

  /* Avoid folding if nothing changed.  */
  if (op0 == gimple_assign_rhs1 (stmt)
      && op1 == gimple_assign_rhs2 (stmt))
    return NULL_TREE;

  /* Defer overflow warnings: they are only emitted if the folded form
     is actually usable as a GIMPLE rhs (checked below).  */
  fold_defer_overflow_warnings ();

  result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
  if (result)
    STRIP_USELESS_TYPE_CONVERSION (result);

  fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
				  stmt, 0);

  /* Make sure result is not a complex expression consisting
     of operators of operators (IE (a + b) + (a + c))
     Otherwise, we will end up with unbounded expressions if
     fold does anything at all.  */
  if (result && valid_gimple_rhs_p (result))
    return result;

  return NULL_TREE;
}
3292
eb2c3940
RG
/* Simplify the unary expression RHS, and return the result if
   simplified.  */

static tree
simplify_unary_expression (gimple stmt)
{
  tree result = NULL_TREE;
  tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);

  /* We handle some tcc_reference codes here that are all
     GIMPLE_ASSIGN_SINGLE codes.  For those the interesting operand is
     inside the reference, so strip one level.  */
  if (code == REALPART_EXPR
      || code == IMAGPART_EXPR
      || code == VIEW_CONVERT_EXPR
      || code == BIT_FIELD_REF)
    op0 = TREE_OPERAND (op0, 0);

  orig_op0 = op0;
  op0 = vn_valueize (op0);
  if (TREE_CODE (op0) == SSA_NAME)
    {
      if (VN_INFO (op0)->has_constants)
	op0 = vn_get_expr_for (op0);
      else if (CONVERT_EXPR_CODE_P (code)
	       || code == REALPART_EXPR
	       || code == IMAGPART_EXPR
	       || code == VIEW_CONVERT_EXPR
	       || code == BIT_FIELD_REF)
	{
	  /* We want to do tree-combining on conversion-like expressions.
	     Make sure we feed only SSA_NAMEs or constants to fold though.  */
	  tree tem = vn_get_expr_for (op0);
	  if (UNARY_CLASS_P (tem)
	      || BINARY_CLASS_P (tem)
	      || TREE_CODE (tem) == VIEW_CONVERT_EXPR
	      || TREE_CODE (tem) == SSA_NAME
	      || TREE_CODE (tem) == CONSTRUCTOR
	      || is_gimple_min_invariant (tem))
	    op0 = tem;
	}
    }

  /* Avoid folding if nothing changed, but remember the expression.  */
  if (op0 == orig_op0)
    return NULL_TREE;

  if (code == BIT_FIELD_REF)
    {
      /* BIT_FIELD_REF needs its size and position operands too, so
	 fold it as a ternary.  */
      tree rhs = gimple_assign_rhs1 (stmt);
      result = fold_ternary (BIT_FIELD_REF, TREE_TYPE (rhs),
			     op0, TREE_OPERAND (rhs, 1), TREE_OPERAND (rhs, 2));
    }
  else
    result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      /* Only accept results that are valid as a GIMPLE rhs; anything
	 more complex could blow up expression sizes.  */
      if (valid_gimple_rhs_p (result))
	return result;
    }

  return NULL_TREE;
}
3357
89fb70a3
DB
/* Try to simplify RHS using equivalences and constant folding.  */

static tree
try_to_simplify (gimple stmt)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree tem;

  /* For stores we can end up simplifying a SSA_NAME rhs.  Just return
     in this case, there is no point in doing extra work.  */
  if (code == SSA_NAME)
    return NULL_TREE;

  /* First try constant folding based on our current lattice.  */
  tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize);
  if (tem
      && (TREE_CODE (tem) == SSA_NAME
	  || is_gimple_min_invariant (tem)))
    return tem;

  /* If that didn't work try combining multiple statements.  */
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      /* Fallthrough for some unary codes that can operate on registers.
	 Other reference codes are not handled here at all.  */
      if (!(code == REALPART_EXPR
	    || code == IMAGPART_EXPR
	    || code == VIEW_CONVERT_EXPR
	    || code == BIT_FIELD_REF))
	break;
      /* We could do a little more with unary ops, if they expand
	 into binary ops, but it's debatable whether it is worth it.  */
    case tcc_unary:
      return simplify_unary_expression (stmt);

    case tcc_comparison:
    case tcc_binary:
      return simplify_binary_expression (stmt);

    default:
      break;
    }

  return NULL_TREE;
}
3403
/* Visit and value number USE, return true if the value number
   changed.  Central dispatcher of the SCC iteration: decides per
   defining statement kind (default def, PHI, assignment, call) how to
   value-number, trying simplification first where possible.  */

static bool
visit_use (tree use)
{
  bool changed = false;
  gimple stmt = SSA_NAME_DEF_STMT (use);

  mark_use_processed (use);

  gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
  if (dump_file && (dump_flags & TDF_DETAILS)
      && !SSA_NAME_IS_DEFAULT_DEF (use))
    {
      fprintf (dump_file, "Value numbering ");
      print_generic_expr (dump_file, use, 0);
      fprintf (dump_file, " stmt = ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  /* Handle uninitialized uses.  */
  if (SSA_NAME_IS_DEFAULT_DEF (use))
    changed = set_ssa_val_to (use, use);
  else
    {
      if (gimple_code (stmt) == GIMPLE_PHI)
	changed = visit_phi (stmt);
      else if (gimple_has_volatile_ops (stmt))
	/* Volatile accesses must not be value-numbered; everything the
	   statement defines varies.  */
	changed = defs_to_varying (stmt);
      else if (is_gimple_assign (stmt))
	{
	  enum tree_code code = gimple_assign_rhs_code (stmt);
	  tree lhs = gimple_assign_lhs (stmt);
	  tree rhs1 = gimple_assign_rhs1 (stmt);
	  tree simplified;

	  /* Shortcut for copies.  Simplifying copies is pointless,
	     since we copy the expression and value they represent.  */
	  if (code == SSA_NAME
	      && TREE_CODE (lhs) == SSA_NAME)
	    {
	      changed = visit_copy (lhs, rhs1);
	      goto done;
	    }
	  simplified = try_to_simplify (stmt);
	  if (simplified)
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "RHS ");
		  print_gimple_expr (dump_file, stmt, 0, 0);
		  fprintf (dump_file, " simplified to ");
		  print_generic_expr (dump_file, simplified, 0);
		  if (TREE_CODE (lhs) == SSA_NAME)
		    fprintf (dump_file, " has constants %d\n",
			     expr_has_constants (simplified));
		  else
		    fprintf (dump_file, "\n");
		}
	    }
	  /* Setting value numbers to constants will occasionally
	     screw up phi congruence because constants are not
	     uniquely associated with a single ssa name that can be
	     looked up.  */
	  if (simplified
	      && is_gimple_min_invariant (simplified)
	      && TREE_CODE (lhs) == SSA_NAME)
	    {
	      VN_INFO (lhs)->expr = simplified;
	      VN_INFO (lhs)->has_constants = true;
	      changed = set_ssa_val_to (lhs, simplified);
	      goto done;
	    }
	  else if (simplified
		   && TREE_CODE (simplified) == SSA_NAME
		   && TREE_CODE (lhs) == SSA_NAME)
	    {
	      changed = visit_copy (lhs, simplified);
	      goto done;
	    }
	  else if (simplified)
	    {
	      if (TREE_CODE (lhs) == SSA_NAME)
		{
		  VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
		  /* We have to unshare the expression or else
		     valuizing may change the IL stream.  */
		  VN_INFO (lhs)->expr = unshare_expr (simplified);
		}
	    }
	  else if (stmt_has_constants (stmt)
		   && TREE_CODE (lhs) == SSA_NAME)
	    VN_INFO (lhs)->has_constants = true;
	  else if (TREE_CODE (lhs) == SSA_NAME)
	    {
	      /* We reset expr and constantness here because we may
		 have been value numbering optimistically, and
		 iterating.  They may become non-constant in this case,
		 even if they were optimistically constant.  */

	      VN_INFO (lhs)->has_constants = false;
	      VN_INFO (lhs)->expr = NULL_TREE;
	    }

	  if ((TREE_CODE (lhs) == SSA_NAME
	       /* We can substitute SSA_NAMEs that are live over
		  abnormal edges with their constant value.  */
	       && !(gimple_assign_copy_p (stmt)
		    && is_gimple_min_invariant (rhs1))
	       && !(simplified
		    && is_gimple_min_invariant (simplified))
	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
	      /* Stores or copies from SSA_NAMEs that are live over
		 abnormal edges are a problem.  */
	      || (code == SSA_NAME
		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
	    changed = defs_to_varying (stmt);
	  else if (REFERENCE_CLASS_P (lhs)
		   || DECL_P (lhs))
	    changed = visit_reference_op_store (lhs, rhs1, stmt);
	  else if (TREE_CODE (lhs) == SSA_NAME)
	    {
	      if ((gimple_assign_copy_p (stmt)
		   && is_gimple_min_invariant (rhs1))
		  || (simplified
		      && is_gimple_min_invariant (simplified)))
		{
		  VN_INFO (lhs)->has_constants = true;
		  if (simplified)
		    changed = set_ssa_val_to (lhs, simplified);
		  else
		    changed = set_ssa_val_to (lhs, rhs1);
		}
	      else
		{
		  /* First try to lookup the simplified expression.  */
		  if (simplified)
		    {
		      enum gimple_rhs_class rhs_class;


		      rhs_class = get_gimple_rhs_class (TREE_CODE (simplified));
		      if ((rhs_class == GIMPLE_UNARY_RHS
			   || rhs_class == GIMPLE_BINARY_RHS
			   || rhs_class == GIMPLE_TERNARY_RHS)
			  && valid_gimple_rhs_p (simplified))
			{
			  tree result = vn_nary_op_lookup (simplified, NULL);
			  if (result)
			    {
			      changed = set_ssa_val_to (lhs, result);
			      goto done;
			    }
			}
		    }

		  /* Otherwise visit the original statement.  */
		  switch (vn_get_stmt_kind (stmt))
		    {
		    case VN_NARY:
		      changed = visit_nary_op (lhs, stmt);
		      break;
		    case VN_REFERENCE:
		      changed = visit_reference_op_load (lhs, rhs1, stmt);
		      break;
		    default:
		      changed = defs_to_varying (stmt);
		      break;
		    }
		}
	    }
	  else
	    changed = defs_to_varying (stmt);
	}
      else if (is_gimple_call (stmt))
	{
	  tree lhs = gimple_call_lhs (stmt);
	  if (lhs && TREE_CODE (lhs) == SSA_NAME)
	    {
	      /* Try constant folding based on our current lattice.  */
	      tree simplified = gimple_fold_stmt_to_constant_1 (stmt,
								vn_valueize);
	      if (simplified)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "call ");
		      print_gimple_expr (dump_file, stmt, 0, 0);
		      fprintf (dump_file, " simplified to ");
		      print_generic_expr (dump_file, simplified, 0);
		      if (TREE_CODE (lhs) == SSA_NAME)
			fprintf (dump_file, " has constants %d\n",
				 expr_has_constants (simplified));
		      else
			fprintf (dump_file, "\n");
		    }
		}
	      /* Setting value numbers to constants will occasionally
		 screw up phi congruence because constants are not
		 uniquely associated with a single ssa name that can be
		 looked up.  */
	      if (simplified
		  && is_gimple_min_invariant (simplified))
		{
		  VN_INFO (lhs)->expr = simplified;
		  VN_INFO (lhs)->has_constants = true;
		  changed = set_ssa_val_to (lhs, simplified);
		  /* The call is gone; its memory effect collapses to
		     the incoming memory state.  */
		  if (gimple_vdef (stmt))
		    changed |= set_ssa_val_to (gimple_vdef (stmt),
					       gimple_vuse (stmt));
		  goto done;
		}
	      else if (simplified
		       && TREE_CODE (simplified) == SSA_NAME)
		{
		  changed = visit_copy (lhs, simplified);
		  if (gimple_vdef (stmt))
		    changed |= set_ssa_val_to (gimple_vdef (stmt),
					       gimple_vuse (stmt));
		  goto done;
		}
	      else
		{
		  if (stmt_has_constants (stmt))
		    VN_INFO (lhs)->has_constants = true;
		  else
		    {
		      /* We reset expr and constantness here because we may
			 have been value numbering optimistically, and
			 iterating.  They may become non-constant in this case,
			 even if they were optimistically constant.  */
		      VN_INFO (lhs)->has_constants = false;
		      VN_INFO (lhs)->expr = NULL_TREE;
		    }

		  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
		    {
		      changed = defs_to_varying (stmt);
		      goto done;
		    }
		}
	    }

	  if (!gimple_call_internal_p (stmt)
	      && (/* Calls to the same function with the same vuse
		     and the same operands do not necessarily return the same
		     value, unless they're pure or const.  */
		  gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST)
		  /* If calls have a vdef, subsequent calls won't have
		     the same incoming vuse.  So, if 2 calls with vdef have the
		     same vuse, we know they're not subsequent.
		     We can value number 2 calls to the same function with the
		     same vuse and the same operands which are not subsequent
		     the same, because there is no code in the program that can
		     compare the 2 values...  */
		  || (gimple_vdef (stmt)
		      /* ... unless the call returns a pointer which does
			 not alias with anything else.  In which case the
			 information that the values are distinct are encoded
			 in the IL.  */
		      && !(gimple_call_return_flags (stmt) & ERF_NOALIAS))))
	    changed = visit_reference_op_call (lhs, stmt);
	  else
	    changed = defs_to_varying (stmt);
	}
      else
	changed = defs_to_varying (stmt);
    }
 done:
  return changed;
}
3676
3677/* Compare two operands by reverse postorder index */
3678
3679static int
3680compare_ops (const void *pa, const void *pb)
3681{
3682 const tree opa = *((const tree *)pa);
3683 const tree opb = *((const tree *)pb);
726a989a
RB
3684 gimple opstmta = SSA_NAME_DEF_STMT (opa);
3685 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
89fb70a3
DB
3686 basic_block bba;
3687 basic_block bbb;
3688
726a989a 3689 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3d8fa148 3690 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
726a989a 3691 else if (gimple_nop_p (opstmta))
89fb70a3 3692 return -1;
726a989a 3693 else if (gimple_nop_p (opstmtb))
89fb70a3
DB
3694 return 1;
3695
726a989a
RB
3696 bba = gimple_bb (opstmta);
3697 bbb = gimple_bb (opstmtb);
89fb70a3
DB
3698
3699 if (!bba && !bbb)
3d8fa148 3700 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
89fb70a3
DB
3701 else if (!bba)
3702 return -1;
3703 else if (!bbb)
3704 return 1;
3705
3706 if (bba == bbb)
3707 {
726a989a
RB
3708 if (gimple_code (opstmta) == GIMPLE_PHI
3709 && gimple_code (opstmtb) == GIMPLE_PHI)
3d8fa148 3710 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
726a989a 3711 else if (gimple_code (opstmta) == GIMPLE_PHI)
89fb70a3 3712 return -1;
726a989a 3713 else if (gimple_code (opstmtb) == GIMPLE_PHI)
89fb70a3 3714 return 1;
3d8fa148
DK
3715 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3716 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3717 else
3718 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
89fb70a3
DB
3719 }
3720 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3721}
3722
/* Sort an array containing members of a strongly connected component
   SCC so that the members are ordered by RPO number.
   This means that when the sort is complete, iterating through the
   array will give you the members in RPO order.
   Note SCC is passed by value; the sort affects the caller's vector
   because the underlying storage is shared -- presumably vec<tree> is
   a thin wrapper over its storage (TODO confirm against vec.h).  */

static void
sort_scc (vec<tree> scc)
{
  scc.qsort (compare_ops);
}
3733
/* Insert the no longer used nary ONARY to the hash INFO.  Allocates a
   fresh copy on INFO's obstack (nary entries are variable-sized, so
   the size is derived from the operand count) and inserts it without
   computing a new value id.  */

static void
copy_nary (vn_nary_op_t onary, vn_tables_t info)
{
  size_t size = sizeof_vn_nary_op (onary->length);
  vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
					       &info->nary_obstack);
  memcpy (nary, onary, size);
  vn_nary_op_insert_into (nary, info->nary, false);
}
3745
/* Insert the no longer used phi OPHI to the hash INFO.  The copy is
   pool-allocated; OPHI's phiargs vector is reset afterwards so the
   argument storage is now owned solely by the new entry -- presumably
   to keep OPHI's teardown from freeing it (TODO confirm against the
   table-freeing code).  */

static void
copy_phi (vn_phi_t ophi, vn_tables_t info)
{
  vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
  vn_phi_s **slot;
  memcpy (phi, ophi, sizeof (*phi));
  /* Disassociate the argument vector from the old entry.  */
  ophi->phiargs.create (0);
  slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
  /* A phi being copied over must not already exist in INFO.  */
  gcc_assert (!*slot);
  *slot = phi;
}
3759
/* Insert the no longer used reference OREF to the hash INFO.  The copy
   is pool-allocated; OREF's operands vector is reset afterwards so the
   operand storage is now owned solely by the new entry -- presumably
   to keep OREF's teardown from freeing it (TODO confirm against the
   table-freeing code).  Unlike copy_phi, an existing entry in the
   destination slot is allowed and replaced.  */

static void
copy_reference (vn_reference_t oref, vn_tables_t info)
{
  vn_reference_t ref;
  vn_reference_s **slot;
  ref = (vn_reference_t) pool_alloc (info->references_pool);
  memcpy (ref, oref, sizeof (*ref));
  /* Disassociate the operand vector from the old entry.  */
  oref->operands.create (0);
  slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
  /* Replace (and free) any entry already occupying the slot.  */
  if (*slot)
    free_reference (*slot);
  *slot = ref;
}
3775
89fb70a3
DB
/* Process a strongly connected component in the SSA graph.  */

static void
process_scc (vec<tree> scc)
{
  tree var;
  unsigned int i;
  unsigned int iterations = 0;
  bool changed = true;
  vn_nary_op_iterator_type hin;
  vn_phi_iterator_type hip;
  vn_reference_iterator_type hir;
  vn_nary_op_t nary;
  vn_phi_t phi;
  vn_reference_t ref;

  /* If the SCC has a single member, just visit it.  */
  if (scc.length () == 1)
    {
      tree use = scc[0];
      if (VN_INFO (use)->use_processed)
	return;
      /* We need to make sure it doesn't form a cycle itself, which can
	 happen for self-referential PHI nodes.  In that case we would
	 end up inserting an expression with VN_TOP operands into the
	 valid table which makes us derive bogus equivalences later.
	 The cheapest way to check this is to assume it for all PHI nodes.  */
      if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
	/* Fallthru to iteration.  */ ;
      else
	{
	  visit_use (use);
	  return;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    print_scc (dump_file, scc);

  /* Iterate over the SCC with the optimistic table until it stops
     changing, i.e. until the value numbers reach a fixpoint.  */
  current_info = optimistic_info;
  while (changed)
    {
      changed = false;
      iterations++;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Starting iteration %d\n", iterations);
      /* As we are value-numbering optimistically we have to
	 clear the expression tables and the simplified expressions
	 in each iteration until we converge.  */
      optimistic_info->nary->empty ();
      optimistic_info->phis->empty ();
      optimistic_info->references->empty ();
      /* The obstack has no per-object free; release it wholesale and
	 re-initialize for the next iteration.  */
      obstack_free (&optimistic_info->nary_obstack, NULL);
      gcc_obstack_init (&optimistic_info->nary_obstack);
      empty_alloc_pool (optimistic_info->phis_pool);
      empty_alloc_pool (optimistic_info->references_pool);
      FOR_EACH_VEC_ELT (scc, i, var)
	VN_INFO (var)->expr = NULL_TREE;
      FOR_EACH_VEC_ELT (scc, i, var)
	changed |= visit_use (var);
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
  statistics_histogram_event (cfun, "SCC iterations", iterations);

  /* Finally, copy the contents of the no longer used optimistic
     table to the valid table.  */
  FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
    copy_nary (nary, valid_info);
  FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
    copy_phi (phi, valid_info);
  FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
			       ref, vn_reference_t, hir)
    copy_reference (ref, valid_info);

  current_info = valid_info;
}
3856
6be34936
RG
3857
3858/* Pop the components of the found SCC for NAME off the SCC stack
3859 and process them. Returns true if all went well, false if
3860 we run into resource limits. */
3861
3862static bool
3863extract_and_process_scc_for_name (tree name)
3864{
ef062b13 3865 auto_vec<tree> scc;
6be34936
RG
3866 tree x;
3867
3868 /* Found an SCC, pop the components off the SCC stack and
3869 process them. */
3870 do
3871 {
9771b263 3872 x = sccstack.pop ();
6be34936
RG
3873
3874 VN_INFO (x)->on_sccstack = false;
9771b263 3875 scc.safe_push (x);
6be34936
RG
3876 } while (x != name);
3877
3878 /* Bail out of SCCVN in case a SCC turns out to be incredibly large. */
9771b263 3879 if (scc.length ()
6be34936
RG
3880 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
3881 {
3882 if (dump_file)
3883 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
9771b263 3884 "SCC size %u exceeding %u\n", scc.length (),
6be34936 3885 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
f5843d08 3886
6be34936
RG
3887 return false;
3888 }
3889
9771b263 3890 if (scc.length () > 1)
6be34936
RG
3891 sort_scc (scc);
3892
6be34936
RG
3893 process_scc (scc);
3894
6be34936
RG
3895 return true;
3896}
3897
89fb70a3
DB
/* Depth first search on NAME to discover and process SCC's in the SSA
   graph.
   Execution of this algorithm relies on the fact that the SCC's are
   popped off the stack in topological order.
   Returns true if successful, false if we stopped processing SCC's due
   to resource constraints.  */

static bool
DFS (tree name)
{
  /* Explicit recursion stacks: the suspended operand iterator and the
     suspended NAME of each outer walk (Tarjan's SCC algorithm made
     iterative to avoid overflowing the native stack on deep chains).  */
  vec<ssa_op_iter> itervec = vNULL;
  vec<tree> namevec = vNULL;
  use_operand_p usep = NULL;
  gimple defstmt;
  tree use;
  ssa_op_iter iter;

start_over:
  /* SCC info */
  VN_INFO (name)->dfsnum = next_dfs_num++;
  VN_INFO (name)->visited = true;
  VN_INFO (name)->low = VN_INFO (name)->dfsnum;

  sccstack.safe_push (name);
  VN_INFO (name)->on_sccstack = true;
  defstmt = SSA_NAME_DEF_STMT (name);

  /* Recursively DFS on our operands, looking for SCC's.  */
  if (!gimple_nop_p (defstmt))
    {
      /* Push a new iterator.  */
      if (gimple_code (defstmt) == GIMPLE_PHI)
	usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
      else
	usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
    }
  else
    clear_and_done_ssa_iter (&iter);

  while (1)
    {
      /* If we are done processing uses of a name, go up the stack
	 of iterators and process SCCs as we found them.  */
      if (op_iter_done (&iter))
	{
	  /* See if we found an SCC: NAME is an SCC root iff its low
	     link still equals its own DFS number.  */
	  if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
	    if (!extract_and_process_scc_for_name (name))
	      {
		namevec.release ();
		itervec.release ();
		return false;
	      }

	  /* Check if we are done.  */
	  if (namevec.is_empty ())
	    {
	      namevec.release ();
	      itervec.release ();
	      return true;
	    }

	  /* Restore the last use walker and continue walking there.  */
	  use = name;
	  name = namevec.pop ();
	  memcpy (&iter, &itervec.last (),
		  sizeof (ssa_op_iter));
	  itervec.pop ();
	  goto continue_walking;
	}

      use = USE_FROM_PTR (usep);

      /* Since we handle phi nodes, we will sometimes get
	 invariants in the use expression.  */
      if (TREE_CODE (use) == SSA_NAME)
	{
	  if (! (VN_INFO (use)->visited))
	    {
	      /* Recurse by pushing the current use walking state on
		 the stack and starting over.  */
	      itervec.safe_push (iter);
	      namevec.safe_push (name);
	      name = use;
	      goto start_over;

	      /* Resume point after the inner walk on USE has finished;
		 fold USE's low link into NAME's.  */
continue_walking:
	      VN_INFO (name)->low = MIN (VN_INFO (name)->low,
					 VN_INFO (use)->low);
	    }
	  /* A back edge to something still on the SCC stack lowers
	     NAME's low link to USE's DFS number.  */
	  if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
	      && VN_INFO (use)->on_sccstack)
	    {
	      VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
					 VN_INFO (name)->low);
	    }
	}

      usep = op_iter_next_use (&iter);
    }
}
3999
89fb70a3
DB
4000/* Allocate a value number table. */
4001
4002static void
4003allocate_vn_table (vn_tables_t table)
4004{
c203e8a7
TS
4005 table->phis = new vn_phi_table_type (23);
4006 table->nary = new vn_nary_op_table_type (23);
4007 table->references = new vn_reference_table_type (23);
89fb70a3 4008
49a1fb2d 4009 gcc_obstack_init (&table->nary_obstack);
89fb70a3
DB
4010 table->phis_pool = create_alloc_pool ("VN phis",
4011 sizeof (struct vn_phi_s),
4012 30);
4013 table->references_pool = create_alloc_pool ("VN references",
4014 sizeof (struct vn_reference_s),
4015 30);
4016}
4017
4018/* Free a value number table. */
4019
4020static void
4021free_vn_table (vn_tables_t table)
4022{
c203e8a7
TS
4023 delete table->phis;
4024 table->phis = NULL;
4025 delete table->nary;
4026 table->nary = NULL;
4027 delete table->references;
4028 table->references = NULL;
49a1fb2d 4029 obstack_free (&table->nary_obstack, NULL);
89fb70a3
DB
4030 free_alloc_pool (table->phis_pool);
4031 free_alloc_pool (table->references_pool);
4032}
4033
/* Initialize the global state used by SCC value numbering: the SCC
   stack, the per-SSA-name aux info, the RPO numbering of blocks, the
   VN_TOP lattice value and the valid/optimistic tables.  */

static void
init_scc_vn (void)
{
  size_t i;
  int j;
  int *rpo_numbers_temp;

  calculate_dominance_info (CDI_DOMINATORS);
  sccstack.create (0);
  constant_to_value_id = new hash_table<vn_constant_hasher> (23);

  constant_value_ids = BITMAP_ALLOC (NULL);

  next_dfs_num = 1;
  next_value_id = 1;

  vn_ssa_aux_table.create (num_ssa_names + 1);
  /* VEC_alloc doesn't actually grow it to the right size, it just
     preallocates the space to do so.  */
  vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
  gcc_obstack_init (&vn_ssa_aux_obstack);

  shared_lookup_phiargs.create (0);
  shared_lookup_references.create (0);
  rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
  rpo_numbers_temp =
    XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
  pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);

  /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
     the i'th block in RPO order is bb.  We want to map bb's to RPO
     numbers, so we need to rearrange this array.  */
  for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
    rpo_numbers[rpo_numbers_temp[j]] = j;

  XDELETE (rpo_numbers_temp);

  /* VN_TOP is a distinguished tree used as the "undefined" lattice
     value; it only needs to be a unique node.  */
  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");

  /* Create the VN_INFO structures, and initialize value numbers to
     TOP.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
	{
	  VN_INFO_GET (name)->valnum = VN_TOP;
	  VN_INFO (name)->expr = NULL_TREE;
	  VN_INFO (name)->value_id = 0;
	}
    }

  renumber_gimple_stmt_uids ();

  /* Create the valid and optimistic value numbering tables.  */
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info);
  optimistic_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (optimistic_info);
}
4094
/* Tear down all global state allocated by init_scc_vn and during
   value numbering.  */

void
free_scc_vn (void)
{
  size_t i;

  delete constant_to_value_id;
  constant_to_value_id = NULL;
  BITMAP_FREE (constant_value_ids);
  shared_lookup_phiargs.release ();
  shared_lookup_references.release ();
  XDELETEVEC (rpo_numbers);

  /* Release SSA names that value numbering itself created but that
     were never inserted into the IL.  This must happen before the
     aux obstack is freed since needs_insertion lives there.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name
	  && VN_INFO (name)->needs_insertion)
	release_ssa_name (name);
    }
  obstack_free (&vn_ssa_aux_obstack, NULL);
  vn_ssa_aux_table.release ();

  sccstack.release ();
  free_vn_table (valid_info);
  XDELETE (valid_info);
  free_vn_table (optimistic_info);
  XDELETE (optimistic_info);
}
4123
9ca966ca 4124/* Set *ID according to RESULT. */
9ad6bebe
NF
4125
4126static void
4127set_value_id_for_result (tree result, unsigned int *id)
4128{
9ca966ca
RB
4129 if (result && TREE_CODE (result) == SSA_NAME)
4130 *id = VN_INFO (result)->value_id;
4131 else if (result && is_gimple_min_invariant (result))
4132 *id = get_or_alloc_constant_value_id (result);
4133 else
4134 *id = get_next_value_id ();
9ad6bebe
NF
4135}
4136
caf55296 4137/* Set the value ids in the valid hash tables. */
c9145754
DB
4138
4139static void
4140set_hashtable_value_ids (void)
4141{
bf190e8d
LC
4142 vn_nary_op_iterator_type hin;
4143 vn_phi_iterator_type hip;
4144 vn_reference_iterator_type hir;
c9145754
DB
4145 vn_nary_op_t vno;
4146 vn_reference_t vr;
4147 vn_phi_t vp;
caf55296 4148
c9145754
DB
4149 /* Now set the value ids of the things we had put in the hash
4150 table. */
4151
c203e8a7 4152 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
9ad6bebe 4153 set_value_id_for_result (vno->result, &vno->value_id);
c9145754 4154
c203e8a7 4155 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
9ad6bebe 4156 set_value_id_for_result (vp->result, &vp->value_id);
c9145754 4157
c203e8a7
TS
4158 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4159 hir)
9ad6bebe 4160 set_value_id_for_result (vr->result, &vr->value_id);
c9145754
DB
4161}
4162
a764d660
RB
/* Dominator-order walker that value-numbers the SSA uses of each
   block-ending control statement and clears EDGE_EXECUTABLE on
   outgoing edges proven not taken.  */

class cond_dom_walker : public dom_walker
{
public:
  cond_dom_walker () : dom_walker (CDI_DOMINATORS), fail (false) {}

  virtual void before_dom_children (basic_block);

  /* Set when DFS bailed out due to resource limits; the walk then
     becomes a no-op for the remaining blocks.  */
  bool fail;
};
4172
/* Per-block hook: decide reachability of BB from already-processed
   predecessors, value-number the operands of its controlling
   statement, and prune outgoing edges whose condition folds.  */

void
cond_dom_walker::before_dom_children (basic_block bb)
{
  edge e;
  edge_iterator ei;

  if (fail)
    return;

  /* If any of the predecessor edges that do not come from blocks dominated
     by us are still marked as possibly executable consider this block
     reachable.  */
  bool reachable = bb == ENTRY_BLOCK_PTR_FOR_FN (cfun);
  FOR_EACH_EDGE (e, ei, bb->preds)
    if (!dominated_by_p (CDI_DOMINATORS, e->src, bb))
      reachable |= (e->flags & EDGE_EXECUTABLE);

  /* If the block is not reachable all outgoing edges are not
     executable.  */
  if (!reachable)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Marking all outgoing edges of unreachable "
		 "BB %d as not executable\n", bb->index);

      FOR_EACH_EDGE (e, ei, bb->succs)
	e->flags &= ~EDGE_EXECUTABLE;
      return;
    }

  gimple stmt = last_stmt (bb);
  if (!stmt)
    return;

  /* Only conditional control flow can rule out successor edges.  */
  enum gimple_code code = gimple_code (stmt);
  if (code != GIMPLE_COND
      && code != GIMPLE_SWITCH
      && code != GIMPLE_GOTO)
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value-numbering operands of stmt ending BB %d: ",
	       bb->index);
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  /* Value-number the last stmts SSA uses.  */
  ssa_op_iter i;
  tree op;
  FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_USE)
    if (VN_INFO (op)->visited == false
	&& !DFS (op))
      {
	fail = true;
	return;
      }

  /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
     if value-numbering can prove they are not reachable.  Handling
     computed gotos is also possible.  */
  tree val;
  switch (code)
    {
    case GIMPLE_COND:
      {
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	/* Work hard in computing the condition and take into account
	   the valueization of the defining stmt.  */
	if (TREE_CODE (lhs) == SSA_NAME)
	  lhs = vn_get_expr_for (lhs);
	if (TREE_CODE (rhs) == SSA_NAME)
	  rhs = vn_get_expr_for (rhs);
	val = fold_binary (gimple_cond_code (stmt),
			   boolean_type_node, lhs, rhs);
	break;
      }
    case GIMPLE_SWITCH:
      val = gimple_switch_index (stmt);
      break;
    case GIMPLE_GOTO:
      val = gimple_goto_dest (stmt);
      break;
    default:
      gcc_unreachable ();
    }
  if (!val)
    return;

  edge taken = find_taken_edge (bb, vn_valueize (val));
  if (!taken)
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
	     "not executable\n", bb->index, bb->index, taken->dest->index);

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e != taken)
      e->flags &= ~EDGE_EXECUTABLE;
}
4275
/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.  DEFAULT_VN_WALK_KIND_ specifies
   how we use the alias oracle walking during the VN process.  */

bool
run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
{
  basic_block bb;
  size_t i;
  tree param;

  default_vn_walk_kind = default_vn_walk_kind_;

  init_scc_vn ();
  current_info = valid_info;

  /* Incoming parameter defaults value-number to themselves.  */
  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    {
      tree def = ssa_default_def (cfun, param);
      if (def)
	{
	  VN_INFO (def)->visited = true;
	  VN_INFO (def)->valnum = def;
	}
    }

  /* Mark all edges as possibly executable.  */
  FOR_ALL_BB_FN (bb, cfun)
    {
      edge_iterator ei;
      edge e;
      FOR_EACH_EDGE (e, ei, bb->succs)
	e->flags |= EDGE_EXECUTABLE;
    }

  /* Walk all blocks in dominator order, value-numbering the last stmts
     SSA uses and decide whether outgoing edges are not executable.  */
  cond_dom_walker walker;
  walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  if (walker.fail)
    {
      free_scc_vn ();
      return false;
    }

  /* Value-number remaining SSA names.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      if (name
	  && VN_INFO (name)->visited == false
	  && !has_zero_uses (name))
	if (!DFS (name))
	  {
	    free_scc_vn ();
	    return false;
	  }
    }

  /* Initialize the value ids.  Names that value-number to themselves
     or to VN_TOP get a fresh id; constants share a per-constant id.  */

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
	continue;
      info = VN_INFO (name);
      if (info->valnum == name
	  || info->valnum == VN_TOP)
	info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
	info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate value ids along valnum chains so a name shares the id
     of its value leader.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
	continue;
      info = VN_INFO (name);
      if (TREE_CODE (info->valnum) == SSA_NAME
	  && info->valnum != name
	  && info->value_id != VN_INFO (info->valnum)->value_id)
	info->value_id = VN_INFO (info->valnum)->value_id;
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
	{
	  tree name = ssa_name (i);
	  if (name
	      && VN_INFO (name)->visited
	      && SSA_VAL (name) != name)
	    {
	      print_generic_expr (dump_file, name, 0);
	      fprintf (dump_file, " = ");
	      print_generic_expr (dump_file, SSA_VAL (name), 0);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  return true;
}
c9145754
DB
4389
4390/* Return the maximum value id we have ever seen. */
4391
4392unsigned int
b8698a0f 4393get_max_value_id (void)
c9145754
DB
4394{
4395 return next_value_id;
4396}
4397
4398/* Return the next unique value id. */
4399
4400unsigned int
4401get_next_value_id (void)
4402{
4403 return next_value_id++;
4404}
4405
4406
330e765e 4407/* Compare two expressions E1 and E2 and return true if they are equal. */
c9145754
DB
4408
4409bool
4410expressions_equal_p (tree e1, tree e2)
4411{
330e765e 4412 /* The obvious case. */
c9145754
DB
4413 if (e1 == e2)
4414 return true;
4415
330e765e
EB
4416 /* If only one of them is null, they cannot be equal. */
4417 if (!e1 || !e2)
4418 return false;
4419
330e765e
EB
4420 /* Now perform the actual comparison. */
4421 if (TREE_CODE (e1) == TREE_CODE (e2)
4422 && operand_equal_p (e1, e2, OEP_PURE_SAME))
c9145754
DB
4423 return true;
4424
4425 return false;
4426}
4427
890065bf
RG
4428
4429/* Return true if the nary operation NARY may trap. This is a copy
4430 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
4431
4432bool
4433vn_nary_may_trap (vn_nary_op_t nary)
4434{
4435 tree type;
6141b7db 4436 tree rhs2 = NULL_TREE;
890065bf
RG
4437 bool honor_nans = false;
4438 bool honor_snans = false;
4439 bool fp_operation = false;
4440 bool honor_trapv = false;
4441 bool handled, ret;
4442 unsigned i;
4443
4444 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
4445 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
4446 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
4447 {
4448 type = nary->type;
4449 fp_operation = FLOAT_TYPE_P (type);
4450 if (fp_operation)
4451 {
4452 honor_nans = flag_trapping_math && !flag_finite_math_only;
4453 honor_snans = flag_signaling_nans != 0;
4454 }
4455 else if (INTEGRAL_TYPE_P (type)
4456 && TYPE_OVERFLOW_TRAPS (type))
4457 honor_trapv = true;
4458 }
6141b7db
RG
4459 if (nary->length >= 2)
4460 rhs2 = nary->op[1];
890065bf
RG
4461 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
4462 honor_trapv,
4463 honor_nans, honor_snans, rhs2,
4464 &handled);
4465 if (handled
4466 && ret)
4467 return true;
4468
4469 for (i = 0; i < nary->length; ++i)
4470 if (tree_could_trap_p (nary->op[i]))
4471 return true;
4472
4473 return false;
4474}
This page took 4.56029 seconds and 5 git commands to generate.