/* Alias analysis for GNU C
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
   Free Software Foundation, Inc.
   Contributed by John Carr (jfc@mit.edu).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "function.h"
#include "alias.h"
#include "emit-rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "flags.h"
#include "output.h"
#include "toplev.h"
#include "cselib.h"
#include "splay-tree.h"
#include "ggc.h"
#include "langhooks.h"
#include "timevar.h"
#include "target.h"
#include "cgraph.h"
#include "varray.h"
#include "tree-pass.h"
#include "ipa-type-escape.h"

/* The aliasing API provided here solves related but different problems:

   Say there exists (in C)

   struct X {
     struct Y y1;
     struct Z z2;
   } x1, *px1, *px2;

   struct Y y2, *py;
   struct Z z2, *pz;

   py = &px1->y1;
   px2 = &x1;

   Consider the four questions:

   Can a store to x1 interfere with px2->y1?
   Can a store to x1 interfere with px2->z2, i.e. (*px2).z2?
   Can a store to x1 change the value pointed to by py?
   Can a store to x1 change the value pointed to by pz?

   The answer to these questions can be yes, yes, yes, and maybe.

   The first two questions can be answered with a simple examination
   of the type system.  If structure X contains a field of type Y then
   a store through a pointer to an X can overwrite any field that is
   contained (recursively) in an X (unless we know that px1 != px2).

   The last two questions can be solved in the same way as the first
   two questions, but this is too conservative.  The observation is
   that in some cases we can know which (if any) fields are addressed
   and whether those addresses are used in bad ways.  This analysis
   may be language specific.  In C, arbitrary operations may be
   applied to pointers.  However, there is some indication that this
   may be too conservative for some C++ types.

   The pass ipa-type-escape does this analysis for the types whose
   instances do not escape across the compilation boundary.

   Historically in GCC, these two problems were combined and a single
   data structure was used to represent the solution to both.  We now
   have two similar but different data structures.  The data structure
   used to solve the last two questions is similar to the first, but
   does not contain the fields whose addresses are never taken.  For
   types that do escape the compilation unit, the data structures will
   have identical information.  */

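/* Illustrative sketch (not part of the original sources): for the struct X
   example above, the type-based answer to the first two questions comes
   down to alias-set queries like the ones implemented below, e.g.

     alias_sets_conflict_p (get_alias_set (x_type), get_alias_set (y_type))

   where x_type and y_type stand for hypothetical tree nodes for `struct X'
   and `struct Y'.  Because `struct Y' is a field type of `struct X', the
   two sets are recorded in a superset/subset relation and the query
   answers "may conflict".  */
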
/* The alias sets assigned to MEMs assist the back-end in determining
   which MEMs can alias which other MEMs.  In general, two MEMs in
   different alias sets cannot alias each other, with one important
   exception.  Consider something like:

     struct S { int i; double d; };

   a store to an `S' can alias something of either type `int' or type
   `double'.  (However, a store to an `int' cannot alias a `double'
   and vice versa.)  We indicate this via a tree structure that looks
   like:
                struct S
                 /   \
                /     \
              |/_     _\|
              int    double

   (The arrows are directed and point downwards.)
   In this situation we say the alias set for `struct S' is the
   `superset' and that those for `int' and `double' are `subsets'.

   To see whether two alias sets can point to the same memory, we must
   see if either alias set is a subset of the other.  We need not trace
   past immediate descendants, however, since we propagate all
   grandchildren up one level.

   Alias set zero is implicitly a superset of all other alias sets.
   However, there is no actual entry for alias set zero.  It is an
   error to attempt to explicitly construct a subset of zero.  */

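/* A minimal sketch (added for clarity, not in the original file) of how the
   `struct S' example above is encoded.  When the alias set for `struct S'
   is created, record_component_aliases (defined below) effectively does:

     HOST_WIDE_INT s_set = get_alias_set (s_type);   /* `struct S' */
     record_alias_subset (s_set, get_alias_set (integer_type_node));
     record_alias_subset (s_set, get_alias_set (double_type_node));

   s_type stands for a hypothetical tree node for `struct S'.  */
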
struct alias_set_entry GTY(())
{
  /* The alias set number, as stored in MEM_ALIAS_SET.  */
  HOST_WIDE_INT alias_set;

  /* The children of the alias set.  These are not just the immediate
     children, but, in fact, all descendants.  So, if we have:

       struct T { struct S s; float f; }

     continuing our example above, the children here will be all of
     `int', `double', `float', and `struct S'.  */
  splay_tree GTY((param1_is (int), param2_is (int))) children;

  /* Nonzero if this alias set would have a child of zero: this
     effectively makes it the same as alias set zero.  */
  int has_zero_child;
};
typedef struct alias_set_entry *alias_set_entry;

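/* Continuing the `struct T' example in the comment above (explanatory note,
   not part of the original sources): the alias_set_entry for `struct T'
   would have a children splay tree whose keys are the alias set numbers of
   `struct S', `int', `double', and `float' -- grandchildren are propagated
   up one level by record_alias_subset below -- and has_zero_child would
   remain 0.  */
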
static int rtx_equal_for_memref_p (rtx, rtx);
static rtx find_symbolic_term (rtx);
static int memrefs_conflict_p (int, rtx, int, rtx, HOST_WIDE_INT);
static void record_set (rtx, rtx, void *);
static int base_alias_check (rtx, rtx, enum machine_mode,
			     enum machine_mode);
static rtx find_base_value (rtx);
static int mems_in_disjoint_alias_sets_p (rtx, rtx);
static int insert_subset_children (splay_tree_node, void*);
static tree find_base_decl (tree);
static alias_set_entry get_alias_set_entry (HOST_WIDE_INT);
static rtx fixed_scalar_and_varying_struct_p (rtx, rtx, rtx, rtx,
					      int (*) (rtx, int));
static int aliases_everything_p (rtx);
static bool nonoverlapping_component_refs_p (tree, tree);
static tree decl_for_component_ref (tree);
static rtx adjust_offset_for_component_ref (tree, rtx);
static int nonoverlapping_memrefs_p (rtx, rtx);
static int write_dependence_p (rtx, rtx, int);

static void memory_modified_1 (rtx, rtx, void *);
static void record_alias_subset (HOST_WIDE_INT, HOST_WIDE_INT);

/* Set up all info needed to perform alias analysis on memory references.  */

/* Returns the size in bytes of the mode of X.  */
#define SIZE_FOR_MODE(X) (GET_MODE_SIZE (GET_MODE (X)))

/* Returns nonzero if MEM1 and MEM2 do not alias because they are in
   different alias sets.  We ignore alias sets in functions making use
   of variable arguments because the va_arg macros on some systems are
   not legal ANSI C.  */
#define DIFFERENT_ALIAS_SETS_P(MEM1, MEM2)			\
  mems_in_disjoint_alias_sets_p (MEM1, MEM2)

/* Cap the number of passes we make over the insns propagating alias
   information through set chains.  10 is a completely arbitrary choice.  */
#define MAX_ALIAS_LOOP_PASSES 10

/* reg_base_value[N] gives an address to which register N is related.
   If all sets after the first add or subtract to the current value
   or otherwise modify it so it does not point to a different top level
   object, reg_base_value[N] is equal to the address part of the source
   of the first set.

   A base address can be an ADDRESS, SYMBOL_REF, or LABEL_REF.  ADDRESS
   expressions represent certain special values: function arguments and
   the stack, frame, and argument pointers.

   The contents of an ADDRESS are not normally used; the mode of the
   ADDRESS determines whether the ADDRESS is a function argument or some
   other special value.  Pointer equality, not rtx_equal_p, determines whether
   two ADDRESS expressions refer to the same base address.

   The only use of the contents of an ADDRESS is for determining if the
   current function performs nonlocal memory references for the
   purposes of marking the function as a constant function.  */

static GTY(()) VEC(rtx,gc) *reg_base_value;
static rtx *new_reg_base_value;

/* We preserve a copy of the old array around to reduce the amount of
   garbage produced.  About 8% of the garbage produced was attributed
   to this array.  */
static GTY((deletable)) VEC(rtx,gc) *old_reg_base_value;

/* Static hunks of RTL used by the aliasing code; these are initialized
   once per function to avoid unnecessary RTL allocations.  */
static GTY (()) rtx static_reg_base_value[FIRST_PSEUDO_REGISTER];

#define REG_BASE_VALUE(X) \
  (REGNO (X) < VEC_length (rtx, reg_base_value) \
   ? VEC_index (rtx, reg_base_value, REGNO (X)) : 0)

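/* Explanatory sketch (not in the original sources): after
   init_alias_analysis has scanned a function containing, say,

     (set (reg 100) (symbol_ref "x"))
     (set (reg 101) (plus (reg 100) (const_int 8)))

   both REG_BASE_VALUE (reg 100) and REG_BASE_VALUE (reg 101) are the
   SYMBOL_REF for `x', so later queries can tell that neither register can
   point into an object with a different base.  The register numbers here
   are hypothetical.  */
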
/* Vector indexed by N giving the initial (unchanging) value known for
   pseudo-register N.  This array is initialized in init_alias_analysis,
   and does not change until end_alias_analysis is called.  */
static GTY((length("reg_known_value_size"))) rtx *reg_known_value;

/* Indicates number of valid entries in reg_known_value.  */
static GTY(()) unsigned int reg_known_value_size;

/* Vector recording for each reg_known_value whether it is due to a
   REG_EQUIV note.  Future passes (viz., reload) may replace the
   pseudo with the equivalent expression and so we account for the
   dependences that would be introduced if that happens.

   The REG_EQUIV notes created in assign_parms may mention the arg
   pointer, and there are explicit insns in the RTL that modify the
   arg pointer.  Thus we must ensure that such insns don't get
   scheduled across each other because that would invalidate the
   REG_EQUIV notes.  One could argue that the REG_EQUIV notes are
   wrong, but solving the problem in the scheduler will likely give
   better code, so we do it here.  */
static bool *reg_known_equiv_p;

/* True when scanning insns from the start of the rtl to the
   NOTE_INSN_FUNCTION_BEG note.  */
static bool copying_arguments;

DEF_VEC_P(alias_set_entry);
DEF_VEC_ALLOC_P(alias_set_entry,gc);

/* The vector used to store the various alias set entries.  */
static GTY (()) VEC(alias_set_entry,gc) *alias_sets;
\f
/* Returns a pointer to the alias set entry for ALIAS_SET, if there is
   such an entry, or NULL otherwise.  */

static inline alias_set_entry
get_alias_set_entry (HOST_WIDE_INT alias_set)
{
  return VEC_index (alias_set_entry, alias_sets, alias_set);
}

/* Returns nonzero if the alias sets for MEM1 and MEM2 are such that
   the two MEMs cannot alias each other.  */

static inline int
mems_in_disjoint_alias_sets_p (rtx mem1, rtx mem2)
{
  /* Perform a basic sanity check.  Namely, that there are no alias sets
     if we're not using strict aliasing.  This helps to catch bugs
     whereby someone uses PUT_CODE, but doesn't clear MEM_ALIAS_SET, or
     where a MEM is allocated in some way other than by the use of
     gen_rtx_MEM, and the MEM_ALIAS_SET is not cleared.  If we begin to
     use alias sets to indicate that spilled registers cannot alias each
     other, we might need to remove this check.  */
  gcc_assert (flag_strict_aliasing
	      || (!MEM_ALIAS_SET (mem1) && !MEM_ALIAS_SET (mem2)));

  return ! alias_sets_conflict_p (MEM_ALIAS_SET (mem1), MEM_ALIAS_SET (mem2));
}

/* Insert the NODE into the splay tree given by DATA.  Used by
   record_alias_subset via splay_tree_foreach.  */

static int
insert_subset_children (splay_tree_node node, void *data)
{
  splay_tree_insert ((splay_tree) data, node->key, node->value);

  return 0;
}

/* Return true if the first alias set is a subset of the second.  */

bool
alias_set_subset_of (HOST_WIDE_INT set1, HOST_WIDE_INT set2)
{
  alias_set_entry ase;

  /* Everything is a subset of the "aliases everything" set.  */
  if (set2 == 0)
    return true;

  /* Otherwise, check if set1 is a subset of set2.  */
  ase = get_alias_set_entry (set2);
  if (ase != 0
      && (splay_tree_lookup (ase->children,
			     (splay_tree_key) set1)))
    return true;
  return false;
}

/* Return 1 if the two specified alias sets may conflict.  */

int
alias_sets_conflict_p (HOST_WIDE_INT set1, HOST_WIDE_INT set2)
{
  alias_set_entry ase;

  /* If we have no alias set information for one of the operands, we have
     to assume it can alias anything.  */
  if (set1 == 0 || set2 == 0
      /* If the two alias sets are the same, they may alias.  */
      || set1 == set2)
    return 1;

  /* See if the first alias set is a subset of the second.  */
  ase = get_alias_set_entry (set1);
  if (ase != 0
      && (ase->has_zero_child
	  || splay_tree_lookup (ase->children,
				(splay_tree_key) set2)))
    return 1;

  /* Now do the same, but with the alias sets reversed.  */
  ase = get_alias_set_entry (set2);
  if (ase != 0
      && (ase->has_zero_child
	  || splay_tree_lookup (ase->children,
				(splay_tree_key) set1)))
    return 1;

  /* The two alias sets are distinct and neither one is the
     child of the other.  Therefore, they cannot alias.  */
  return 0;
}

/* Return 1 if the two specified alias sets might conflict, or if any subtype
   of these alias sets might conflict.  */

int
alias_sets_might_conflict_p (HOST_WIDE_INT set1, HOST_WIDE_INT set2)
{
  if (set1 == 0 || set2 == 0 || set1 == set2)
    return 1;

  return 0;
}

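/* Usage sketch (added for clarity, not in the original sources), using the
   `struct S' example from the head of the file:

     HOST_WIDE_INT s = get_alias_set (s_type);   /* `struct S' */
     HOST_WIDE_INT i = get_alias_set (integer_type_node);
     HOST_WIDE_INT d = get_alias_set (double_type_node);

     alias_sets_conflict_p (s, i);   /* nonzero: `int' is a subset of S */
     alias_sets_conflict_p (i, d);   /* zero: distinct, unrelated sets   */

   s_type is a hypothetical tree node for `struct S'.  */
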
\f
/* Return 1 if any MEM object of type T1 will always conflict (using the
   dependency routines in this file) with any MEM object of type T2.
   This is used when allocating temporary storage.  If T1 and/or T2 are
   NULL_TREE, it means we know nothing about the storage.  */

int
objects_must_conflict_p (tree t1, tree t2)
{
  HOST_WIDE_INT set1, set2;

  /* If neither has a type specified, we don't know if they'll conflict
     because we may be using them to store objects of various types, for
     example the argument and local variable areas of inlined functions.  */
  if (t1 == 0 && t2 == 0)
    return 0;

  /* If they are the same type, they must conflict.  */
  if (t1 == t2
      /* Likewise if both are volatile.  */
      || (t1 != 0 && TYPE_VOLATILE (t1) && t2 != 0 && TYPE_VOLATILE (t2)))
    return 1;

  set1 = t1 ? get_alias_set (t1) : 0;
  set2 = t2 ? get_alias_set (t2) : 0;

  /* Otherwise they conflict if they have no alias set or the same one.
     We can't simply use alias_sets_conflict_p here, because we must make
     sure that every subtype of t1 will conflict with every subtype of
     t2 for which a pair of subobjects of these respective subtypes
     overlaps on the stack.  */
  return set1 == 0 || set2 == 0 || set1 == set2;
}
\f
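/* Illustrative note (not part of the original sources): this predicate is
   what lets the stack-slot sharing code reuse one temporary slot for two
   objects only when every access to one is guaranteed to conflict with the
   other.  Two `int' temporaries (same alias set) qualify; an `int' and a
   `double' temporary (distinct, nonzero alias sets) do not.  */
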
/* T is an expression with pointer type.  Find the DECL on which this
   expression is based.  (For example, in `a[i]' this would be `a'.)
   If there is no such DECL, or a unique decl cannot be determined,
   NULL_TREE is returned.  */

static tree
find_base_decl (tree t)
{
  tree d0, d1;

  if (t == 0 || t == error_mark_node || ! POINTER_TYPE_P (TREE_TYPE (t)))
    return 0;

  /* If this is a declaration, return it.  If T is based on a restrict
     qualified decl, return that decl.  */
  if (DECL_P (t))
    {
      if (TREE_CODE (t) == VAR_DECL && DECL_BASED_ON_RESTRICT_P (t))
	t = DECL_GET_RESTRICT_BASE (t);
      return t;
    }

  /* Handle general expressions.  It would be nice to deal with
     COMPONENT_REFs here.  If we could tell that `a' and `b' were the
     same, then `a->f' and `b->f' are also the same.  */
  switch (TREE_CODE_CLASS (TREE_CODE (t)))
    {
    case tcc_unary:
      return find_base_decl (TREE_OPERAND (t, 0));

    case tcc_binary:
      /* Return 0 if the decl is found in neither operand, or if the
	 two operands disagree; otherwise return the decl that was
	 found.  */
      d0 = find_base_decl (TREE_OPERAND (t, 0));
      d1 = find_base_decl (TREE_OPERAND (t, 1));
      if (d0 == d1)
	return d0;
      else if (d0 == 0)
	return d1;
      else if (d1 == 0)
	return d0;
      else
	return 0;

    default:
      return 0;
    }
}

/* Return true if all nested component references handled by
   get_inner_reference in T are such that we should use the alias set
   provided by the object at the heart of T.

   This is true for non-addressable components (which don't have their
   own alias set), as well as components of objects in alias set zero.
   This latter point is a special case wherein we wish to override the
   alias set used by the component, but we don't have per-FIELD_DECL
   assignable alias sets.  */

bool
component_uses_parent_alias_set (tree t)
{
  while (1)
    {
      /* If we're at the end, it vacuously uses its own alias set.  */
      if (!handled_component_p (t))
	return false;

      switch (TREE_CODE (t))
	{
	case COMPONENT_REF:
	  if (DECL_NONADDRESSABLE_P (TREE_OPERAND (t, 1)))
	    return true;
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  if (TYPE_NONALIASED_COMPONENT (TREE_TYPE (TREE_OPERAND (t, 0))))
	    return true;
	  break;

	case REALPART_EXPR:
	case IMAGPART_EXPR:
	  break;

	default:
	  /* Bitfields and casts are never addressable.  */
	  return true;
	}

      t = TREE_OPERAND (t, 0);
      if (get_alias_set (TREE_TYPE (t)) == 0)
	return true;
    }
}

/* Return the alias set for T, which may be either a type or an
   expression.  Call language-specific routine for help, if needed.  */

HOST_WIDE_INT
get_alias_set (tree t)
{
  HOST_WIDE_INT set;

  /* If we're not doing any alias analysis, just assume everything
     aliases everything else.  Also return 0 if this or its type is
     an error.  */
  if (! flag_strict_aliasing || t == error_mark_node
      || (! TYPE_P (t)
	  && (TREE_TYPE (t) == 0 || TREE_TYPE (t) == error_mark_node)))
    return 0;

  /* We can be passed either an expression or a type.  This and the
     language-specific routine may make mutually-recursive calls to each other
     to figure out what to do.  At each juncture, we see if this is a tree
     that the language may need to handle specially.  First handle things that
     aren't types.  */
  if (! TYPE_P (t))
    {
      tree inner = t;

      /* Remove any nops, then give the language a chance to do
	 something with this tree before we look at it.  */
      STRIP_NOPS (t);
      set = lang_hooks.get_alias_set (t);
      if (set != -1)
	return set;

      /* First see if the actual object referenced is an INDIRECT_REF from a
	 restrict-qualified pointer or a "void *".  */
      while (handled_component_p (inner))
	{
	  inner = TREE_OPERAND (inner, 0);
	  STRIP_NOPS (inner);
	}

      /* Check for accesses through restrict-qualified pointers.  */
      if (INDIRECT_REF_P (inner))
	{
	  tree decl = find_base_decl (TREE_OPERAND (inner, 0));

	  if (decl && DECL_POINTER_ALIAS_SET_KNOWN_P (decl))
	    {
	      /* If we haven't computed the actual alias set, do it now.  */
	      if (DECL_POINTER_ALIAS_SET (decl) == -2)
		{
		  tree pointed_to_type = TREE_TYPE (TREE_TYPE (decl));

		  /* No two restricted pointers can point at the same thing.
		     However, a restricted pointer can point at the same thing
		     as an unrestricted pointer, if that unrestricted pointer
		     is based on the restricted pointer.  So, we make the
		     alias set for the restricted pointer a subset of the
		     alias set for the type pointed to by the type of the
		     decl.  */
		  HOST_WIDE_INT pointed_to_alias_set
		    = get_alias_set (pointed_to_type);

		  if (pointed_to_alias_set == 0)
		    /* It's not legal to make a subset of alias set zero.  */
		    DECL_POINTER_ALIAS_SET (decl) = 0;
		  else if (AGGREGATE_TYPE_P (pointed_to_type))
		    /* For an aggregate, we must treat the restricted
		       pointer the same as an ordinary pointer.  If we
		       were to make the type pointed to by the
		       restricted pointer a subset of the pointed-to
		       type, then we would believe that other subsets
		       of the pointed-to type (such as fields of that
		       type) do not conflict with the type pointed to
		       by the restricted pointer.  */
		    DECL_POINTER_ALIAS_SET (decl)
		      = pointed_to_alias_set;
		  else
		    {
		      DECL_POINTER_ALIAS_SET (decl) = new_alias_set ();
		      record_alias_subset (pointed_to_alias_set,
					   DECL_POINTER_ALIAS_SET (decl));
		    }
		}

	      /* We use the alias set indicated in the declaration.  */
	      return DECL_POINTER_ALIAS_SET (decl);
	    }

	  /* If we have an INDIRECT_REF via a void pointer, we don't
	     know anything about what that might alias.  Likewise if the
	     pointer is marked that way.  */
	  else if (TREE_CODE (TREE_TYPE (inner)) == VOID_TYPE
		   || (TYPE_REF_CAN_ALIAS_ALL
		       (TREE_TYPE (TREE_OPERAND (inner, 0)))))
	    return 0;
	}

      /* Otherwise, pick up the outermost object that we could have a pointer
	 to, processing conversions as above.  */
      while (component_uses_parent_alias_set (t))
	{
	  t = TREE_OPERAND (t, 0);
	  STRIP_NOPS (t);
	}

      /* If we've already determined the alias set for a decl, just return
	 it.  This is necessary for C++ anonymous unions, whose component
	 variables don't look like union members (boo!).  */
      if (TREE_CODE (t) == VAR_DECL
	  && DECL_RTL_SET_P (t) && MEM_P (DECL_RTL (t)))
	return MEM_ALIAS_SET (DECL_RTL (t));

      /* Now all we care about is the type.  */
      t = TREE_TYPE (t);
    }

  /* Variant qualifiers don't affect the alias set, so get the main
     variant.  If this is a type with a known alias set, return it.  */
  t = TYPE_MAIN_VARIANT (t);
  if (TYPE_ALIAS_SET_KNOWN_P (t))
    return TYPE_ALIAS_SET (t);

  /* See if the language has special handling for this type.  */
  set = lang_hooks.get_alias_set (t);
  if (set != -1)
    return set;

  /* There are no objects of FUNCTION_TYPE, so there's no point in
     using up an alias set for them.  (There are, of course, pointers
     and references to functions, but that's different.)  */
  else if (TREE_CODE (t) == FUNCTION_TYPE)
    set = 0;

  /* Unless the language specifies otherwise, let vector types alias
     their components.  This avoids some nasty type punning issues in
     normal usage.  And indeed lets vectors be treated more like an
     array slice.  */
  else if (TREE_CODE (t) == VECTOR_TYPE)
    set = get_alias_set (TREE_TYPE (t));

  else
    /* Otherwise make a new alias set for this type.  */
    set = new_alias_set ();

  TYPE_ALIAS_SET (t) = set;

  /* If this is an aggregate type, we must record any component aliasing
     information.  */
  if (AGGREGATE_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
    record_component_aliases (t);

  return set;
}

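/* Typical usage sketch (added for clarity, not in the original sources):
   when an expander creates a MEM for a tree expression EXP, it tags it
   roughly like

     set_mem_alias_set (mem, get_alias_set (exp));

   so that the RTL-level disambiguators below can later compare the MEMs'
   alias sets via mems_in_disjoint_alias_sets_p.  set_mem_alias_set lives in
   emit-rtl.c; the code in this file only reads MEM_ALIAS_SET.  */
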
/* Return a brand-new alias set.  */

HOST_WIDE_INT
new_alias_set (void)
{
  if (flag_strict_aliasing)
    {
      if (alias_sets == 0)
	VEC_safe_push (alias_set_entry, gc, alias_sets, 0);
      VEC_safe_push (alias_set_entry, gc, alias_sets, 0);
      return VEC_length (alias_set_entry, alias_sets) - 1;
    }
  else
    return 0;
}

/* Indicate that things in SUBSET can alias things in SUPERSET, but that
   not everything that aliases SUPERSET also aliases SUBSET.  For example,
   in C, a store to an `int' can alias a load of a structure containing an
   `int', and vice versa.  But it can't alias a load of a `double' member
   of the same structure.  Here, the structure would be the SUPERSET and
   `int' the SUBSET.  This relationship is also described in the comment at
   the beginning of this file.

   This function should be called only once per SUPERSET/SUBSET pair.

   It is illegal for SUPERSET to be zero; everything is implicitly a
   subset of alias set zero.  */

static void
record_alias_subset (HOST_WIDE_INT superset, HOST_WIDE_INT subset)
{
  alias_set_entry superset_entry;
  alias_set_entry subset_entry;

  /* It is possible in complex type situations for both sets to be the same,
     in which case we can ignore this operation.  */
  if (superset == subset)
    return;

  gcc_assert (superset);

  superset_entry = get_alias_set_entry (superset);
  if (superset_entry == 0)
    {
      /* Create an entry for the SUPERSET, so that we have a place to
	 attach the SUBSET.  */
      superset_entry = ggc_alloc (sizeof (struct alias_set_entry));
      superset_entry->alias_set = superset;
      superset_entry->children
	= splay_tree_new_ggc (splay_tree_compare_ints);
      superset_entry->has_zero_child = 0;
      VEC_replace (alias_set_entry, alias_sets, superset, superset_entry);
    }

  if (subset == 0)
    superset_entry->has_zero_child = 1;
  else
    {
      subset_entry = get_alias_set_entry (subset);
      /* If there is an entry for the subset, enter all of its children
	 (if they are not already present) as children of the SUPERSET.  */
      if (subset_entry)
	{
	  if (subset_entry->has_zero_child)
	    superset_entry->has_zero_child = 1;

	  splay_tree_foreach (subset_entry->children, insert_subset_children,
			      superset_entry->children);
	}

      /* Enter the SUBSET itself as a child of the SUPERSET.  */
      splay_tree_insert (superset_entry->children,
			 (splay_tree_key) subset, 0);
    }
}

/* Record that component types of TYPE, if any, are part of that type for
   aliasing purposes.  For record types, we only record component types
   for fields that are marked addressable.  For array types, we always
   record the component types, so the front end should not call this
   function if the individual components aren't addressable.  */

void
record_component_aliases (tree type)
{
  HOST_WIDE_INT superset = get_alias_set (type);
  tree field;

  if (superset == 0)
    return;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      if (! TYPE_NONALIASED_COMPONENT (type))
	record_alias_subset (superset, get_alias_set (TREE_TYPE (type)));
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* Recursively record aliases for the base classes, if there are any.  */
      if (TYPE_BINFO (type))
	{
	  int i;
	  tree binfo, base_binfo;

	  for (binfo = TYPE_BINFO (type), i = 0;
	       BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
	    record_alias_subset (superset,
				 get_alias_set (BINFO_TYPE (base_binfo)));
	}
      for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL && ! DECL_NONADDRESSABLE_P (field))
	  record_alias_subset (superset, get_alias_set (TREE_TYPE (field)));
      break;

    case COMPLEX_TYPE:
      record_alias_subset (superset, get_alias_set (TREE_TYPE (type)));
      break;

    default:
      break;
    }
}

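/* Worked example (not part of the original sources): for the type from the
   comment near the top of the file,

     struct T { struct S s; float f; }

   record_component_aliases makes the alias set of `struct T' a superset of
   the alias sets of `struct S' and `float'; because record_alias_subset
   copies all descendants, `int' and `double' (the children of `struct S')
   become children of `struct T' as well, giving the flattened tree shown
   in the alias_set_entry comment above.  */
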
/* Allocate an alias set for use in storing and reading from the varargs
   spill area.  */

static GTY(()) HOST_WIDE_INT varargs_set = -1;

HOST_WIDE_INT
get_varargs_alias_set (void)
{
#if 1
  /* We now lower VA_ARG_EXPR, and there's currently no way to attach the
     varargs alias set to an INDIRECT_REF (FIXME!), so we can't
     consistently use the varargs alias set for loads from the varargs
     area.  So don't use it anywhere.  */
  return 0;
#else
  if (varargs_set == -1)
    varargs_set = new_alias_set ();

  return varargs_set;
#endif
}

/* Likewise, but used for the fixed portions of the frame, e.g., register
   save areas.  */

static GTY(()) HOST_WIDE_INT frame_set = -1;

HOST_WIDE_INT
get_frame_alias_set (void)
{
  if (frame_set == -1)
    frame_set = new_alias_set ();

  return frame_set;
}

/* Inside SRC, the source of a SET, find a base address.  */

static rtx
find_base_value (rtx src)
{
  unsigned int regno;

  switch (GET_CODE (src))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return src;

    case REG:
      regno = REGNO (src);
      /* At the start of a function, argument registers have known base
	 values which may be lost later.  Returning an ADDRESS
	 expression here allows optimization based on argument values
	 even when the argument registers are used for other purposes.  */
      if (regno < FIRST_PSEUDO_REGISTER && copying_arguments)
	return new_reg_base_value[regno];

      /* If a pseudo has a known base value, return it.  Do not do this
	 for non-fixed hard regs since it can result in a circular
	 dependency chain for registers which have values at function entry.

	 The test above is not sufficient because the scheduler may move
	 a copy out of an arg reg past the NOTE_INSN_FUNCTION_BEGIN.  */
      if ((regno >= FIRST_PSEUDO_REGISTER || fixed_regs[regno])
	  && regno < VEC_length (rtx, reg_base_value))
	{
	  /* If we're inside init_alias_analysis, use new_reg_base_value
	     to reduce the number of relaxation iterations.  */
	  if (new_reg_base_value && new_reg_base_value[regno]
	      && REG_N_SETS (regno) == 1)
	    return new_reg_base_value[regno];

	  if (VEC_index (rtx, reg_base_value, regno))
	    return VEC_index (rtx, reg_base_value, regno);
	}

      return 0;

    case MEM:
      /* Check for an argument passed in memory.  Only record in the
	 copying-arguments block; it is too hard to track changes
	 otherwise.  */
      if (copying_arguments
	  && (XEXP (src, 0) == arg_pointer_rtx
	      || (GET_CODE (XEXP (src, 0)) == PLUS
		  && XEXP (XEXP (src, 0), 0) == arg_pointer_rtx)))
	return gen_rtx_ADDRESS (VOIDmode, src);
      return 0;

    case CONST:
      src = XEXP (src, 0);
      if (GET_CODE (src) != PLUS && GET_CODE (src) != MINUS)
	break;

      /* ... fall through ...  */

    case PLUS:
    case MINUS:
      {
	rtx temp, src_0 = XEXP (src, 0), src_1 = XEXP (src, 1);

	/* If either operand is a REG that is a known pointer, then it
	   is the base.  */
	if (REG_P (src_0) && REG_POINTER (src_0))
	  return find_base_value (src_0);
	if (REG_P (src_1) && REG_POINTER (src_1))
	  return find_base_value (src_1);

	/* If either operand is a REG, then see if we already have
	   a known value for it.  */
	if (REG_P (src_0))
	  {
	    temp = find_base_value (src_0);
	    if (temp != 0)
	      src_0 = temp;
	  }

	if (REG_P (src_1))
	  {
	    temp = find_base_value (src_1);
	    if (temp != 0)
	      src_1 = temp;
	  }

	/* If either base is a named object or a special address
	   (like an argument or stack reference), then use it for the
	   base term.  */
	if (src_0 != 0
	    && (GET_CODE (src_0) == SYMBOL_REF
		|| GET_CODE (src_0) == LABEL_REF
		|| (GET_CODE (src_0) == ADDRESS
		    && GET_MODE (src_0) != VOIDmode)))
	  return src_0;

	if (src_1 != 0
	    && (GET_CODE (src_1) == SYMBOL_REF
		|| GET_CODE (src_1) == LABEL_REF
		|| (GET_CODE (src_1) == ADDRESS
		    && GET_MODE (src_1) != VOIDmode)))
	  return src_1;

	/* Guess which operand is the base address:
	   If either operand is a symbol, then it is the base.  If
	   either operand is a CONST_INT, then the other is the base.  */
	if (GET_CODE (src_1) == CONST_INT || CONSTANT_P (src_0))
	  return find_base_value (src_0);
	else if (GET_CODE (src_0) == CONST_INT || CONSTANT_P (src_1))
	  return find_base_value (src_1);

	return 0;
      }

    case LO_SUM:
      /* The standard form is (lo_sum reg sym) so look only at the
	 second operand.  */
      return find_base_value (XEXP (src, 1));

    case AND:
      /* If the second operand is constant set the base
	 address to the first operand.  */
      if (GET_CODE (XEXP (src, 1)) == CONST_INT && INTVAL (XEXP (src, 1)) != 0)
	return find_base_value (XEXP (src, 0));
      return 0;

    case TRUNCATE:
      if (GET_MODE_SIZE (GET_MODE (src)) < GET_MODE_SIZE (Pmode))
	break;
      /* Fall through.  */
    case HIGH:
    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return find_base_value (XEXP (src, 0));

    case ZERO_EXTEND:
    case SIGN_EXTEND:	/* used for NT/Alpha pointers */
      {
	rtx temp = find_base_value (XEXP (src, 0));

	if (temp != 0 && CONSTANT_P (temp))
	  temp = convert_memory_address (Pmode, temp);

	return temp;
      }

    default:
      break;
    }

  return 0;
}

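/* Example (added for clarity, not in the original sources): for a source
   expression such as

     (plus (reg 100) (const_int 4))

   find_base_value ignores the constant and recurses on the register, so
   the result is whatever base `(reg 100)' already has -- a SYMBOL_REF,
   LABEL_REF, or one of the special ADDRESS expressions described above --
   or 0 if nothing is known.  The register number is hypothetical.  */
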
/* Called from init_alias_analysis indirectly through note_stores.  */

/* While scanning insns to find base values, reg_seen[N] is nonzero if
   register N has been set in this function.  */
static char *reg_seen;

/* Addresses which are known not to alias anything else are identified
   by a unique integer.  */
static int unique_id;

static void
record_set (rtx dest, rtx set, void *data ATTRIBUTE_UNUSED)
{
  unsigned regno;
  rtx src;
  int n;

  if (!REG_P (dest))
    return;

  regno = REGNO (dest);

  gcc_assert (regno < VEC_length (rtx, reg_base_value));

  /* If this spans multiple hard registers, then we must indicate that every
     register has an unusable value.  */
  if (regno < FIRST_PSEUDO_REGISTER)
    n = hard_regno_nregs[regno][GET_MODE (dest)];
  else
    n = 1;
  if (n != 1)
    {
      while (--n >= 0)
	{
	  reg_seen[regno + n] = 1;
	  new_reg_base_value[regno + n] = 0;
	}
      return;
    }

  if (set)
    {
      /* A CLOBBER wipes out any old value but does not prevent a previously
	 unset register from acquiring a base address (i.e. reg_seen is not
	 set).  */
      if (GET_CODE (set) == CLOBBER)
	{
	  new_reg_base_value[regno] = 0;
	  return;
	}
      src = SET_SRC (set);
    }
  else
    {
      if (reg_seen[regno])
	{
	  new_reg_base_value[regno] = 0;
	  return;
	}
      reg_seen[regno] = 1;
      new_reg_base_value[regno] = gen_rtx_ADDRESS (Pmode,
						   GEN_INT (unique_id++));
      return;
    }

  /* If this is not the first set of REGNO, see whether the new value
     is related to the old one.  There are two cases of interest:

     (1) The register might be assigned an entirely new value
	 that has the same base term as the original set.

     (2) The set might be a simple self-modification that
	 cannot change REGNO's base value.

     If neither case holds, reject the original base value as invalid.
     Note that the following situation is not detected:

	 extern int x, y;  int *p = &x; p += (&y-&x);

     ANSI C does not allow computing the difference of addresses
     of distinct top level objects.  */
  if (new_reg_base_value[regno] != 0
      && find_base_value (src) != new_reg_base_value[regno])
    switch (GET_CODE (src))
      {
      case LO_SUM:
      case MINUS:
	if (XEXP (src, 0) != dest && XEXP (src, 1) != dest)
	  new_reg_base_value[regno] = 0;
	break;
      case PLUS:
	/* If the value we add in the PLUS is also a valid base value,
	   this might be the actual base value, and the original value
	   an index.  */
	{
	  rtx other = NULL_RTX;

	  if (XEXP (src, 0) == dest)
	    other = XEXP (src, 1);
	  else if (XEXP (src, 1) == dest)
	    other = XEXP (src, 0);

	  if (! other || find_base_value (other))
	    new_reg_base_value[regno] = 0;
	  break;
	}
      case AND:
	if (XEXP (src, 0) != dest || GET_CODE (XEXP (src, 1)) != CONST_INT)
	  new_reg_base_value[regno] = 0;
	break;
      default:
	new_reg_base_value[regno] = 0;
	break;
      }
  /* If this is the first set of a register, record the value.  */
  else if ((regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
	   && ! reg_seen[regno] && new_reg_base_value[regno] == 0)
    new_reg_base_value[regno] = find_base_value (src);

  reg_seen[regno] = 1;
}

/* Clear alias info for a register.  This is used if an RTL transformation
   changes the value of a register.  This is used in flow by AUTO_INC_DEC
   optimizations.  We don't need to clear reg_base_value, since flow only
   changes the offset.  */

void
clear_reg_alias_info (rtx reg)
{
  unsigned int regno = REGNO (reg);

  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      regno -= FIRST_PSEUDO_REGISTER;
      if (regno < reg_known_value_size)
	{
	  reg_known_value[regno] = reg;
	  reg_known_equiv_p[regno] = false;
	}
    }
}

/* If a value is known for REGNO, return it.  */

rtx
get_reg_known_value (unsigned int regno)
{
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      regno -= FIRST_PSEUDO_REGISTER;
      if (regno < reg_known_value_size)
	return reg_known_value[regno];
    }
  return NULL;
}

/* Set it.  */

static void
set_reg_known_value (unsigned int regno, rtx val)
{
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      regno -= FIRST_PSEUDO_REGISTER;
      if (regno < reg_known_value_size)
	reg_known_value[regno] = val;
    }
}

/* Similarly for reg_known_equiv_p.  */

bool
get_reg_known_equiv_p (unsigned int regno)
{
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      regno -= FIRST_PSEUDO_REGISTER;
      if (regno < reg_known_value_size)
	return reg_known_equiv_p[regno];
    }
  return false;
}

static void
set_reg_known_equiv_p (unsigned int regno, bool val)
{
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      regno -= FIRST_PSEUDO_REGISTER;
      if (regno < reg_known_value_size)
	reg_known_equiv_p[regno] = val;
    }
}

/* Returns a canonical version of X, from the point of view of alias
   analysis.  (For example, if X is a MEM whose address is a register,
   and the register has a known value (say a SYMBOL_REF), then a MEM
   whose address is the SYMBOL_REF is returned.)  */

rtx
canon_rtx (rtx x)
{
  /* Recursively look for equivalences.  */
  if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER)
    {
      rtx t = get_reg_known_value (REGNO (x));
      if (t == x)
	return x;
      if (t)
	return canon_rtx (t);
    }

  if (GET_CODE (x) == PLUS)
    {
      rtx x0 = canon_rtx (XEXP (x, 0));
      rtx x1 = canon_rtx (XEXP (x, 1));

      if (x0 != XEXP (x, 0) || x1 != XEXP (x, 1))
	{
	  if (GET_CODE (x0) == CONST_INT)
	    return plus_constant (x1, INTVAL (x0));
	  else if (GET_CODE (x1) == CONST_INT)
	    return plus_constant (x0, INTVAL (x1));
	  return gen_rtx_PLUS (GET_MODE (x), x0, x1);
	}
    }

  /* This gives us much better alias analysis when called from
     the loop optimizer.  Note we want to leave the original
     MEM alone, but need to return the canonicalized MEM with
     all the flags with their original values.  */
  else if (MEM_P (x))
    x = replace_equiv_address_nv (x, canon_rtx (XEXP (x, 0)));

  return x;
}

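/* Example (added for clarity, not in the original sources): if
   reg_known_value records that pseudo 100 equals `(symbol_ref "x")', then

     canon_rtx ((plus (reg 100) (const_int 4)))

   folds to `(const (plus (symbol_ref "x") (const_int 4)))' via
   plus_constant, which makes the structural comparison below and the
   offset arithmetic in memrefs_conflict_p much more precise.  The register
   number is hypothetical.  */
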
/* Return 1 if X and Y are identical-looking rtx's.
   Expect that X and Y have already been canonicalized.

   We use the data in reg_known_value above to see if two registers with
   different numbers are, in fact, equivalent.  */

static int
rtx_equal_for_memref_p (rtx x, rtx y)
{
  int i;
  int j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0 && y == 0)
    return 1;
  if (x == 0 || y == 0)
    return 0;

  if (x == y)
    return 1;

  code = GET_CODE (x);
  /* Rtx's of different codes cannot be equal.  */
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
     (REG:SI x) and (REG:HI x) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* Some RTL can be compared without a recursive examination.  */
  switch (code)
    {
    case REG:
      return REGNO (x) == REGNO (y);

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case VALUE:
    case CONST_INT:
    case CONST_DOUBLE:
      /* There's no need to compare the contents of CONST_DOUBLEs or
	 CONST_INTs because pointer equality is a good enough
	 comparison for these nodes.  */
      return 0;

    default:
      break;
    }

  /* canon_rtx knows how to handle plus.  No need to canonicalize.  */
  if (code == PLUS)
    return ((rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
	     && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)))
	    || (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 1))
		&& rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 0))));
  /* For commutative operations, the RTXs match if the operands match in
     any order.  Also handle the simple binary and unary cases without a
     loop.  */
  if (COMMUTATIVE_P (x))
    {
      rtx xop0 = canon_rtx (XEXP (x, 0));
      rtx yop0 = canon_rtx (XEXP (y, 0));
      rtx yop1 = canon_rtx (XEXP (y, 1));

      return ((rtx_equal_for_memref_p (xop0, yop0)
	       && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)), yop1))
	      || (rtx_equal_for_memref_p (xop0, yop1)
		  && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)), yop0)));
    }
  else if (NON_COMMUTATIVE_P (x))
    {
      return (rtx_equal_for_memref_p (canon_rtx (XEXP (x, 0)),
				      canon_rtx (XEXP (y, 0)))
	      && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)),
					 canon_rtx (XEXP (y, 1))));
    }
  else if (UNARY_P (x))
    return rtx_equal_for_memref_p (canon_rtx (XEXP (x, 0)),
				   canon_rtx (XEXP (y, 0)));

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.

     Limit cases to types which actually appear in addresses.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'E':
	  /* Two vectors must have the same length.  */
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;

	  /* And the corresponding elements must match.  */
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (rtx_equal_for_memref_p (canon_rtx (XVECEXP (x, i, j)),
					canon_rtx (XVECEXP (y, i, j))) == 0)
	      return 0;
	  break;

	case 'e':
	  if (rtx_equal_for_memref_p (canon_rtx (XEXP (x, i)),
				      canon_rtx (XEXP (y, i))) == 0)
	    return 0;
	  break;

	  /* This can happen for asm operands.  */
	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	  /* This can happen for an asm which clobbers memory.  */
	case '0':
	  break;

	  /* It is believed that rtx's at this level will never
	     contain anything but integers and other rtx's,
	     except for within LABEL_REFs and SYMBOL_REFs.  */
	default:
	  gcc_unreachable ();
	}
    }
  return 1;
}

/* Given an rtx X, find a SYMBOL_REF or LABEL_REF within
   X and return it, or return 0 if none found.  */

static rtx
find_symbolic_term (rtx x)
{
  int i;
  enum rtx_code code;
  const char *fmt;

  code = GET_CODE (x);
  if (code == SYMBOL_REF || code == LABEL_REF)
    return x;
  if (OBJECT_P (x))
    return 0;

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      rtx t;

      if (fmt[i] == 'e')
	{
	  t = find_symbolic_term (XEXP (x, i));
	  if (t != 0)
	    return t;
	}
      else if (fmt[i] == 'E')
	break;
    }
  return 0;
}

94f24ddc | 1381 | rtx |
4682ae04 | 1382 | find_base_term (rtx x) |
9ae8ffe7 | 1383 | { |
eab5c70a BS |
1384 | cselib_val *val; |
1385 | struct elt_loc_list *l; | |
1386 | ||
b949ea8b JW |
1387 | #if defined (FIND_BASE_TERM) |
1388 | /* Try machine-dependent ways to find the base term. */ | |
1389 | x = FIND_BASE_TERM (x); | |
1390 | #endif | |
1391 | ||
9ae8ffe7 JL |
1392 | switch (GET_CODE (x)) |
1393 | { | |
1394 | case REG: | |
1395 | return REG_BASE_VALUE (x); | |
1396 | ||
d288e53d DE |
1397 | case TRUNCATE: |
1398 | if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (Pmode)) | |
ca7fd9cd | 1399 | return 0; |
d288e53d | 1400 | /* Fall through. */ |
9ae8ffe7 | 1401 | case HIGH: |
6d849a2a JL |
1402 | case PRE_INC: |
1403 | case PRE_DEC: | |
1404 | case POST_INC: | |
1405 | case POST_DEC: | |
d288e53d DE |
1406 | case PRE_MODIFY: |
1407 | case POST_MODIFY: | |
6d849a2a JL |
1408 | return find_base_term (XEXP (x, 0)); |
1409 | ||
1abade85 RK |
1410 | case ZERO_EXTEND: |
1411 | case SIGN_EXTEND: /* Used for Alpha/NT pointers */ | |
1412 | { | |
1413 | rtx temp = find_base_term (XEXP (x, 0)); | |
1414 | ||
5ae6cd0d | 1415 | if (temp != 0 && CONSTANT_P (temp)) |
1abade85 | 1416 | temp = convert_memory_address (Pmode, temp); |
1abade85 RK |
1417 | |
1418 | return temp; | |
1419 | } | |
1420 | ||
eab5c70a BS |
1421 | case VALUE: |
1422 | val = CSELIB_VAL_PTR (x); | |
40e02b4a JH |
1423 | if (!val) |
1424 | return 0; | |
eab5c70a BS |
1425 | for (l = val->locs; l; l = l->next) |
1426 | if ((x = find_base_term (l->loc)) != 0) | |
1427 | return x; | |
1428 | return 0; | |
1429 | ||
9ae8ffe7 JL |
1430 | case CONST: |
1431 | x = XEXP (x, 0); | |
1432 | if (GET_CODE (x) != PLUS && GET_CODE (x) != MINUS) | |
1433 | return 0; | |
938d968e | 1434 | /* Fall through. */ |
9ae8ffe7 JL |
1435 | case LO_SUM: |
1436 | case PLUS: | |
1437 | case MINUS: | |
1438 | { | |
3c567fae JL |
1439 | rtx tmp1 = XEXP (x, 0); |
1440 | rtx tmp2 = XEXP (x, 1); | |
1441 | ||
f5143c46 | 1442 | /* This is a little bit tricky since we have to determine which of |
3c567fae JL |
1443 | the two operands represents the real base address. Otherwise this |
1444 | routine may return the index register instead of the base register. | |
1445 | ||
1446 | That may cause us to believe no aliasing was possible, when in | |
1447 | fact aliasing is possible. | |
1448 | ||
1449 | We use a few simple tests to guess the base register. Additional | |
1450 | tests can certainly be added. For example, if one of the operands | |
1451 | is a shift or multiply, then it must be the index register and the | |
1452 | other operand is the base register. */ | |
ca7fd9cd | 1453 | |
b949ea8b JW |
1454 | if (tmp1 == pic_offset_table_rtx && CONSTANT_P (tmp2)) |
1455 | return find_base_term (tmp2); | |
1456 | ||
3c567fae JL |
1457 | /* If either operand is known to be a pointer, then use it |
1458 | to determine the base term. */ | |
3502dc9c | 1459 | if (REG_P (tmp1) && REG_POINTER (tmp1)) |
3c567fae JL |
1460 | return find_base_term (tmp1); |
1461 | ||
3502dc9c | 1462 | if (REG_P (tmp2) && REG_POINTER (tmp2)) |
3c567fae JL |
1463 | return find_base_term (tmp2); |
1464 | ||
1465 | /* Neither operand was known to be a pointer. Go ahead and find the | |
1466 | base term for both operands. */ | |
1467 | tmp1 = find_base_term (tmp1); | |
1468 | tmp2 = find_base_term (tmp2); | |
1469 | ||
1470 | /* If either base term is named object or a special address | |
1471 | (like an argument or stack reference), then use it for the | |
1472 | base term. */ | |
d4b60170 | 1473 | if (tmp1 != 0 |
3c567fae JL |
1474 | && (GET_CODE (tmp1) == SYMBOL_REF |
1475 | || GET_CODE (tmp1) == LABEL_REF | |
1476 | || (GET_CODE (tmp1) == ADDRESS | |
1477 | && GET_MODE (tmp1) != VOIDmode))) | |
1478 | return tmp1; | |
1479 | ||
d4b60170 | 1480 | if (tmp2 != 0 |
3c567fae JL |
1481 | && (GET_CODE (tmp2) == SYMBOL_REF |
1482 | || GET_CODE (tmp2) == LABEL_REF | |
1483 | || (GET_CODE (tmp2) == ADDRESS | |
1484 | && GET_MODE (tmp2) != VOIDmode))) | |
1485 | return tmp2; | |
1486 | ||
1487 | /* We could not determine which of the two operands was the | |
1488 | base register and which was the index. So we can determine | |
1489 | nothing from the base alias check. */ | |
1490 | return 0; | |
9ae8ffe7 JL |
1491 | } |
1492 | ||
1493 | case AND: | |
d288e53d DE |
1494 | if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) != 0) |
1495 | return find_base_term (XEXP (x, 0)); | |
9ae8ffe7 JL |
1496 | return 0; |
1497 | ||
1498 | case SYMBOL_REF: | |
1499 | case LABEL_REF: | |
1500 | return x; | |
1501 | ||
1502 | default: | |
1503 | return 0; | |
1504 | } | |
1505 | } | |
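/* A standalone toy model (not GCC code) of the PLUS/MINUS case above:
   guess which operand carries the base address.  The toy_expr type, its
   kinds and the reg_is_pointer flag are invented stand-ins for RTL,
   REG_POINTER and REG_BASE_VALUE.  */

struct toy_expr
{
  enum { T_SYMBOL, T_REG, T_CONST, T_PLUS } kind;
  int reg_is_pointer;		/* stand-in for REG_POINTER */
  struct toy_expr *op0, *op1;	/* only used for T_PLUS */
};

static struct toy_expr *
toy_find_base_term (struct toy_expr *e)
{
  switch (e->kind)
    {
    case T_SYMBOL:
      return e;			/* a symbol is its own base */
    case T_REG:
      /* Simplification: pretend the base is known only for registers
	 flagged as pointers.  */
      return e->reg_is_pointer ? e : 0;
    case T_PLUS:
      /* Prefer the operand known to be a pointer; otherwise accept a
	 symbolic base from either side; give up if we cannot tell the
	 base from the index.  */
      if (e->op0->kind == T_REG && e->op0->reg_is_pointer)
	return toy_find_base_term (e->op0);
      if (e->op1->kind == T_REG && e->op1->reg_is_pointer)
	return toy_find_base_term (e->op1);
      {
	struct toy_expr *b0 = toy_find_base_term (e->op0);
	struct toy_expr *b1 = toy_find_base_term (e->op1);
	if (b0 && b0->kind == T_SYMBOL)
	  return b0;
	if (b1 && b1->kind == T_SYMBOL)
	  return b1;
	return 0;
      }
    default:
      return 0;
    }
}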
1506 | ||
1507 | /* Return 0 if the addresses X and Y are known to point to different | |
1508 | objects, 1 if they might be pointers to the same object. */ | |
1509 | ||
1510 | static int | |
4682ae04 AJ |
1511 | base_alias_check (rtx x, rtx y, enum machine_mode x_mode, |
1512 | enum machine_mode y_mode) | |
9ae8ffe7 JL |
1513 | { |
1514 | rtx x_base = find_base_term (x); | |
1515 | rtx y_base = find_base_term (y); | |
1516 | ||
1c72c7f6 JC |
1517 | /* If the address itself has no known base, see if a known equivalent
1518 | value has one. If either address still has no known base, nothing | |
1519 | is known about aliasing. */ | |
1520 | if (x_base == 0) | |
1521 | { | |
1522 | rtx x_c; | |
d4b60170 | 1523 | |
1c72c7f6 JC |
1524 | if (! flag_expensive_optimizations || (x_c = canon_rtx (x)) == x) |
1525 | return 1; | |
d4b60170 | 1526 | |
1c72c7f6 JC |
1527 | x_base = find_base_term (x_c); |
1528 | if (x_base == 0) | |
1529 | return 1; | |
1530 | } | |
9ae8ffe7 | 1531 | |
1c72c7f6 JC |
1532 | if (y_base == 0) |
1533 | { | |
1534 | rtx y_c; | |
1535 | if (! flag_expensive_optimizations || (y_c = canon_rtx (y)) == y) | |
1536 | return 1; | |
d4b60170 | 1537 | |
1c72c7f6 JC |
1538 | y_base = find_base_term (y_c); |
1539 | if (y_base == 0) | |
1540 | return 1; | |
1541 | } | |
1542 | ||
1543 | /* If the base addresses are equal nothing is known about aliasing. */ | |
1544 | if (rtx_equal_p (x_base, y_base)) | |
9ae8ffe7 JL |
1545 | return 1; |
1546 | ||
ca7fd9cd | 1547 | /* The base addresses of the read and write are different expressions. |
56ee9281 RH |
1548 | If they are both symbols and they are not accessed via AND, there is |
1549 | no conflict. We can bring knowledge of object alignment into play | |
1550 | here. For example, on alpha, "char a, b;" can alias one another, | |
1551 | though "char a; long b;" cannot. */ | |
9ae8ffe7 | 1552 | if (GET_CODE (x_base) != ADDRESS && GET_CODE (y_base) != ADDRESS) |
c02f035f | 1553 | { |
56ee9281 RH |
1554 | if (GET_CODE (x) == AND && GET_CODE (y) == AND) |
1555 | return 1; | |
1556 | if (GET_CODE (x) == AND | |
1557 | && (GET_CODE (XEXP (x, 1)) != CONST_INT | |
8fa2140d | 1558 | || (int) GET_MODE_UNIT_SIZE (y_mode) < -INTVAL (XEXP (x, 1)))) |
56ee9281 RH |
1559 | return 1; |
1560 | if (GET_CODE (y) == AND | |
1561 | && (GET_CODE (XEXP (y, 1)) != CONST_INT | |
8fa2140d | 1562 | || (int) GET_MODE_UNIT_SIZE (x_mode) < -INTVAL (XEXP (y, 1)))) |
56ee9281 | 1563 | return 1; |
b2972551 JL |
1564 | /* Differing symbols never alias. */ |
1565 | return 0; | |
c02f035f | 1566 | } |
9ae8ffe7 JL |
1567 | |
1568 | /* If one address is a stack reference there can be no alias: | |
1569 | stack references using different base registers do not alias, | |
1570 | a stack reference can not alias a parameter, and a stack reference | |
1571 | can not alias a global. */ | |
1572 | if ((GET_CODE (x_base) == ADDRESS && GET_MODE (x_base) == Pmode) | |
1573 | || (GET_CODE (y_base) == ADDRESS && GET_MODE (y_base) == Pmode)) | |
1574 | return 0; | |
1575 | ||
1576 | if (! flag_argument_noalias) | |
1577 | return 1; | |
1578 | ||
1579 | if (flag_argument_noalias > 1) | |
1580 | return 0; | |
1581 | ||
ec5c56db | 1582 | /* Weak noalias assertion (arguments are distinct, but may match globals). */ |
9ae8ffe7 JL |
1583 | return ! (GET_MODE (x_base) == VOIDmode && GET_MODE (y_base) == VOIDmode); |
1584 | } | |
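/* A standalone model (not GCC code) of the decision ladder above, assuming
   each base has already been classified and ignoring the AND alignment
   special cases.  The base_kind enum and the argument_noalias parameter
   are invented stand-ins for the RTL base terms and flag_argument_noalias.  */

enum base_kind { BASE_UNKNOWN, BASE_SYMBOL, BASE_STACK, BASE_ARGUMENT };

static int
bases_may_alias (enum base_kind x, enum base_kind y, int same_base,
		 int argument_noalias)
{
  if (x == BASE_UNKNOWN || y == BASE_UNKNOWN)
    return 1;			/* nothing is known about aliasing */
  if (same_base)
    return 1;			/* equal bases prove nothing either */
  if (x == BASE_SYMBOL && y == BASE_SYMBOL)
    return 0;			/* differing symbols never alias */
  if (x == BASE_STACK || y == BASE_STACK)
    return 0;			/* stack refs alias neither args nor globals */
  if (! argument_noalias)
    return 1;
  if (argument_noalias > 1)
    return 0;
  /* Weak assertion: two distinct arguments do not alias each other,
     but an argument may still alias a global.  */
  return ! (x == BASE_ARGUMENT && y == BASE_ARGUMENT);
}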
1585 | ||
eab5c70a BS |
1586 | /* Convert the address X into something we can use. This is done by returning |
1587 | it unchanged unless it is a value; in the latter case we call cselib to get | |
1588 | a more useful rtx. */ | |
3bdf5ad1 | 1589 | |
a13d4ebf | 1590 | rtx |
4682ae04 | 1591 | get_addr (rtx x) |
eab5c70a BS |
1592 | { |
1593 | cselib_val *v; | |
1594 | struct elt_loc_list *l; | |
1595 | ||
1596 | if (GET_CODE (x) != VALUE) | |
1597 | return x; | |
1598 | v = CSELIB_VAL_PTR (x); | |
40e02b4a JH |
1599 | if (v) |
1600 | { | |
1601 | for (l = v->locs; l; l = l->next) | |
1602 | if (CONSTANT_P (l->loc)) | |
1603 | return l->loc; | |
1604 | for (l = v->locs; l; l = l->next) | |
3c0cb5de | 1605 | if (!REG_P (l->loc) && !MEM_P (l->loc)) |
40e02b4a JH |
1606 | return l->loc; |
1607 | if (v->locs) | |
1608 | return v->locs->loc; | |
1609 | } | |
eab5c70a BS |
1610 | return x; |
1611 | } | |
1612 | ||
39cec1ac MH |
1613 | /* Return the address of the (N_REFS + 1)th memory reference to ADDR |
1614 | where SIZE is the size in bytes of the memory reference. If ADDR | |
1615 | is not modified by the memory reference then ADDR is returned. */ | |
1616 | ||
04e2b4d3 | 1617 | static rtx |
4682ae04 | 1618 | addr_side_effect_eval (rtx addr, int size, int n_refs) |
39cec1ac MH |
1619 | { |
1620 | int offset = 0; | |
ca7fd9cd | 1621 | |
39cec1ac MH |
1622 | switch (GET_CODE (addr)) |
1623 | { | |
1624 | case PRE_INC: | |
1625 | offset = (n_refs + 1) * size; | |
1626 | break; | |
1627 | case PRE_DEC: | |
1628 | offset = -(n_refs + 1) * size; | |
1629 | break; | |
1630 | case POST_INC: | |
1631 | offset = n_refs * size; | |
1632 | break; | |
1633 | case POST_DEC: | |
1634 | offset = -n_refs * size; | |
1635 | break; | |
1636 | ||
1637 | default: | |
1638 | return addr; | |
1639 | } | |
ca7fd9cd | 1640 | |
39cec1ac | 1641 | if (offset) |
45183e03 | 1642 | addr = gen_rtx_PLUS (GET_MODE (addr), XEXP (addr, 0), |
c22cacf3 | 1643 | GEN_INT (offset)); |
39cec1ac MH |
1644 | else |
1645 | addr = XEXP (addr, 0); | |
45183e03 | 1646 | addr = canon_rtx (addr); |
39cec1ac MH |
1647 | |
1648 | return addr; | |
1649 | } | |
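/* A standalone sketch (not GCC code) of the offset computation above,
   followed by a worked example.  The automod enum mirrors the four
   auto-modify address forms handled by the switch.  */

#include <assert.h>

enum automod { AM_PRE_INC, AM_PRE_DEC, AM_POST_INC, AM_POST_DEC };

static int
automod_offset (enum automod kind, int size, int n_refs)
{
  switch (kind)
    {
    case AM_PRE_INC:  return (n_refs + 1) * size;
    case AM_PRE_DEC:  return -(n_refs + 1) * size;
    case AM_POST_INC: return n_refs * size;
    case AM_POST_DEC: return -n_refs * size;
    }
  return 0;
}

static void
automod_offset_examples (void)
{
  /* With SIZE == 4 and one earlier reference, the second access through a
     POST_INC pointer is base + 4, while a PRE_DEC pointer has already been
     decremented twice by the time of the second access.  */
  assert (automod_offset (AM_POST_INC, 4, 1) == 4);
  assert (automod_offset (AM_PRE_DEC, 4, 1) == -8);
}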
1650 | ||
9ae8ffe7 JL |
1651 | /* Return nonzero if X and Y (memory addresses) could reference the |
1652 | same location in memory. C is an offset accumulator. When | |
1653 | C is nonzero, we are testing aliases between X and Y + C. | |
1654 | XSIZE is the size in bytes of the X reference, | |
1655 | similarly YSIZE is the size in bytes for Y. | |
45183e03 | 1656 | Expect that canon_rtx has been already called for X and Y. |
9ae8ffe7 JL |
1657 | |
1658 | If XSIZE or YSIZE is zero, we do not know the amount of memory being | |
1659 | referenced (the reference was BLKmode), so make the most pessimistic | |
1660 | assumptions. | |
1661 | ||
c02f035f RH |
1662 | If XSIZE or YSIZE is negative, we may access memory outside the object |
1663 | being referenced as a side effect. This can happen when using AND to | |
1664 | align memory references, as is done on the Alpha. | |
1665 | ||
9ae8ffe7 | 1666 | Nice to notice that varying addresses cannot conflict with fp if no |
0211b6ab | 1667 | local variables had their addresses taken, but that's too hard now. */ |
9ae8ffe7 | 1668 | |
9ae8ffe7 | 1669 | static int |
4682ae04 | 1670 | memrefs_conflict_p (int xsize, rtx x, int ysize, rtx y, HOST_WIDE_INT c) |
9ae8ffe7 | 1671 | { |
eab5c70a BS |
1672 | if (GET_CODE (x) == VALUE) |
1673 | x = get_addr (x); | |
1674 | if (GET_CODE (y) == VALUE) | |
1675 | y = get_addr (y); | |
9ae8ffe7 JL |
1676 | if (GET_CODE (x) == HIGH) |
1677 | x = XEXP (x, 0); | |
1678 | else if (GET_CODE (x) == LO_SUM) | |
1679 | x = XEXP (x, 1); | |
1680 | else | |
45183e03 | 1681 | x = addr_side_effect_eval (x, xsize, 0); |
9ae8ffe7 JL |
1682 | if (GET_CODE (y) == HIGH) |
1683 | y = XEXP (y, 0); | |
1684 | else if (GET_CODE (y) == LO_SUM) | |
1685 | y = XEXP (y, 1); | |
1686 | else | |
45183e03 | 1687 | y = addr_side_effect_eval (y, ysize, 0); |
9ae8ffe7 JL |
1688 | |
1689 | if (rtx_equal_for_memref_p (x, y)) | |
1690 | { | |
c02f035f | 1691 | if (xsize <= 0 || ysize <= 0) |
9ae8ffe7 JL |
1692 | return 1; |
1693 | if (c >= 0 && xsize > c) | |
1694 | return 1; | |
1695 | if (c < 0 && ysize+c > 0) | |
1696 | return 1; | |
1697 | return 0; | |
1698 | } | |
1699 | ||
6e73e666 JC |
1700 | /* This code used to check for conflicts involving stack references and |
1701 | globals but the base address alias code now handles these cases. */ | |
9ae8ffe7 JL |
1702 | |
1703 | if (GET_CODE (x) == PLUS) | |
1704 | { | |
1705 | /* The fact that X is canonicalized means that this | |
1706 | PLUS rtx is canonicalized. */ | |
1707 | rtx x0 = XEXP (x, 0); | |
1708 | rtx x1 = XEXP (x, 1); | |
1709 | ||
1710 | if (GET_CODE (y) == PLUS) | |
1711 | { | |
1712 | /* The fact that Y is canonicalized means that this | |
1713 | PLUS rtx is canonicalized. */ | |
1714 | rtx y0 = XEXP (y, 0); | |
1715 | rtx y1 = XEXP (y, 1); | |
1716 | ||
1717 | if (rtx_equal_for_memref_p (x1, y1)) | |
1718 | return memrefs_conflict_p (xsize, x0, ysize, y0, c); | |
1719 | if (rtx_equal_for_memref_p (x0, y0)) | |
1720 | return memrefs_conflict_p (xsize, x1, ysize, y1, c); | |
1721 | if (GET_CODE (x1) == CONST_INT) | |
63be02db JM |
1722 | { |
1723 | if (GET_CODE (y1) == CONST_INT) | |
1724 | return memrefs_conflict_p (xsize, x0, ysize, y0, | |
1725 | c - INTVAL (x1) + INTVAL (y1)); | |
1726 | else | |
1727 | return memrefs_conflict_p (xsize, x0, ysize, y, | |
1728 | c - INTVAL (x1)); | |
1729 | } | |
9ae8ffe7 JL |
1730 | else if (GET_CODE (y1) == CONST_INT) |
1731 | return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1)); | |
1732 | ||
6e73e666 | 1733 | return 1; |
9ae8ffe7 JL |
1734 | } |
1735 | else if (GET_CODE (x1) == CONST_INT) | |
1736 | return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1)); | |
1737 | } | |
1738 | else if (GET_CODE (y) == PLUS) | |
1739 | { | |
1740 | /* The fact that Y is canonicalized means that this | |
1741 | PLUS rtx is canonicalized. */ | |
1742 | rtx y0 = XEXP (y, 0); | |
1743 | rtx y1 = XEXP (y, 1); | |
1744 | ||
1745 | if (GET_CODE (y1) == CONST_INT) | |
1746 | return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1)); | |
1747 | else | |
1748 | return 1; | |
1749 | } | |
1750 | ||
1751 | if (GET_CODE (x) == GET_CODE (y)) | |
1752 | switch (GET_CODE (x)) | |
1753 | { | |
1754 | case MULT: | |
1755 | { | |
1756 | /* Handle cases where we expect the second operands to be the | |
1757 | same, and check only whether the first operand would conflict | |
1758 | or not. */ | |
1759 | rtx x0, y0; | |
1760 | rtx x1 = canon_rtx (XEXP (x, 1)); | |
1761 | rtx y1 = canon_rtx (XEXP (y, 1)); | |
1762 | if (! rtx_equal_for_memref_p (x1, y1)) | |
1763 | return 1; | |
1764 | x0 = canon_rtx (XEXP (x, 0)); | |
1765 | y0 = canon_rtx (XEXP (y, 0)); | |
1766 | if (rtx_equal_for_memref_p (x0, y0)) | |
1767 | return (xsize == 0 || ysize == 0 | |
1768 | || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0)); | |
1769 | ||
1770 | /* Can't properly adjust our sizes. */ | |
1771 | if (GET_CODE (x1) != CONST_INT) | |
1772 | return 1; | |
1773 | xsize /= INTVAL (x1); | |
1774 | ysize /= INTVAL (x1); | |
1775 | c /= INTVAL (x1); | |
1776 | return memrefs_conflict_p (xsize, x0, ysize, y0, c); | |
1777 | } | |
1d300e19 KG |
1778 | |
1779 | default: | |
1780 | break; | |
9ae8ffe7 JL |
1781 | } |
1782 | ||
1783 | /* Treat an access through an AND (e.g. a subword access on an Alpha) | |
ca7fd9cd | 1784 | as an access with indeterminate size. Assume that references |
56ee9281 RH |
1785 | besides AND are aligned, so if the size of the other reference is |
1786 | at least as large as the alignment, assume no other overlap. */ | |
9ae8ffe7 | 1787 | if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT) |
56ee9281 | 1788 | { |
02e3377d | 1789 | if (GET_CODE (y) == AND || ysize < -INTVAL (XEXP (x, 1))) |
56ee9281 | 1790 | xsize = -1; |
45183e03 | 1791 | return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)), ysize, y, c); |
56ee9281 | 1792 | } |
9ae8ffe7 | 1793 | if (GET_CODE (y) == AND && GET_CODE (XEXP (y, 1)) == CONST_INT) |
c02f035f | 1794 | { |
56ee9281 | 1795 | /* ??? If we are indexing far enough into the array/structure, we |
ca7fd9cd | 1796 | may yet be able to determine that we cannot overlap. But we
c02f035f | 1797 | also need to know that we are far enough from the end not to overlap
56ee9281 | 1798 | a following reference, so we do nothing with that for now. */ |
02e3377d | 1799 | if (GET_CODE (x) == AND || xsize < -INTVAL (XEXP (y, 1))) |
56ee9281 | 1800 | ysize = -1; |
45183e03 | 1801 | return memrefs_conflict_p (xsize, x, ysize, canon_rtx (XEXP (y, 0)), c); |
c02f035f | 1802 | } |
9ae8ffe7 JL |
1803 | |
1804 | if (CONSTANT_P (x)) | |
1805 | { | |
1806 | if (GET_CODE (x) == CONST_INT && GET_CODE (y) == CONST_INT) | |
1807 | { | |
1808 | c += (INTVAL (y) - INTVAL (x)); | |
c02f035f | 1809 | return (xsize <= 0 || ysize <= 0 |
9ae8ffe7 JL |
1810 | || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0)); |
1811 | } | |
1812 | ||
1813 | if (GET_CODE (x) == CONST) | |
1814 | { | |
1815 | if (GET_CODE (y) == CONST) | |
1816 | return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)), | |
1817 | ysize, canon_rtx (XEXP (y, 0)), c); | |
1818 | else | |
1819 | return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)), | |
1820 | ysize, y, c); | |
1821 | } | |
1822 | if (GET_CODE (y) == CONST) | |
1823 | return memrefs_conflict_p (xsize, x, ysize, | |
1824 | canon_rtx (XEXP (y, 0)), c); | |
1825 | ||
1826 | if (CONSTANT_P (y)) | |
b949ea8b | 1827 | return (xsize <= 0 || ysize <= 0 |
c02f035f | 1828 | || (rtx_equal_for_memref_p (x, y) |
b949ea8b | 1829 | && ((c >= 0 && xsize > c) || (c < 0 && ysize+c > 0)))); |
9ae8ffe7 JL |
1830 | |
1831 | return 1; | |
1832 | } | |
1833 | return 1; | |
1834 | } | |
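/* A standalone sketch (not GCC code) of the overlap arithmetic used when
   both addresses reduce to the same expression: X covers [0, xsize) and
   Y covers [c, c + ysize), and a nonpositive size means "unknown, assume
   the worst".  */

#include <assert.h>

static int
offsets_may_overlap (int xsize, int ysize, long c)
{
  if (xsize <= 0 || ysize <= 0)
    return 1;			/* unknown size: be pessimistic */
  if (c >= 0 && xsize > c)
    return 1;			/* X extends past the start of Y */
  if (c < 0 && ysize + c > 0)
    return 1;			/* Y extends past the start of X */
  return 0;
}

static void
offsets_may_overlap_examples (void)
{
  assert (offsets_may_overlap (4, 4, 2));    /* [0,4) and [2,6) overlap */
  assert (! offsets_may_overlap (4, 4, 4));  /* [0,4) and [4,8) do not */
  assert (offsets_may_overlap (0, 4, 100));  /* BLKmode X: assume conflict */
}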
1835 | ||
1836 | /* Functions to compute memory dependencies. | |
1837 | ||
1838 | Since we process the insns in execution order, we can build tables | |
1839 | to keep track of what registers are fixed (and not aliased), what registers | |
1840 | are varying in known ways, and what registers are varying in unknown | |
1841 | ways. | |
1842 | ||
1843 | If both memory references are volatile, then there must always be a | |
1844 | dependence between the two references, since their order can not be | |
1845 | changed. A volatile and non-volatile reference can be interchanged | |
ca7fd9cd | 1846 | though. |
9ae8ffe7 | 1847 | |
dc1618bc RK |
1848 | A MEM_IN_STRUCT reference at a non-AND varying address can never |
1849 | conflict with a non-MEM_IN_STRUCT reference at a fixed address. We | |
1850 | also must allow AND addresses, because they may generate accesses | |
1851 | outside the object being referenced. This is used to generate | |
1852 | aligned addresses from unaligned addresses, for instance, the alpha | |
1853 | storeqi_unaligned pattern. */ | |
9ae8ffe7 JL |
1854 | |
1855 | /* Read dependence: X is read after read in MEM takes place. There can | |
1856 | only be a dependence here if both reads are volatile. */ | |
1857 | ||
1858 | int | |
4682ae04 | 1859 | read_dependence (rtx mem, rtx x) |
9ae8ffe7 JL |
1860 | { |
1861 | return MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem); | |
1862 | } | |
1863 | ||
c6df88cb MM |
1864 | /* Returns MEM1 if and only if MEM1 is a scalar at a fixed address and |
1865 | MEM2 is a reference to a structure at a varying address, or returns | |
1866 | MEM2 if vice versa. Otherwise, returns NULL_RTX. If a non-NULL | |
1867 | value is returned MEM1 and MEM2 can never alias. VARIES_P is used | |
1868 | to decide whether or not an address may vary; it should return | |
eab5c70a BS |
1869 | nonzero whenever variation is possible. |
1870 | MEM1_ADDR and MEM2_ADDR are the addresses of MEM1 and MEM2. */ | |
ca7fd9cd | 1871 | |
2c72b78f | 1872 | static rtx |
4682ae04 AJ |
1873 | fixed_scalar_and_varying_struct_p (rtx mem1, rtx mem2, rtx mem1_addr, |
1874 | rtx mem2_addr, | |
1875 | int (*varies_p) (rtx, int)) | |
ca7fd9cd | 1876 | { |
3e0abe15 GK |
1877 | if (! flag_strict_aliasing) |
1878 | return NULL_RTX; | |
1879 | ||
afa8f0fb AP |
1880 | if (MEM_ALIAS_SET (mem2) |
1881 | && MEM_SCALAR_P (mem1) && MEM_IN_STRUCT_P (mem2) | |
e38fe8e0 | 1882 | && !varies_p (mem1_addr, 1) && varies_p (mem2_addr, 1)) |
c6df88cb MM |
1883 | /* MEM1 is a scalar at a fixed address; MEM2 is a struct at a |
1884 | varying address. */ | |
1885 | return mem1; | |
1886 | ||
afa8f0fb AP |
1887 | if (MEM_ALIAS_SET (mem1) |
1888 | && MEM_IN_STRUCT_P (mem1) && MEM_SCALAR_P (mem2) | |
e38fe8e0 | 1889 | && varies_p (mem1_addr, 1) && !varies_p (mem2_addr, 1)) |
c6df88cb MM |
1890 | /* MEM2 is a scalar at a fixed address; MEM1 is a struct at a |
1891 | varying address. */ | |
1892 | return mem2; | |
1893 | ||
1894 | return NULL_RTX; | |
1895 | } | |
1896 | ||
1897 | /* Returns nonzero if something about the mode or address format of MEM | |
1898 | indicates that it might well alias *anything*. */ | |
1899 | ||
2c72b78f | 1900 | static int |
4682ae04 | 1901 | aliases_everything_p (rtx mem) |
c6df88cb | 1902 | { |
c6df88cb | 1903 | if (GET_CODE (XEXP (mem, 0)) == AND) |
35fd3193 | 1904 | /* If the address is an AND, it's very hard to know what it is
c6df88cb MM |
1905 | actually pointing at. */
1906 | return 1; | |
ca7fd9cd | 1907 | |
c6df88cb MM |
1908 | return 0; |
1909 | } | |
1910 | ||
998d7deb RH |
1911 | /* Return true if we can determine that the fields referenced cannot |
1912 | overlap for any pair of objects. */ | |
1913 | ||
1914 | static bool | |
4682ae04 | 1915 | nonoverlapping_component_refs_p (tree x, tree y) |
998d7deb RH |
1916 | { |
1917 | tree fieldx, fieldy, typex, typey, orig_y; | |
1918 | ||
1919 | do | |
1920 | { | |
1921 | /* The comparison has to be done at a common type, since we don't | |
d6a7951f | 1922 | know how the inheritance hierarchy works. */ |
998d7deb RH |
1923 | orig_y = y; |
1924 | do | |
1925 | { | |
1926 | fieldx = TREE_OPERAND (x, 1); | |
c05a0766 | 1927 | typex = TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (fieldx)); |
998d7deb RH |
1928 | |
1929 | y = orig_y; | |
1930 | do | |
1931 | { | |
1932 | fieldy = TREE_OPERAND (y, 1); | |
c05a0766 | 1933 | typey = TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (fieldy)); |
998d7deb RH |
1934 | |
1935 | if (typex == typey) | |
1936 | goto found; | |
1937 | ||
1938 | y = TREE_OPERAND (y, 0); | |
1939 | } | |
1940 | while (y && TREE_CODE (y) == COMPONENT_REF); | |
1941 | ||
1942 | x = TREE_OPERAND (x, 0); | |
1943 | } | |
1944 | while (x && TREE_CODE (x) == COMPONENT_REF); | |
998d7deb | 1945 | /* Never found a common type. */ |
c05a0766 | 1946 | return false; |
998d7deb RH |
1947 | |
1948 | found: | |
1949 | /* If we're left with accessing different fields of a structure, | |
1950 | then no overlap. */ | |
1951 | if (TREE_CODE (typex) == RECORD_TYPE | |
1952 | && fieldx != fieldy) | |
1953 | return true; | |
1954 | ||
1955 | /* The comparison on the current field failed. If we're accessing | |
1956 | a very nested structure, look at the next outer level. */ | |
1957 | x = TREE_OPERAND (x, 0); | |
1958 | y = TREE_OPERAND (y, 0); | |
1959 | } | |
1960 | while (x && y | |
1961 | && TREE_CODE (x) == COMPONENT_REF | |
1962 | && TREE_CODE (y) == COMPONENT_REF); | |
ca7fd9cd | 1963 | |
998d7deb RH |
1964 | return false; |
1965 | } | |
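/* A standalone illustration (not GCC code) of the rule above: once both
   access paths reach the same record type, distinct fields of that record
   occupy disjoint storage, so the accesses cannot overlap.  The struct
   names below are invented for the example.  */

#include <assert.h>
#include <stddef.h>

struct inner { int f1; int f2; };
struct outer_a { char pad; struct inner in; };
struct outer_b { long pad; struct inner in; };

static void
distinct_fields_do_not_overlap (void)
{
  /* a.in.f1 and b.in.f2 share the common type "struct inner" but name
     different fields, so their byte ranges within that common type are
     disjoint no matter how the enclosing objects are laid out.  */
  assert (offsetof (struct inner, f1) + sizeof (int)
	  <= offsetof (struct inner, f2));
}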
1966 | ||
1967 | /* Look at the bottom of the COMPONENT_REF list for a DECL, and return it. */ | |
1968 | ||
1969 | static tree | |
4682ae04 | 1970 | decl_for_component_ref (tree x) |
998d7deb RH |
1971 | { |
1972 | do | |
1973 | { | |
1974 | x = TREE_OPERAND (x, 0); | |
1975 | } | |
1976 | while (x && TREE_CODE (x) == COMPONENT_REF); | |
1977 | ||
1978 | return x && DECL_P (x) ? x : NULL_TREE; | |
1979 | } | |
1980 | ||
1981 | /* Walk up the COMPONENT_REF list and adjust OFFSET to compensate for the | |
1982 | offset of the field reference. */ | |
1983 | ||
1984 | static rtx | |
4682ae04 | 1985 | adjust_offset_for_component_ref (tree x, rtx offset) |
998d7deb RH |
1986 | { |
1987 | HOST_WIDE_INT ioffset; | |
1988 | ||
1989 | if (! offset) | |
1990 | return NULL_RTX; | |
1991 | ||
1992 | ioffset = INTVAL (offset); | |
ca7fd9cd | 1993 | do |
998d7deb | 1994 | { |
44de5aeb | 1995 | tree offset = component_ref_field_offset (x); |
998d7deb RH |
1996 | tree field = TREE_OPERAND (x, 1); |
1997 | ||
44de5aeb | 1998 | if (! host_integerp (offset, 1)) |
998d7deb | 1999 | return NULL_RTX; |
44de5aeb | 2000 | ioffset += (tree_low_cst (offset, 1) |
998d7deb RH |
2001 | + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1) |
2002 | / BITS_PER_UNIT)); | |
2003 | ||
2004 | x = TREE_OPERAND (x, 0); | |
2005 | } | |
2006 | while (x && TREE_CODE (x) == COMPONENT_REF); | |
2007 | ||
2008 | return GEN_INT (ioffset); | |
2009 | } | |
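/* A standalone illustration (not GCC code) of what the walk above
   accumulates: the byte offset of a nested field is the sum of the
   per-level field offsets.  The struct layout is invented for the
   example.  */

#include <assert.h>
#include <stddef.h>

struct level2 { int a; int b; };
struct level1 { char c; struct level2 l2; };

static void
component_ref_offset_example (void)
{
  /* The offset of l2.b within level1 is the offset of l2 within level1
     plus the offset of b within level2, just as the loop above adds one
     component_ref_field_offset per COMPONENT_REF level.  */
  assert (offsetof (struct level1, l2.b)
	  == offsetof (struct level1, l2) + offsetof (struct level2, b));
}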
2010 | ||
95bd1dd7 | 2011 | /* Return nonzero if we can determine the exprs corresponding to memrefs |
a4311dfe RK |
2012 | X and Y and they do not overlap. */ |
2013 | ||
2014 | static int | |
4682ae04 | 2015 | nonoverlapping_memrefs_p (rtx x, rtx y) |
a4311dfe | 2016 | { |
998d7deb | 2017 | tree exprx = MEM_EXPR (x), expry = MEM_EXPR (y); |
a4311dfe RK |
2018 | rtx rtlx, rtly; |
2019 | rtx basex, basey; | |
998d7deb | 2020 | rtx moffsetx, moffsety; |
a4311dfe RK |
2021 | HOST_WIDE_INT offsetx = 0, offsety = 0, sizex, sizey, tem; |
2022 | ||
998d7deb RH |
2023 | /* Unless both have exprs, we can't tell anything. */ |
2024 | if (exprx == 0 || expry == 0) | |
2025 | return 0; | |
c22cacf3 | 2026 | |
998d7deb RH |
2027 | /* If both are field references, we may be able to determine something. */ |
2028 | if (TREE_CODE (exprx) == COMPONENT_REF | |
2029 | && TREE_CODE (expry) == COMPONENT_REF | |
2030 | && nonoverlapping_component_refs_p (exprx, expry)) | |
2031 | return 1; | |
2032 | ||
c22cacf3 | 2033 | |
998d7deb RH |
2034 | /* If the field reference test failed, look at the DECLs involved. */ |
2035 | moffsetx = MEM_OFFSET (x); | |
2036 | if (TREE_CODE (exprx) == COMPONENT_REF) | |
2037 | { | |
ea900239 DB |
2038 | if (TREE_CODE (expry) == VAR_DECL |
2039 | && POINTER_TYPE_P (TREE_TYPE (expry))) | |
2040 | { | |
2041 | tree field = TREE_OPERAND (exprx, 1); | |
2042 | tree fieldcontext = DECL_FIELD_CONTEXT (field); | |
2043 | if (ipa_type_escape_field_does_not_clobber_p (fieldcontext, | |
2044 | TREE_TYPE (field))) | |
c22cacf3 | 2045 | return 1; |
ea900239 DB |
2046 | } |
2047 | { | |
2048 | tree t = decl_for_component_ref (exprx); | |
2049 | if (! t) | |
2050 | return 0; | |
2051 | moffsetx = adjust_offset_for_component_ref (exprx, moffsetx); | |
2052 | exprx = t; | |
2053 | } | |
998d7deb | 2054 | } |
1b096a0a | 2055 | else if (INDIRECT_REF_P (exprx)) |
c67a1cf6 RH |
2056 | { |
2057 | exprx = TREE_OPERAND (exprx, 0); | |
2058 | if (flag_argument_noalias < 2 | |
2059 | || TREE_CODE (exprx) != PARM_DECL) | |
2060 | return 0; | |
2061 | } | |
2062 | ||
998d7deb RH |
2063 | moffsety = MEM_OFFSET (y); |
2064 | if (TREE_CODE (expry) == COMPONENT_REF) | |
2065 | { | |
ea900239 DB |
2066 | if (TREE_CODE (exprx) == VAR_DECL |
2067 | && POINTER_TYPE_P (TREE_TYPE (exprx))) | |
2068 | { | |
2069 | tree field = TREE_OPERAND (expry, 1); | |
2070 | tree fieldcontext = DECL_FIELD_CONTEXT (field); | |
2071 | if (ipa_type_escape_field_does_not_clobber_p (fieldcontext, | |
2072 | TREE_TYPE (field))) | |
c22cacf3 | 2073 | return 1; |
ea900239 DB |
2074 | } |
2075 | { | |
2076 | tree t = decl_for_component_ref (expry); | |
2077 | if (! t) | |
2078 | return 0; | |
2079 | moffsety = adjust_offset_for_component_ref (expry, moffsety); | |
2080 | expry = t; | |
2081 | } | |
998d7deb | 2082 | } |
1b096a0a | 2083 | else if (INDIRECT_REF_P (expry)) |
c67a1cf6 RH |
2084 | { |
2085 | expry = TREE_OPERAND (expry, 0); | |
2086 | if (flag_argument_noalias < 2 | |
2087 | || TREE_CODE (expry) != PARM_DECL) | |
2088 | return 0; | |
2089 | } | |
998d7deb RH |
2090 | |
2091 | if (! DECL_P (exprx) || ! DECL_P (expry)) | |
a4311dfe RK |
2092 | return 0; |
2093 | ||
998d7deb RH |
2094 | rtlx = DECL_RTL (exprx); |
2095 | rtly = DECL_RTL (expry); | |
a4311dfe | 2096 | |
1edcd60b RK |
2097 | /* If either RTL is not a MEM, it must be a REG or CONCAT, meaning they |
2098 | can't overlap unless they are the same because we never reuse that part | |
2099 | of the stack frame used for locals for spilled pseudos. */ | |
3c0cb5de | 2100 | if ((!MEM_P (rtlx) || !MEM_P (rtly)) |
1edcd60b | 2101 | && ! rtx_equal_p (rtlx, rtly)) |
a4311dfe RK |
2102 | return 1; |
2103 | ||
2104 | /* Get the base and offsets of both decls. If either is a register, we | |
2105 | know both are and are the same, so use that as the base. The only | |
2106 | way we can avoid overlap is if we can deduce that they are nonoverlapping | |
2107 | pieces of that decl, which is very rare. */ | |
3c0cb5de | 2108 | basex = MEM_P (rtlx) ? XEXP (rtlx, 0) : rtlx; |
a4311dfe RK |
2109 | if (GET_CODE (basex) == PLUS && GET_CODE (XEXP (basex, 1)) == CONST_INT) |
2110 | offsetx = INTVAL (XEXP (basex, 1)), basex = XEXP (basex, 0); | |
2111 | ||
3c0cb5de | 2112 | basey = MEM_P (rtly) ? XEXP (rtly, 0) : rtly; |
a4311dfe RK |
2113 | if (GET_CODE (basey) == PLUS && GET_CODE (XEXP (basey, 1)) == CONST_INT) |
2114 | offsety = INTVAL (XEXP (basey, 1)), basey = XEXP (basey, 0); | |
2115 | ||
d746694a | 2116 | /* If the bases are different, we know they do not overlap if both |
ca7fd9cd | 2117 | are constants or if one is a constant and the other a pointer into the |
d746694a RK |
2118 | stack frame. Otherwise a different base means we can't tell if they |
2119 | overlap or not. */ | |
2120 | if (! rtx_equal_p (basex, basey)) | |
ca7fd9cd KH |
2121 | return ((CONSTANT_P (basex) && CONSTANT_P (basey)) |
2122 | || (CONSTANT_P (basex) && REG_P (basey) | |
2123 | && REGNO_PTR_FRAME_P (REGNO (basey))) | |
2124 | || (CONSTANT_P (basey) && REG_P (basex) | |
2125 | && REGNO_PTR_FRAME_P (REGNO (basex)))); | |
a4311dfe | 2126 | |
3c0cb5de | 2127 | sizex = (!MEM_P (rtlx) ? (int) GET_MODE_SIZE (GET_MODE (rtlx)) |
a4311dfe RK |
2128 | : MEM_SIZE (rtlx) ? INTVAL (MEM_SIZE (rtlx)) |
2129 | : -1); | |
3c0cb5de | 2130 | sizey = (!MEM_P (rtly) ? (int) GET_MODE_SIZE (GET_MODE (rtly)) |
a4311dfe RK |
2131 | : MEM_SIZE (rtly) ? INTVAL (MEM_SIZE (rtly)) : |
2132 | -1); | |
2133 | ||
0af5bc3e RK |
2134 | /* If we have an offset for either memref, it can update the values computed |
2135 | above. */ | |
998d7deb RH |
2136 | if (moffsetx) |
2137 | offsetx += INTVAL (moffsetx), sizex -= INTVAL (moffsetx); | |
2138 | if (moffsety) | |
2139 | offsety += INTVAL (moffsety), sizey -= INTVAL (moffsety); | |
a4311dfe | 2140 | |
0af5bc3e | 2141 | /* If a memref has both a size and an offset, we can use the smaller size. |
efc981bb | 2142 | We can't do this if the offset isn't known because we must view this |
0af5bc3e | 2143 | memref as being anywhere inside the DECL's MEM. */ |
998d7deb | 2144 | if (MEM_SIZE (x) && moffsetx) |
a4311dfe | 2145 | sizex = INTVAL (MEM_SIZE (x)); |
998d7deb | 2146 | if (MEM_SIZE (y) && moffsety) |
a4311dfe RK |
2147 | sizey = INTVAL (MEM_SIZE (y)); |
2148 | ||
2149 | /* Put the values of the memref with the lower offset in X's values. */ | |
2150 | if (offsetx > offsety) | |
2151 | { | |
2152 | tem = offsetx, offsetx = offsety, offsety = tem; | |
2153 | tem = sizex, sizex = sizey, sizey = tem; | |
2154 | } | |
2155 | ||
2156 | /* If we don't know the size of the lower-offset value, we can't tell | |
2157 | if they conflict. Otherwise, we do the test. */ | |
a6f7c915 | 2158 | return sizex >= 0 && offsety >= offsetx + sizex; |
a4311dfe RK |
2159 | } |
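/* A standalone sketch (not GCC code) of the final interval test above:
   after the swap puts the lower offset in (offsetx, sizex), the two
   pieces of the decl are provably disjoint only when the lower piece has
   a known size and ends at or before the start of the higher one.
   Unknown sizes are encoded as -1, as in the code above.  */

static int
decl_pieces_nonoverlapping_p (long offsetx, long sizex,
			      long offsety, long sizey)
{
  if (offsetx > offsety)
    {
      long tem;
      tem = offsetx, offsetx = offsety, offsety = tem;
      tem = sizex, sizex = sizey, sizey = tem;
    }
  return sizex >= 0 && offsety >= offsetx + sizex;
}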
2160 | ||
9ae8ffe7 JL |
2161 | /* True dependence: X is read after store in MEM takes place. */ |
2162 | ||
2163 | int | |
4682ae04 AJ |
2164 | true_dependence (rtx mem, enum machine_mode mem_mode, rtx x, |
2165 | int (*varies) (rtx, int)) | |
9ae8ffe7 | 2166 | { |
b3694847 | 2167 | rtx x_addr, mem_addr; |
49982682 | 2168 | rtx base; |
9ae8ffe7 JL |
2169 | |
2170 | if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem)) | |
2171 | return 1; | |
2172 | ||
c4484b8f | 2173 | /* (mem:BLK (scratch)) is a special mechanism to conflict with everything. |
ac3768f6 | 2174 | This is used in epilogue deallocation functions, and in cselib. */ |
c4484b8f RH |
2175 | if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH) |
2176 | return 1; | |
2177 | if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH) | |
2178 | return 1; | |
9cd9e512 RH |
2179 | if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER |
2180 | || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER) | |
2181 | return 1; | |
c4484b8f | 2182 | |
41472af8 MM |
2183 | if (DIFFERENT_ALIAS_SETS_P (x, mem)) |
2184 | return 0; | |
2185 | ||
389fdba0 RH |
2186 | /* Read-only memory is by definition never modified, and therefore can't |
2187 | conflict with anything. We don't expect to find read-only set on MEM, | |
41806d92 | 2188 | but stupid user tricks can produce them, so don't die. */ |
389fdba0 | 2189 | if (MEM_READONLY_P (x)) |
9ae8ffe7 JL |
2190 | return 0; |
2191 | ||
a4311dfe RK |
2192 | if (nonoverlapping_memrefs_p (mem, x)) |
2193 | return 0; | |
2194 | ||
56ee9281 RH |
2195 | if (mem_mode == VOIDmode) |
2196 | mem_mode = GET_MODE (mem); | |
2197 | ||
eab5c70a BS |
2198 | x_addr = get_addr (XEXP (x, 0)); |
2199 | mem_addr = get_addr (XEXP (mem, 0)); | |
2200 | ||
55efb413 JW |
2201 | base = find_base_term (x_addr); |
2202 | if (base && (GET_CODE (base) == LABEL_REF | |
2203 | || (GET_CODE (base) == SYMBOL_REF | |
2204 | && CONSTANT_POOL_ADDRESS_P (base)))) | |
2205 | return 0; | |
2206 | ||
eab5c70a | 2207 | if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), mem_mode)) |
1c72c7f6 JC |
2208 | return 0; |
2209 | ||
eab5c70a BS |
2210 | x_addr = canon_rtx (x_addr); |
2211 | mem_addr = canon_rtx (mem_addr); | |
6e73e666 | 2212 | |
0211b6ab JW |
2213 | if (! memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr, |
2214 | SIZE_FOR_MODE (x), x_addr, 0)) | |
2215 | return 0; | |
2216 | ||
c6df88cb | 2217 | if (aliases_everything_p (x)) |
0211b6ab JW |
2218 | return 1; |
2219 | ||
f5143c46 | 2220 | /* We cannot use aliases_everything_p to test MEM, since we must look |
c6df88cb MM |
2221 | at MEM_MODE, rather than GET_MODE (MEM). */ |
2222 | if (mem_mode == QImode || GET_CODE (mem_addr) == AND) | |
a13d4ebf AM |
2223 | return 1; |
2224 | ||
2225 | /* In true_dependence we also allow BLKmode to alias anything. Why | |
2226 | don't we do this in anti_dependence and output_dependence? */ | |
2227 | if (mem_mode == BLKmode || GET_MODE (x) == BLKmode) | |
2228 | return 1; | |
2229 | ||
2230 | return ! fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr, | |
2231 | varies); | |
2232 | } | |
2233 | ||
2234 | /* Canonical true dependence: X is read after store in MEM takes place. | |
ca7fd9cd KH |
2235 | Variant of true_dependence which assumes MEM has already been |
2236 | canonicalized (hence we no longer do that here). | |
2237 | The mem_addr argument has been added, since true_dependence computed | |
a13d4ebf AM |
2238 | this value prior to canonicalizing. */ |
2239 | ||
2240 | int | |
4682ae04 AJ |
2241 | canon_true_dependence (rtx mem, enum machine_mode mem_mode, rtx mem_addr, |
2242 | rtx x, int (*varies) (rtx, int)) | |
a13d4ebf | 2243 | { |
b3694847 | 2244 | rtx x_addr; |
a13d4ebf AM |
2245 | |
2246 | if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem)) | |
2247 | return 1; | |
2248 | ||
0fe854a7 RH |
2249 | /* (mem:BLK (scratch)) is a special mechanism to conflict with everything. |
2250 | This is used in epilogue deallocation functions. */ | |
2251 | if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH) | |
2252 | return 1; | |
2253 | if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH) | |
2254 | return 1; | |
9cd9e512 RH |
2255 | if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER |
2256 | || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER) | |
2257 | return 1; | |
0fe854a7 | 2258 | |
a13d4ebf AM |
2259 | if (DIFFERENT_ALIAS_SETS_P (x, mem)) |
2260 | return 0; | |
2261 | ||
389fdba0 RH |
2262 | /* Read-only memory is by definition never modified, and therefore can't |
2263 | conflict with anything. We don't expect to find read-only set on MEM, | |
41806d92 | 2264 | but stupid user tricks can produce them, so don't die. */ |
389fdba0 | 2265 | if (MEM_READONLY_P (x)) |
a13d4ebf AM |
2266 | return 0; |
2267 | ||
a4311dfe RK |
2268 | if (nonoverlapping_memrefs_p (x, mem)) |
2269 | return 0; | |
2270 | ||
a13d4ebf AM |
2271 | x_addr = get_addr (XEXP (x, 0)); |
2272 | ||
2273 | if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), mem_mode)) | |
2274 | return 0; | |
2275 | ||
2276 | x_addr = canon_rtx (x_addr); | |
2277 | if (! memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr, | |
2278 | SIZE_FOR_MODE (x), x_addr, 0)) | |
2279 | return 0; | |
2280 | ||
2281 | if (aliases_everything_p (x)) | |
2282 | return 1; | |
2283 | ||
f5143c46 | 2284 | /* We cannot use aliases_everything_p to test MEM, since we must look |
a13d4ebf AM |
2285 | at MEM_MODE, rather than GET_MODE (MEM). */ |
2286 | if (mem_mode == QImode || GET_CODE (mem_addr) == AND) | |
c6df88cb | 2287 | return 1; |
0211b6ab | 2288 | |
c6df88cb MM |
2289 | /* In true_dependence we also allow BLKmode to alias anything. Why |
2290 | don't we do this in anti_dependence and output_dependence? */ | |
2291 | if (mem_mode == BLKmode || GET_MODE (x) == BLKmode) | |
2292 | return 1; | |
0211b6ab | 2293 | |
eab5c70a BS |
2294 | return ! fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr, |
2295 | varies); | |
9ae8ffe7 JL |
2296 | } |
2297 | ||
da7d8304 | 2298 | /* Returns nonzero if a write to X might alias a previous read from |
389fdba0 | 2299 | (or, if WRITEP is nonzero, a write to) MEM. */ |
9ae8ffe7 | 2300 | |
2c72b78f | 2301 | static int |
389fdba0 | 2302 | write_dependence_p (rtx mem, rtx x, int writep) |
9ae8ffe7 | 2303 | { |
6e73e666 | 2304 | rtx x_addr, mem_addr; |
c6df88cb | 2305 | rtx fixed_scalar; |
49982682 | 2306 | rtx base; |
6e73e666 | 2307 | |
9ae8ffe7 JL |
2308 | if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem)) |
2309 | return 1; | |
2310 | ||
c4484b8f RH |
2311 | /* (mem:BLK (scratch)) is a special mechanism to conflict with everything. |
2312 | This is used in epilogue deallocation functions. */ | |
2313 | if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH) | |
2314 | return 1; | |
2315 | if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH) | |
2316 | return 1; | |
9cd9e512 RH |
2317 | if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER |
2318 | || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER) | |
2319 | return 1; | |
c4484b8f | 2320 | |
eab5c70a BS |
2321 | if (DIFFERENT_ALIAS_SETS_P (x, mem)) |
2322 | return 0; | |
2323 | ||
389fdba0 RH |
2324 | /* A read from read-only memory can't conflict with read-write memory. */ |
2325 | if (!writep && MEM_READONLY_P (mem)) | |
2326 | return 0; | |
55efb413 | 2327 | |
a4311dfe RK |
2328 | if (nonoverlapping_memrefs_p (x, mem)) |
2329 | return 0; | |
2330 | ||
55efb413 JW |
2331 | x_addr = get_addr (XEXP (x, 0)); |
2332 | mem_addr = get_addr (XEXP (mem, 0)); | |
2333 | ||
49982682 JW |
2334 | if (! writep) |
2335 | { | |
55efb413 | 2336 | base = find_base_term (mem_addr); |
49982682 JW |
2337 | if (base && (GET_CODE (base) == LABEL_REF |
2338 | || (GET_CODE (base) == SYMBOL_REF | |
2339 | && CONSTANT_POOL_ADDRESS_P (base)))) | |
2340 | return 0; | |
2341 | } | |
2342 | ||
eab5c70a BS |
2343 | if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), |
2344 | GET_MODE (mem))) | |
41472af8 MM |
2345 | return 0; |
2346 | ||
eab5c70a BS |
2347 | x_addr = canon_rtx (x_addr); |
2348 | mem_addr = canon_rtx (mem_addr); | |
6e73e666 | 2349 | |
c6df88cb MM |
2350 | if (!memrefs_conflict_p (SIZE_FOR_MODE (mem), mem_addr, |
2351 | SIZE_FOR_MODE (x), x_addr, 0)) | |
2352 | return 0; | |
2353 | ||
ca7fd9cd | 2354 | fixed_scalar |
eab5c70a BS |
2355 | = fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr, |
2356 | rtx_addr_varies_p); | |
2357 | ||
c6df88cb MM |
2358 | return (!(fixed_scalar == mem && !aliases_everything_p (x)) |
2359 | && !(fixed_scalar == x && !aliases_everything_p (mem))); | |
2360 | } | |
2361 | ||
2362 | /* Anti dependence: X is written after read in MEM takes place. */ | |
2363 | ||
2364 | int | |
4682ae04 | 2365 | anti_dependence (rtx mem, rtx x) |
c6df88cb | 2366 | { |
389fdba0 | 2367 | return write_dependence_p (mem, x, /*writep=*/0); |
9ae8ffe7 JL |
2368 | } |
2369 | ||
2370 | /* Output dependence: X is written after store in MEM takes place. */ | |
2371 | ||
2372 | int | |
4682ae04 | 2373 | output_dependence (rtx mem, rtx x) |
9ae8ffe7 | 2374 | { |
389fdba0 | 2375 | return write_dependence_p (mem, x, /*writep=*/1); |
9ae8ffe7 | 2376 | } |
c14b9960 | 2377 | \f |
6e73e666 | 2378 | |
6e73e666 | 2379 | void |
4682ae04 | 2380 | init_alias_once (void) |
6e73e666 | 2381 | { |
b3694847 | 2382 | int i; |
6e73e666 | 2383 | |
6e73e666 JC |
2384 | for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) |
2385 | /* Check whether this register can hold an incoming pointer | |
2386 | argument. FUNCTION_ARG_REGNO_P tests outgoing register | |
ec5c56db | 2387 | numbers, so translate if necessary due to register windows. */ |
6e73e666 JC |
2388 | if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (i)) |
2389 | && HARD_REGNO_MODE_OK (i, Pmode)) | |
bf1660a6 JL |
2390 | static_reg_base_value[i] |
2391 | = gen_rtx_ADDRESS (VOIDmode, gen_rtx_REG (Pmode, i)); | |
2392 | ||
bf1660a6 JL |
2393 | static_reg_base_value[STACK_POINTER_REGNUM] |
2394 | = gen_rtx_ADDRESS (Pmode, stack_pointer_rtx); | |
2395 | static_reg_base_value[ARG_POINTER_REGNUM] | |
2396 | = gen_rtx_ADDRESS (Pmode, arg_pointer_rtx); | |
2397 | static_reg_base_value[FRAME_POINTER_REGNUM] | |
2398 | = gen_rtx_ADDRESS (Pmode, frame_pointer_rtx); | |
2399 | #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM | |
2400 | static_reg_base_value[HARD_FRAME_POINTER_REGNUM] | |
2401 | = gen_rtx_ADDRESS (Pmode, hard_frame_pointer_rtx); | |
2402 | #endif | |
2403 | } | |
2404 | ||
7b52eede JH |
2405 | /* Set MEMORY_MODIFIED when X modifies DATA (that is assumed |
2406 | to be a memory reference). */ | |
2407 | static bool memory_modified; | |
2408 | static void | |
4682ae04 | 2409 | memory_modified_1 (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data) |
7b52eede | 2410 | { |
3c0cb5de | 2411 | if (MEM_P (x)) |
7b52eede JH |
2412 | { |
2413 | if (anti_dependence (x, (rtx)data) || output_dependence (x, (rtx)data)) | |
2414 | memory_modified = true; | |
2415 | } | |
2416 | } | |
2417 | ||
2418 | ||
2419 | /* Return true when INSN possibly modifies the memory contents of MEM | |
454ff5cb | 2420 | (i.e. the memory at that address may be written). */ | |
7b52eede | 2421 | bool |
4682ae04 | 2422 | memory_modified_in_insn_p (rtx mem, rtx insn) |
7b52eede JH |
2423 | { |
2424 | if (!INSN_P (insn)) | |
2425 | return false; | |
2426 | memory_modified = false; | |
2427 | note_stores (PATTERN (insn), memory_modified_1, mem); | |
2428 | return memory_modified; | |
2429 | } | |
2430 | ||
c13e8210 MM |
2431 | /* Initialize the aliasing machinery. Initialize the REG_KNOWN_VALUE |
2432 | array. */ | |
2433 | ||
9ae8ffe7 | 2434 | void |
4682ae04 | 2435 | init_alias_analysis (void) |
9ae8ffe7 | 2436 | { |
c582d54a | 2437 | unsigned int maxreg = max_reg_num (); |
ea64ef27 | 2438 | int changed, pass; |
b3694847 SS |
2439 | int i; |
2440 | unsigned int ui; | |
2441 | rtx insn; | |
9ae8ffe7 | 2442 | |
0d446150 JH |
2443 | timevar_push (TV_ALIAS_ANALYSIS); |
2444 | ||
bb1acb3e RH |
2445 | reg_known_value_size = maxreg - FIRST_PSEUDO_REGISTER; |
2446 | reg_known_value = ggc_calloc (reg_known_value_size, sizeof (rtx)); | |
2447 | reg_known_equiv_p = xcalloc (reg_known_value_size, sizeof (bool)); | |
9ae8ffe7 | 2448 | |
08c79682 | 2449 | /* If we have memory allocated from the previous run, use it. */ |
c582d54a | 2450 | if (old_reg_base_value) |
08c79682 KH |
2451 | reg_base_value = old_reg_base_value; |
2452 | ||
2453 | if (reg_base_value) | |
2454 | VEC_truncate (rtx, reg_base_value, 0); | |
2455 | ||
a590ac65 | 2456 | VEC_safe_grow_cleared (rtx, gc, reg_base_value, maxreg); |
ac606739 | 2457 | |
5ed6ace5 MD |
2458 | new_reg_base_value = XNEWVEC (rtx, maxreg); |
2459 | reg_seen = XNEWVEC (char, maxreg); | |
ec907dd8 JL |
2460 | |
2461 | /* The basic idea is that each pass through this loop will use the | |
2462 | "constant" information from the previous pass to propagate alias | |
2463 | information through another level of assignments. | |
2464 | ||
2465 | This could get expensive if the assignment chains are long. Maybe | |
2466 | we should throttle the number of iterations, possibly based on | |
6e73e666 | 2467 | the optimization level or flag_expensive_optimizations. |
ec907dd8 JL |
2468 | |
2469 | We could propagate more information in the first pass by making use | |
2470 | of REG_N_SETS to determine immediately that the alias information | |
ea64ef27 JL |
2471 | for a pseudo is "constant". |
2472 | ||
2473 | A program with an uninitialized variable can cause an infinite loop | |
2474 | here. Instead of doing a full dataflow analysis to detect such problems | |
2475 | we just cap the number of iterations for the loop. | |
2476 | ||
2477 | The state of the arrays for the set chain in question does not matter | |
2478 | since the program has undefined behavior. */ | |
6e73e666 | 2479 | |
ea64ef27 | 2480 | pass = 0; |
6e73e666 | 2481 | do |
ec907dd8 JL |
2482 | { |
2483 | /* Assume nothing will change this iteration of the loop. */ | |
2484 | changed = 0; | |
2485 | ||
ec907dd8 JL |
2486 | /* We want to assign the same IDs each iteration of this loop, so |
2487 | start counting from zero each iteration of the loop. */ | |
2488 | unique_id = 0; | |
2489 | ||
f5143c46 | 2490 | /* We're at the start of the function each iteration through the |
ec907dd8 | 2491 | loop, so we're copying arguments. */ |
83bbd9b6 | 2492 | copying_arguments = true; |
9ae8ffe7 | 2493 | |
6e73e666 | 2494 | /* Wipe the potential alias information clean for this pass. */ |
c582d54a | 2495 | memset (new_reg_base_value, 0, maxreg * sizeof (rtx)); |
8072f69c | 2496 | |
6e73e666 | 2497 | /* Wipe the reg_seen array clean. */ |
c582d54a | 2498 | memset (reg_seen, 0, maxreg); |
9ae8ffe7 | 2499 | |
6e73e666 JC |
2500 | /* Mark all hard registers which may contain an address. |
2501 | The stack, frame and argument pointers may contain an address. | |
2502 | An argument register which can hold a Pmode value may contain | |
2503 | an address even if it is not in BASE_REGS. | |
8072f69c | 2504 | |
6e73e666 JC |
2505 | The address expression is VOIDmode for an argument and |
2506 | Pmode for other registers. */ | |
2507 | ||
7f243674 JL |
2508 | memcpy (new_reg_base_value, static_reg_base_value, |
2509 | FIRST_PSEUDO_REGISTER * sizeof (rtx)); | |
6e73e666 | 2510 | |
ec907dd8 JL |
2511 | /* Walk the insns adding values to the new_reg_base_value array. */ |
2512 | for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) | |
9ae8ffe7 | 2513 | { |
2c3c49de | 2514 | if (INSN_P (insn)) |
ec907dd8 | 2515 | { |
6e73e666 | 2516 | rtx note, set; |
efc9bd41 RK |
2517 | |
2518 | #if defined (HAVE_prologue) || defined (HAVE_epilogue) | |
f5143c46 | 2519 | /* The prologue/epilogue insns are not threaded onto the |
657959ca JL |
2520 | insn chain until after reload has completed. Thus, |
2521 | there is no sense wasting time checking if INSN is in | |
2522 | the prologue/epilogue until after reload has completed. */ | |
2523 | if (reload_completed | |
2524 | && prologue_epilogue_contains (insn)) | |
efc9bd41 RK |
2525 | continue; |
2526 | #endif | |
2527 | ||
ec907dd8 | 2528 | /* If this insn has a noalias note, process it, Otherwise, |
c22cacf3 MS |
2529 | scan for sets. A simple set will have no side effects |
2530 | which could change the base value of any other register. */ | |
6e73e666 | 2531 | |
ec907dd8 | 2532 | if (GET_CODE (PATTERN (insn)) == SET |
efc9bd41 RK |
2533 | && REG_NOTES (insn) != 0 |
2534 | && find_reg_note (insn, REG_NOALIAS, NULL_RTX)) | |
84832317 | 2535 | record_set (SET_DEST (PATTERN (insn)), NULL_RTX, NULL); |
ec907dd8 | 2536 | else |
84832317 | 2537 | note_stores (PATTERN (insn), record_set, NULL); |
6e73e666 JC |
2538 | |
2539 | set = single_set (insn); | |
2540 | ||
2541 | if (set != 0 | |
f8cfc6aa | 2542 | && REG_P (SET_DEST (set)) |
fb6754f0 | 2543 | && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER) |
6e73e666 | 2544 | { |
fb6754f0 | 2545 | unsigned int regno = REGNO (SET_DEST (set)); |
713f41f9 | 2546 | rtx src = SET_SRC (set); |
bb1acb3e | 2547 | rtx t; |
713f41f9 BS |
2548 | |
2549 | if (REG_NOTES (insn) != 0 | |
2550 | && (((note = find_reg_note (insn, REG_EQUAL, 0)) != 0 | |
2551 | && REG_N_SETS (regno) == 1) | |
2552 | || (note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) != 0) | |
2553 | && GET_CODE (XEXP (note, 0)) != EXPR_LIST | |
bb2cf916 | 2554 | && ! rtx_varies_p (XEXP (note, 0), 1) |
bb1acb3e RH |
2555 | && ! reg_overlap_mentioned_p (SET_DEST (set), |
2556 | XEXP (note, 0))) | |
713f41f9 | 2557 | { |
bb1acb3e RH |
2558 | set_reg_known_value (regno, XEXP (note, 0)); |
2559 | set_reg_known_equiv_p (regno, | |
2560 | REG_NOTE_KIND (note) == REG_EQUIV); | |
713f41f9 BS |
2561 | } |
2562 | else if (REG_N_SETS (regno) == 1 | |
2563 | && GET_CODE (src) == PLUS | |
f8cfc6aa | 2564 | && REG_P (XEXP (src, 0)) |
bb1acb3e | 2565 | && (t = get_reg_known_value (REGNO (XEXP (src, 0)))) |
713f41f9 BS |
2566 | && GET_CODE (XEXP (src, 1)) == CONST_INT) |
2567 | { | |
bb1acb3e RH |
2568 | t = plus_constant (t, INTVAL (XEXP (src, 1))); |
2569 | set_reg_known_value (regno, t); | |
2570 | set_reg_known_equiv_p (regno, 0); | |
713f41f9 BS |
2571 | } |
2572 | else if (REG_N_SETS (regno) == 1 | |
2573 | && ! rtx_varies_p (src, 1)) | |
2574 | { | |
bb1acb3e RH |
2575 | set_reg_known_value (regno, src); |
2576 | set_reg_known_equiv_p (regno, 0); | |
713f41f9 | 2577 | } |
6e73e666 | 2578 | } |
ec907dd8 | 2579 | } |
4b4bf941 | 2580 | else if (NOTE_P (insn) |
ec907dd8 | 2581 | && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG) |
83bbd9b6 | 2582 | copying_arguments = false; |
6e73e666 | 2583 | } |
ec907dd8 | 2584 | |
6e73e666 | 2585 | /* Now propagate values from new_reg_base_value to reg_base_value. */ |
298e6adc | 2586 | gcc_assert (maxreg == (unsigned int) max_reg_num()); |
c22cacf3 | 2587 | |
c582d54a | 2588 | for (ui = 0; ui < maxreg; ui++) |
6e73e666 | 2589 | { |
e51712db | 2590 | if (new_reg_base_value[ui] |
08c79682 | 2591 | && new_reg_base_value[ui] != VEC_index (rtx, reg_base_value, ui) |
c582d54a | 2592 | && ! rtx_equal_p (new_reg_base_value[ui], |
08c79682 | 2593 | VEC_index (rtx, reg_base_value, ui))) |
ec907dd8 | 2594 | { |
08c79682 | 2595 | VEC_replace (rtx, reg_base_value, ui, new_reg_base_value[ui]); |
6e73e666 | 2596 | changed = 1; |
ec907dd8 | 2597 | } |
9ae8ffe7 | 2598 | } |
9ae8ffe7 | 2599 | } |
6e73e666 | 2600 | while (changed && ++pass < MAX_ALIAS_LOOP_PASSES); |
9ae8ffe7 JL |
2601 | |
2602 | /* Fill in the remaining entries. */ | |
bb1acb3e | 2603 | for (i = 0; i < (int)reg_known_value_size; i++) |
9ae8ffe7 | 2604 | if (reg_known_value[i] == 0) |
bb1acb3e | 2605 | reg_known_value[i] = regno_reg_rtx[i + FIRST_PSEUDO_REGISTER]; |
9ae8ffe7 | 2606 | |
e05e2395 MM |
2607 | /* Clean up. */ |
2608 | free (new_reg_base_value); | |
ec907dd8 | 2609 | new_reg_base_value = 0; |
e05e2395 | 2610 | free (reg_seen); |
9ae8ffe7 | 2611 | reg_seen = 0; |
0d446150 | 2612 | timevar_pop (TV_ALIAS_ANALYSIS); |
9ae8ffe7 JL |
2613 | } |
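/* A standalone sketch (not GCC code) of the capped fixed-point iteration
   described in the comment above: keep propagating base values until a
   pass makes no change, but never exceed a fixed number of passes so that
   an uninitialized-variable chain cannot loop forever.
   propagate_one_pass() is a hypothetical stand-in for the insn walk.  */

#define TOY_MAX_ALIAS_LOOP_PASSES 10

static int
propagate_one_pass (void)
{
  /* Hypothetical stand-in for the real walk over the insns; pretend the
     information stabilizes immediately.  Returns nonzero if anything
     changed.  */
  return 0;
}

static void
toy_alias_fixed_point (void)
{
  int pass = 0;
  int changed;

  do
    changed = propagate_one_pass ();
  while (changed && ++pass < TOY_MAX_ALIAS_LOOP_PASSES);
}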
2614 | ||
2615 | void | |
4682ae04 | 2616 | end_alias_analysis (void) |
9ae8ffe7 | 2617 | { |
c582d54a | 2618 | old_reg_base_value = reg_base_value; |
bb1acb3e | 2619 | ggc_free (reg_known_value); |
9ae8ffe7 | 2620 | reg_known_value = 0; |
ac606739 | 2621 | reg_known_value_size = 0; |
bb1acb3e | 2622 | free (reg_known_equiv_p); |
e05e2395 | 2623 | reg_known_equiv_p = 0; |
9ae8ffe7 | 2624 | } |
e2500fed GK |
2625 | |
2626 | #include "gt-alias.h" |