1 /* Variable tracking routines for the GNU compiler.
2 Copyright (C) 2002-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the variable tracking pass. It computes where
21 variables are located (which registers or where in memory) at each position
22 in the instruction stream and emits notes describing the locations.
23 Debug information (DWARF2 location lists) is finally generated from
24 these notes.
25 With this debug information, it is possible to show variables
26 even when debugging optimized code.
27
28 How does the variable tracking pass work?
29
30 First, it scans RTL code for uses, stores and clobbers (register/memory
31 references in instructions), for call insns and for stack adjustments
32 separately for each basic block and saves them to an array of micro
33 operations.
34 The micro operations of one instruction are ordered so that
35 pre-modifying stack adjustment < use < use with no var < call insn
36 < clobber < set < post-modifying stack adjustment
37
38 Then, a forward dataflow analysis is performed to find out how the
39 locations of variables change through the code and to propagate the
40 variable locations along the control flow graph.
41 The IN set for basic block BB is computed as the union of the OUT sets
42 of BB's predecessors; the OUT set for BB is copied from the IN set and
43 then updated according to the micro operations in BB (sketch below).
44
45 The IN and OUT sets for basic blocks consist of a current stack adjustment
46 (used for adjusting offsets of variables addressed using the stack pointer),
47 the table of structures describing the locations of parts of a variable,
48 and for each physical register a linked list of the variable parts
49 stored in that register.
50 Each list element is thus a triplet (reg, decl, offset) where decl is
51 REG_EXPR (reg) and offset is REG_OFFSET (reg). The linked list is used for
52 efficiently deleting the appropriate variable parts when we set or clobber
53 the register.
54
55 There may be more than one variable part in a register. The linked lists
56 should be pretty short, so a list is a good data structure here.
57 For example, in the following code the register allocator may assign the
58 same register to variables A and B, and both of them are then stored in
59 that register in CODE:
60
61 if (cond)
62 set A;
63 else
64 set B;
65 CODE;
66 if (cond)
67 use A;
68 else
69 use B;
70
71 Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
72 are emitted at appropriate positions in the RTL code. Each such note
73 describes the location of one variable at the point in the instruction
74 stream where the note is. There is no need to emit a note for each variable
75 before each instruction; we only emit these notes where the location of a
76 variable changes (this means that we also emit notes for changes between
77 the OUT set of the previous block and the IN set of the current block).
78
79 The notes consist of two parts:
80 1. the declaration (from REG_EXPR or MEM_EXPR)
81 2. the location of a variable - it is either a simple register/memory
82 reference (for simple variables, for example int),
83 or a parallel of register/memory references (for large variables
84 which consist of several parts, for example long long).
85
86 */
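/* Editor's illustrative sketch (not part of the original file): a minimal,
   self-contained model of the forward dataflow step described above.  The
   IN set of a block is the union of its predecessors' OUT sets, and OUT is
   IN transformed by the block's micro operations.  All names (toy_block,
   toy_compute_bb_dataflow, ...) are hypothetical simplifications, not GCC
   API; the block is guarded out so the translation unit is unaffected.  */
#if 0
#include <set>
#include <vector>

struct toy_block
{
  std::vector<toy_block *> preds;  /* Predecessor blocks.  */
  std::set<int> in, out;           /* Variable-location sets, as ints.  */
  std::vector<int> kills, gens;    /* Net effect of the micro operations.  */
};

/* One application of the transfer function for BB; returns true if OUT
   changed, which is what drives the iteration to a fixed point.  */
static bool
toy_compute_bb_dataflow (toy_block *bb)
{
  bb->in.clear ();
  for (toy_block *pred : bb->preds)
    bb->in.insert (pred->out.begin (), pred->out.end ());

  std::set<int> out = bb->in;
  for (int k : bb->kills)
    out.erase (k);               /* Sets and clobbers remove old locations.  */
  for (int g : bb->gens)
    out.insert (g);              /* Sets add new locations.  */

  bool changed = out != bb->out;
  bb->out = out;
  return changed;
}
#endif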
87
88 #include "config.h"
89 #include "system.h"
90 #include "coretypes.h"
91 #include "tm.h"
92 #include "rtl.h"
93 #include "hash-set.h"
94 #include "vec.h"
95 #include "input.h"
96 #include "alias.h"
97 #include "symtab.h"
98 #include "inchash.h"
99 #include "tree.h"
100 #include "varasm.h"
101 #include "stor-layout.h"
102 #include "hash-map.h"
103 #include "hash-table.h"
104 #include "predict.h"
105 #include "hard-reg-set.h"
106 #include "function.h"
107 #include "dominance.h"
108 #include "cfg.h"
109 #include "cfgrtl.h"
110 #include "cfganal.h"
111 #include "basic-block.h"
112 #include "tm_p.h"
113 #include "flags.h"
114 #include "insn-config.h"
115 #include "reload.h"
116 #include "sbitmap.h"
117 #include "alloc-pool.h"
118 #include "regs.h"
119 #include "hashtab.h"
120 #include "statistics.h"
121 #include "expmed.h"
122 #include "dojump.h"
123 #include "explow.h"
124 #include "calls.h"
125 #include "emit-rtl.h"
126 #include "stmt.h"
127 #include "expr.h"
128 #include "tree-pass.h"
129 #include "bitmap.h"
130 #include "tree-dfa.h"
131 #include "tree-ssa.h"
132 #include "cselib.h"
133 #include "target.h"
134 #include "params.h"
135 #include "diagnostic.h"
136 #include "tree-pretty-print.h"
137 #include "recog.h"
138 #include "rtl-iter.h"
139 #include "fibonacci_heap.h"
140
141 typedef fibonacci_heap <long, basic_block_def> bb_heap_t;
142 typedef fibonacci_node <long, basic_block_def> bb_heap_node_t;
143
144 /* var-tracking.c assumes that tree code with the same value as VALUE rtx code
145 has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
146 Currently the value is the same as IDENTIFIER_NODE, which has such
147 a property. If this compile-time assertion ever fails, make sure that
148 the new tree code that equals (int) VALUE has the same property. */
149 extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1];
150
151 /* Type of micro operation. */
152 enum micro_operation_type
153 {
154 MO_USE, /* Use location (REG or MEM). */
155 MO_USE_NO_VAR, /* Use location which is not associated with a variable
156 or the variable is not trackable. */
157 MO_VAL_USE, /* Use location which is associated with a value. */
158 MO_VAL_LOC, /* Use location which appears in a debug insn. */
159 MO_VAL_SET, /* Set location associated with a value. */
160 MO_SET, /* Set location. */
161 MO_COPY, /* Copy the same portion of a variable from one
162 location to another. */
163 MO_CLOBBER, /* Clobber location. */
164 MO_CALL, /* Call insn. */
165 MO_ADJUST /* Adjust stack pointer. */
166
167 };
168
169 static const char * const ATTRIBUTE_UNUSED
170 micro_operation_type_name[] = {
171 "MO_USE",
172 "MO_USE_NO_VAR",
173 "MO_VAL_USE",
174 "MO_VAL_LOC",
175 "MO_VAL_SET",
176 "MO_SET",
177 "MO_COPY",
178 "MO_CLOBBER",
179 "MO_CALL",
180 "MO_ADJUST"
181 };
182
183 /* Where shall the note be emitted? BEFORE or AFTER the instruction.
184 Notes emitted as AFTER_CALL are to take effect during the call,
185 rather than after the call. */
186 enum emit_note_where
187 {
188 EMIT_NOTE_BEFORE_INSN,
189 EMIT_NOTE_AFTER_INSN,
190 EMIT_NOTE_AFTER_CALL_INSN
191 };
192
193 /* Structure holding information about micro operation. */
194 typedef struct micro_operation_def
195 {
196 /* Type of micro operation. */
197 enum micro_operation_type type;
198
199 /* The instruction which the micro operation is in, for MO_USE,
200 MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
201 instruction or note in the original flow (before any var-tracking
202 notes are inserted, to simplify emission of notes), for MO_SET
203 and MO_CLOBBER. */
204 rtx_insn *insn;
205
206 union {
207 /* Location. For MO_SET and MO_COPY, this is the SET that
208 performs the assignment, if known, otherwise it is the target
209 of the assignment. For MO_VAL_USE and MO_VAL_SET, it is a
210 CONCAT of the VALUE and the LOC associated with it. For
211 MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
212 associated with it. */
213 rtx loc;
214
215 /* Stack adjustment. */
216 HOST_WIDE_INT adjust;
217 } u;
218 } micro_operation;
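/* Editor's illustrative sketch: dispatching on a micro_operation-style
   tagged union.  Only the union member selected by the type tag may be
   read (u.loc for location ops, u.adjust for MO_ADJUST), as in the struct
   above.  The toy_* names are hypothetical; guarded out of the build.  */
#if 0
#include <cstdio>

enum toy_mo_type { TOY_MO_SET, TOY_MO_ADJUST };

struct toy_micro_operation
{
  toy_mo_type type;
  union
  {
    const char *loc;   /* Stands in for the rtx location.  */
    long adjust;       /* Stack adjustment amount.  */
  } u;
};

static void
toy_dump_mo (const toy_micro_operation &mo)
{
  if (mo.type == TOY_MO_ADJUST)
    printf ("adjust by %ld\n", mo.u.adjust);
  else
    printf ("set %s\n", mo.u.loc);
}
#endif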
219
220
221 /* A declaration of a variable, or an RTL value being handled like a
222 declaration. */
223 typedef void *decl_or_value;
224
225 /* Return true if a decl_or_value DV is a DECL or NULL. */
226 static inline bool
227 dv_is_decl_p (decl_or_value dv)
228 {
229 return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE;
230 }
231
232 /* Return true if a decl_or_value is a VALUE rtl. */
233 static inline bool
234 dv_is_value_p (decl_or_value dv)
235 {
236 return dv && !dv_is_decl_p (dv);
237 }
238
239 /* Return the decl in the decl_or_value. */
240 static inline tree
241 dv_as_decl (decl_or_value dv)
242 {
243 gcc_checking_assert (dv_is_decl_p (dv));
244 return (tree) dv;
245 }
246
247 /* Return the value in the decl_or_value. */
248 static inline rtx
249 dv_as_value (decl_or_value dv)
250 {
251 gcc_checking_assert (dv_is_value_p (dv));
252 return (rtx)dv;
253 }
254
255 /* Return the opaque pointer in the decl_or_value. */
256 static inline void *
257 dv_as_opaque (decl_or_value dv)
258 {
259 return dv;
260 }
261
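/* Editor's illustrative sketch: decl_or_value stores two unrelated pointer
   types in one void * and tells them apart by reading a code field that
   sits at the same position in both, which is what the check_value_val
   assertion above guarantees for trees vs. VALUEs.  A hypothetical
   miniature of the idea, guarded out of the build: */
#if 0
#include <cassert>

enum toy_code { TOY_DECL = 1, TOY_VALUE = 2 };

struct toy_decl  { toy_code code; /* ... decl fields ... */ };
struct toy_value { toy_code code; /* ... value fields ... */ };

typedef void *toy_dv;

static inline bool
toy_dv_is_decl_p (toy_dv dv)
{
  /* Both structs begin with a code field, so it can be inspected before
     we know which type DV really is.  */
  return !dv || *(const toy_code *) dv != TOY_VALUE;
}

static inline toy_decl *
toy_dv_as_decl (toy_dv dv)
{
  assert (toy_dv_is_decl_p (dv));
  return (toy_decl *) dv;
}
#endif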
262
263 /* Description of location of a part of a variable. The content of a physical
264 register is described by a chain of these structures.
265 The chains are pretty short (usually 1 or 2 elements), so a
266 chain is a good data structure here. */
267 typedef struct attrs_def
268 {
269 /* Pointer to next member of the list. */
270 struct attrs_def *next;
271
272 /* The rtx of register. */
273 rtx loc;
274
275 /* The declaration corresponding to LOC. */
276 decl_or_value dv;
277
278 /* Offset from start of DECL. */
279 HOST_WIDE_INT offset;
280
281 /* Pool allocation new operator. */
282 inline void *operator new (size_t)
283 {
284 return pool.allocate ();
285 }
286
287 /* Delete operator utilizing pool allocation. */
288 inline void operator delete (void *ptr)
289 {
290 pool.remove ((attrs_def *) ptr);
291 }
292
293 /* Memory allocation pool. */
294 static pool_allocator<attrs_def> pool;
295 } *attrs;
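/* Editor's illustrative sketch: the operator new/delete overloads above
   route all (de)allocation of a node type through a per-type pool.  A
   self-contained miniature of the same pattern with a trivial free-list
   pool; toy_pool is hypothetical and much simpler than GCC's
   pool_allocator (guarded out of the build): */
#if 0
#include <cstddef>
#include <vector>

template <typename T>
struct toy_pool
{
  std::vector<void *> free_list;

  void *allocate ()
  {
    if (!free_list.empty ())
      {
	void *p = free_list.back ();
	free_list.pop_back ();
	return p;
      }
    return ::operator new (sizeof (T));
  }

  void remove (T *p) { free_list.push_back (p); }
};

struct toy_node
{
  toy_node *next;

  /* Defining these on the class makes plain `new toy_node' and
     `delete n' use the pool transparently, as in attrs_def above.  */
  inline void *operator new (size_t) { return pool.allocate (); }
  inline void operator delete (void *ptr)
  {
    pool.remove (static_cast<toy_node *> (ptr));
  }

  static toy_pool<toy_node> pool;
};

toy_pool<toy_node> toy_node::pool;
#endif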
296
297 /* Structure for chaining the locations. */
298 typedef struct location_chain_def
299 {
300 /* Next element in the chain. */
301 struct location_chain_def *next;
302
303 /* The location (REG, MEM or VALUE). */
304 rtx loc;
305
306 /* The "value" stored in this location. */
307 rtx set_src;
308
309 /* Initialized? */
310 enum var_init_status init;
311
312 /* Pool allocation new operator. */
313 inline void *operator new (size_t)
314 {
315 return pool.allocate ();
316 }
317
318 /* Delete operator utilizing pool allocation. */
319 inline void operator delete (void *ptr)
320 {
321 pool.remove ((location_chain_def *) ptr);
322 }
323
324 /* Memory allocation pool. */
325 static pool_allocator<location_chain_def> pool;
326 } *location_chain;
327
328 /* A vector of loc_exp_dep holds the active dependencies of a one-part
329 DV on VALUEs, i.e., the VALUEs expanded so as to form the current
330 location of DV. Each entry is also part of VALUE's linked-list of
331 backlinks back to DV. */
332 typedef struct loc_exp_dep_s
333 {
334 /* The dependent DV. */
335 decl_or_value dv;
336 /* The dependency VALUE or DECL_DEBUG. */
337 rtx value;
338 /* The next entry in VALUE's backlinks list. */
339 struct loc_exp_dep_s *next;
340 /* A pointer to the pointer to this entry (head or prev's next) in
341 the doubly-linked list. */
342 struct loc_exp_dep_s **pprev;
343
344 /* Pool allocation new operator. */
345 inline void *operator new (size_t)
346 {
347 return pool.allocate ();
348 }
349
350 /* Delete operator utilizing pool allocation. */
351 inline void operator delete (void *ptr)
352 {
353 pool.remove ((loc_exp_dep_s *) ptr);
354 }
355
356 /* Memory allocation pool. */
357 static pool_allocator<loc_exp_dep_s> pool;
358 } loc_exp_dep;
359
360
361 /* This data structure holds information about the depth of a variable
362 expansion. */
363 typedef struct expand_depth_struct
364 {
365 /* This measures the complexity of the expanded expression. It
366 grows by one for each level of expansion that adds more than one
367 operand. */
368 int complexity;
369 /* This counts the number of ENTRY_VALUE expressions in an
370 expansion. We want to minimize their use. */
371 int entryvals;
372 } expand_depth;
373
374 /* This data structure is allocated for one-part variables at the time
375 of emitting notes. */
376 struct onepart_aux
377 {
378 /* Doubly-linked list of dependent DVs. These are DVs whose cur_loc
379 computation used the expansion of this variable, and that ought
380 to be notified should this variable change. If the DV's cur_loc
381 expanded to NULL, all components of the loc list are regarded as
382 active, so that any changes in them give us a chance to get a
383 location. Otherwise, only components of the loc that expanded to
384 non-NULL are regarded as active dependencies. */
385 loc_exp_dep *backlinks;
386 /* This holds the LOC that was expanded into cur_loc. We need only
387 mark a one-part variable as changed if the FROM loc is removed,
388 or if it has no known location and a loc is added, or if it gets
389 a change notification from any of its active dependencies. */
390 rtx from;
391 /* The depth of the cur_loc expression. */
392 expand_depth depth;
393 /* Dependencies actively used when expanding FROM into cur_loc. */
394 vec<loc_exp_dep, va_heap, vl_embed> deps;
395 };
396
397 /* Structure describing one part of a variable. */
398 typedef struct variable_part_def
399 {
400 /* Chain of locations of the part. */
401 location_chain loc_chain;
402
403 /* Location which was last emitted to location list. */
404 rtx cur_loc;
405
406 union variable_aux
407 {
408 /* The offset in the variable, if !var->onepart. */
409 HOST_WIDE_INT offset;
410
411 /* Pointer to auxiliary data, if var->onepart and emit_notes. */
412 struct onepart_aux *onepaux;
413 } aux;
414 } variable_part;
415
416 /* Maximum number of location parts. */
417 #define MAX_VAR_PARTS 16
418
419 /* Enumeration type used to discriminate various types of one-part
420 variables. */
421 typedef enum onepart_enum
422 {
423 /* Not a one-part variable. */
424 NOT_ONEPART = 0,
425 /* A one-part DECL that is not a DEBUG_EXPR_DECL. */
426 ONEPART_VDECL = 1,
427 /* A DEBUG_EXPR_DECL. */
428 ONEPART_DEXPR = 2,
429 /* A VALUE. */
430 ONEPART_VALUE = 3
431 } onepart_enum_t;
432
433 /* Structure describing where the variable is located. */
434 typedef struct variable_def
435 {
436 /* The declaration of the variable, or an RTL value being handled
437 like a declaration. */
438 decl_or_value dv;
439
440 /* Reference count. */
441 int refcount;
442
443 /* Number of variable parts. */
444 char n_var_parts;
445
446 /* What type of DV this is, according to enum onepart_enum. */
447 ENUM_BITFIELD (onepart_enum) onepart : CHAR_BIT;
448
449 /* True if this variable_def struct is currently in the
450 changed_variables hash table. */
451 bool in_changed_variables;
452
453 /* The variable parts. */
454 variable_part var_part[1];
455 } *variable;
456 typedef const struct variable_def *const_variable;
457
458 /* Pointer to the BB's information specific to the variable tracking pass. */
459 #define VTI(BB) ((variable_tracking_info) (BB)->aux)
460
461 /* Macro to access MEM_OFFSET as a HOST_WIDE_INT. Evaluates MEM twice. */
462 #define INT_MEM_OFFSET(mem) (MEM_OFFSET_KNOWN_P (mem) ? MEM_OFFSET (mem) : 0)
463
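/* Editor's note: INT_MEM_OFFSET evaluates MEM twice, so an argument with
   side effects would misbehave.  TOY_TWICE below is a hypothetical macro
   of the same shape demonstrating the hazard (guarded out of the build): */
#if 0
#include <cassert>

#define TOY_TWICE(x) ((x) ? (x) : 0)

static int
toy_double_eval_demo (void)
{
  int a[2] = { 5, 7 };
  int *p = a;
  int v = TOY_TWICE (*p++);  /* P is incremented twice...  */
  assert (p == a + 2);       /* ...so only side-effect-free args are safe.  */
  return v;                  /* v == 7, not 5.  */
}
#endif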
464 #if ENABLE_CHECKING && (GCC_VERSION >= 2007)
465
466 /* Access VAR's Ith part's offset, checking that it's not a one-part
467 variable. */
468 #define VAR_PART_OFFSET(var, i) __extension__ \
469 (*({ variable const __v = (var); \
470 gcc_checking_assert (!__v->onepart); \
471 &__v->var_part[(i)].aux.offset; }))
472
473 /* Access VAR's one-part auxiliary data, checking that it is a
474 one-part variable. */
475 #define VAR_LOC_1PAUX(var) __extension__ \
476 (*({ variable const __v = (var); \
477 gcc_checking_assert (__v->onepart); \
478 &__v->var_part[0].aux.onepaux; }))
479
480 #else
481 #define VAR_PART_OFFSET(var, i) ((var)->var_part[(i)].aux.offset)
482 #define VAR_LOC_1PAUX(var) ((var)->var_part[0].aux.onepaux)
483 #endif
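/* Editor's illustrative sketch: the checked variants of VAR_PART_OFFSET
   and VAR_LOC_1PAUX above use the GNU statement-expression extension to
   evaluate their argument once, assert an invariant, and still yield an
   lvalue.  A hypothetical miniature with the same shape (GCC/Clang only,
   guarded out of the build): */
#if 0
#include <assert.h>

struct toy_var { int onepart; long offset; };

#define TOY_OFFSET(var) __extension__			\
  (*({ struct toy_var *const __v = (var);		\
       assert (!__v->onepart);				\
       &__v->offset; }))

/* Works on both sides of an assignment:
     struct toy_var v = { 0, 0 };
     TOY_OFFSET (&v) = 16;  */
#endif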
484
485 /* These are accessor macros for the one-part auxiliary data. When
486 convenient for users, they're guarded by tests that the data was
487 allocated. */
488 #define VAR_LOC_DEP_LST(var) (VAR_LOC_1PAUX (var) \
489 ? VAR_LOC_1PAUX (var)->backlinks \
490 : NULL)
491 #define VAR_LOC_DEP_LSTP(var) (VAR_LOC_1PAUX (var) \
492 ? &VAR_LOC_1PAUX (var)->backlinks \
493 : NULL)
494 #define VAR_LOC_FROM(var) (VAR_LOC_1PAUX (var)->from)
495 #define VAR_LOC_DEPTH(var) (VAR_LOC_1PAUX (var)->depth)
496 #define VAR_LOC_DEP_VEC(var) (VAR_LOC_1PAUX (var) \
497 ? &VAR_LOC_1PAUX (var)->deps \
498 : NULL)
499
500
501
502 typedef unsigned int dvuid;
503
504 /* Return the uid of DV. */
505
506 static inline dvuid
507 dv_uid (decl_or_value dv)
508 {
509 if (dv_is_value_p (dv))
510 return CSELIB_VAL_PTR (dv_as_value (dv))->uid;
511 else
512 return DECL_UID (dv_as_decl (dv));
513 }
514
515 /* Compute the hash from the uid. */
516
517 static inline hashval_t
518 dv_uid2hash (dvuid uid)
519 {
520 return uid;
521 }
522
523 /* The hash function for a mask table in a shared_htab chain. */
524
525 static inline hashval_t
526 dv_htab_hash (decl_or_value dv)
527 {
528 return dv_uid2hash (dv_uid (dv));
529 }
530
531 static void variable_htab_free (void *);
532
533 /* Variable hashtable helpers. */
534
535 struct variable_hasher
536 {
537 typedef variable_def *value_type;
538 typedef void *compare_type;
539 static inline hashval_t hash (const variable_def *);
540 static inline bool equal (const variable_def *, const void *);
541 static inline void remove (variable_def *);
542 };
543
544 /* The hash function for variable_htab, computes the hash value
545 from the declaration of variable X. */
546
547 inline hashval_t
548 variable_hasher::hash (const variable_def *v)
549 {
550 return dv_htab_hash (v->dv);
551 }
552
553 /* Compare the declaration of variable X with declaration Y. */
554
555 inline bool
556 variable_hasher::equal (const variable_def *v, const void *y)
557 {
558 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
559
560 return (dv_as_opaque (v->dv) == dv_as_opaque (dv));
561 }
562
563 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
564
565 inline void
566 variable_hasher::remove (variable_def *var)
567 {
568 variable_htab_free (var);
569 }
570
571 typedef hash_table<variable_hasher> variable_table_type;
572 typedef variable_table_type::iterator variable_iterator_type;
573
574 /* Structure for passing some other parameters to function
575 emit_note_insn_var_location. */
576 typedef struct emit_note_data_def
577 {
578 /* The instruction which the note will be emitted before/after. */
579 rtx_insn *insn;
580
581 /* Where the note will be emitted (before or after INSN). */
582 enum emit_note_where where;
583
584 /* The variables and values active at this point. */
585 variable_table_type *vars;
586 } emit_note_data;
587
588 /* Structure holding a refcounted hash table. If refcount > 1,
589 it must first be unshared before being modified. */
590 typedef struct shared_hash_def
591 {
592 /* Reference count. */
593 int refcount;
594
595 /* Actual hash table. */
596 variable_table_type *htab;
597
598 /* Pool allocation new operator. */
599 inline void *operator new (size_t)
600 {
601 return pool.allocate ();
602 }
603
604 /* Delete operator utilizing pool allocation. */
605 inline void operator delete (void *ptr)
606 {
607 pool.remove ((shared_hash_def *) ptr);
608 }
609
610 /* Memory allocation pool. */
611 static pool_allocator<shared_hash_def> pool;
612 } *shared_hash;
613
614 /* Structure holding the IN or OUT set for a basic block. */
615 typedef struct dataflow_set_def
616 {
617 /* Adjustment of stack offset. */
618 HOST_WIDE_INT stack_adjust;
619
620 /* Attributes for registers (lists of attrs). */
621 attrs regs[FIRST_PSEUDO_REGISTER];
622
623 /* Variable locations. */
624 shared_hash vars;
625
626 /* Vars that are being traversed. */
627 shared_hash traversed_vars;
628 } dataflow_set;
629
630 /* The structure (one for each basic block) containing the information
631 needed for variable tracking. */
632 typedef struct variable_tracking_info_def
633 {
634 /* The vector of micro operations. */
635 vec<micro_operation> mos;
636
637 /* The IN and OUT set for dataflow analysis. */
638 dataflow_set in;
639 dataflow_set out;
640
641 /* The permanent-in dataflow set for this block. This is used to
642 hold values for which we had to compute entry values. ??? This
643 should probably be dynamically allocated, to avoid using more
644 memory in non-debug builds. */
645 dataflow_set *permp;
646
647 /* Has the block been visited in DFS? */
648 bool visited;
649
650 /* Has the block been flooded in VTA? */
651 bool flooded;
652
653 } *variable_tracking_info;
654
655 /* Alloc pool for struct attrs_def. */
656 pool_allocator<attrs_def> attrs_def::pool ("attrs_def pool", 1024);
657
658 /* Alloc pool for struct variable_def with MAX_VAR_PARTS entries. */
659
660 static pool_allocator<variable_def> var_pool
661 ("variable_def pool", 64,
662 (MAX_VAR_PARTS - 1) * sizeof (((variable)NULL)->var_part[0]));
663
664 /* Alloc pool for struct variable_def with a single var_part entry. */
665 static pool_allocator<variable_def> valvar_pool
666 ("small variable_def pool", 256);
667
668 /* Alloc pool for struct location_chain_def. */
669 pool_allocator<location_chain_def> location_chain_def::pool
670 ("location_chain_def pool", 1024);
671
672 /* Alloc pool for struct shared_hash_def. */
673 pool_allocator<shared_hash_def> shared_hash_def::pool
674 ("shared_hash_def pool", 256);
675
676 /* Alloc pool for struct loc_exp_dep_s for NOT_ONEPART variables. */
677 pool_allocator<loc_exp_dep> loc_exp_dep::pool ("loc_exp_dep pool", 64);
678
679 /* Changed variables; notes will be emitted for them. */
680 static variable_table_type *changed_variables;
681
682 /* Shall notes be emitted? */
683 static bool emit_notes;
684
685 /* Values whose dynamic location lists have gone empty, but whose
686 cselib location lists are still usable. Use this to hold the
687 current location, the backlinks, etc., during emit_notes. */
688 static variable_table_type *dropped_values;
689
690 /* Empty shared hashtable. */
691 static shared_hash empty_shared_hash;
692
693 /* Scratch register bitmap used by cselib_expand_value_rtx. */
694 static bitmap scratch_regs = NULL;
695
696 #ifdef HAVE_window_save
697 typedef struct GTY(()) parm_reg {
698 rtx outgoing;
699 rtx incoming;
700 } parm_reg_t;
701
702
703 /* Vector of windowed parameter registers, if any. */
704 static vec<parm_reg_t, va_gc> *windowed_parm_regs = NULL;
705 #endif
706
707 /* Variable used to tell whether cselib_process_insn called our hook. */
708 static bool cselib_hook_called;
709
710 /* Local function prototypes. */
711 static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
712 HOST_WIDE_INT *);
713 static void insn_stack_adjust_offset_pre_post (rtx_insn *, HOST_WIDE_INT *,
714 HOST_WIDE_INT *);
715 static bool vt_stack_adjustments (void);
716
717 static void init_attrs_list_set (attrs *);
718 static void attrs_list_clear (attrs *);
719 static attrs attrs_list_member (attrs, decl_or_value, HOST_WIDE_INT);
720 static void attrs_list_insert (attrs *, decl_or_value, HOST_WIDE_INT, rtx);
721 static void attrs_list_copy (attrs *, attrs);
722 static void attrs_list_union (attrs *, attrs);
723
724 static variable_def **unshare_variable (dataflow_set *set, variable_def **slot,
725 variable var, enum var_init_status);
726 static void vars_copy (variable_table_type *, variable_table_type *);
727 static tree var_debug_decl (tree);
728 static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
729 static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
730 enum var_init_status, rtx);
731 static void var_reg_delete (dataflow_set *, rtx, bool);
732 static void var_regno_delete (dataflow_set *, int);
733 static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
734 static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
735 enum var_init_status, rtx);
736 static void var_mem_delete (dataflow_set *, rtx, bool);
737
738 static void dataflow_set_init (dataflow_set *);
739 static void dataflow_set_clear (dataflow_set *);
740 static void dataflow_set_copy (dataflow_set *, dataflow_set *);
741 static int variable_union_info_cmp_pos (const void *, const void *);
742 static void dataflow_set_union (dataflow_set *, dataflow_set *);
743 static location_chain find_loc_in_1pdv (rtx, variable, variable_table_type *);
744 static bool canon_value_cmp (rtx, rtx);
745 static int loc_cmp (rtx, rtx);
746 static bool variable_part_different_p (variable_part *, variable_part *);
747 static bool onepart_variable_different_p (variable, variable);
748 static bool variable_different_p (variable, variable);
749 static bool dataflow_set_different (dataflow_set *, dataflow_set *);
750 static void dataflow_set_destroy (dataflow_set *);
751
752 static bool contains_symbol_ref (rtx);
753 static bool track_expr_p (tree, bool);
754 static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
755 static void add_uses_1 (rtx *, void *);
756 static void add_stores (rtx, const_rtx, void *);
757 static bool compute_bb_dataflow (basic_block);
758 static bool vt_find_locations (void);
759
760 static void dump_attrs_list (attrs);
761 static void dump_var (variable);
762 static void dump_vars (variable_table_type *);
763 static void dump_dataflow_set (dataflow_set *);
764 static void dump_dataflow_sets (void);
765
766 static void set_dv_changed (decl_or_value, bool);
767 static void variable_was_changed (variable, dataflow_set *);
768 static variable_def **set_slot_part (dataflow_set *, rtx, variable_def **,
769 decl_or_value, HOST_WIDE_INT,
770 enum var_init_status, rtx);
771 static void set_variable_part (dataflow_set *, rtx,
772 decl_or_value, HOST_WIDE_INT,
773 enum var_init_status, rtx, enum insert_option);
774 static variable_def **clobber_slot_part (dataflow_set *, rtx,
775 variable_def **, HOST_WIDE_INT, rtx);
776 static void clobber_variable_part (dataflow_set *, rtx,
777 decl_or_value, HOST_WIDE_INT, rtx);
778 static variable_def **delete_slot_part (dataflow_set *, rtx, variable_def **,
779 HOST_WIDE_INT);
780 static void delete_variable_part (dataflow_set *, rtx,
781 decl_or_value, HOST_WIDE_INT);
782 static void emit_notes_in_bb (basic_block, dataflow_set *);
783 static void vt_emit_notes (void);
784
785 static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
786 static void vt_add_function_parameters (void);
787 static bool vt_initialize (void);
788 static void vt_finalize (void);
789
790 /* Callback for stack_adjust_offset_pre_post, called via for_each_inc_dec. */
791
792 static int
793 stack_adjust_offset_pre_post_cb (rtx, rtx op, rtx dest, rtx src, rtx srcoff,
794 void *arg)
795 {
796 if (dest != stack_pointer_rtx)
797 return 0;
798
799 switch (GET_CODE (op))
800 {
801 case PRE_INC:
802 case PRE_DEC:
803 ((HOST_WIDE_INT *)arg)[0] -= INTVAL (srcoff);
804 return 0;
805 case POST_INC:
806 case POST_DEC:
807 ((HOST_WIDE_INT *)arg)[1] -= INTVAL (srcoff);
808 return 0;
809 case PRE_MODIFY:
810 case POST_MODIFY:
811 /* We handle only adjustments by constant amount. */
812 gcc_assert (GET_CODE (src) == PLUS
813 && CONST_INT_P (XEXP (src, 1))
814 && XEXP (src, 0) == stack_pointer_rtx);
815 ((HOST_WIDE_INT *)arg)[GET_CODE (op) == POST_MODIFY]
816 -= INTVAL (XEXP (src, 1));
817 return 0;
818 default:
819 gcc_unreachable ();
820 }
821 }
822
823 /* Given a SET, calculate the amounts of stack adjustment it contains,
824 both PRE- and POST-modifying the stack pointer.
825 This function is similar to stack_adjust_offset. */
826
827 static void
828 stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
829 HOST_WIDE_INT *post)
830 {
831 rtx src = SET_SRC (pattern);
832 rtx dest = SET_DEST (pattern);
833 enum rtx_code code;
834
835 if (dest == stack_pointer_rtx)
836 {
837 /* (set (reg sp) (plus (reg sp) (const_int))) */
838 code = GET_CODE (src);
839 if (! (code == PLUS || code == MINUS)
840 || XEXP (src, 0) != stack_pointer_rtx
841 || !CONST_INT_P (XEXP (src, 1)))
842 return;
843
844 if (code == MINUS)
845 *post += INTVAL (XEXP (src, 1));
846 else
847 *post -= INTVAL (XEXP (src, 1));
848 return;
849 }
850 HOST_WIDE_INT res[2] = { 0, 0 };
851 for_each_inc_dec (pattern, stack_adjust_offset_pre_post_cb, res);
852 *pre += res[0];
853 *post += res[1];
854 }
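/* Editor's illustrative sketch: how the two routines above accumulate the
   PRE and POST adjustments.  A plain `sp = sp + (-16)' is a POST
   adjustment of +16 (the tracked offset grows as the stack grows down),
   while a PRE_DEC of an 8-byte mode contributes +8 to PRE.  Worked out in
   plain integers (hypothetical values, guarded out of the build): */
#if 0
static void
toy_stack_adjust_demo (void)
{
  long pre = 0, post = 0;

  /* (set (reg sp) (plus (reg sp) (const_int -16)))
     => code == PLUS, so *post -= INTVAL, i.e. post -= -16.  */
  post -= -16;   /* post == 16 */

  /* PRE_DEC of an 8-byte mode: srcoff == -8 and the callback does
     arg[0] -= INTVAL (srcoff).  */
  pre -= -8;     /* pre == 8 */
}
#endif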
855
856 /* Given an INSN, calculate the amount of stack adjustment it contains
857 PRE- and POST-modifying stack pointer. */
858
859 static void
860 insn_stack_adjust_offset_pre_post (rtx_insn *insn, HOST_WIDE_INT *pre,
861 HOST_WIDE_INT *post)
862 {
863 rtx pattern;
864
865 *pre = 0;
866 *post = 0;
867
868 pattern = PATTERN (insn);
869 if (RTX_FRAME_RELATED_P (insn))
870 {
871 rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
872 if (expr)
873 pattern = XEXP (expr, 0);
874 }
875
876 if (GET_CODE (pattern) == SET)
877 stack_adjust_offset_pre_post (pattern, pre, post);
878 else if (GET_CODE (pattern) == PARALLEL
879 || GET_CODE (pattern) == SEQUENCE)
880 {
881 int i;
882
883 /* There may be stack adjustments inside compound insns. Search
884 for them. */
885 for ( i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
886 if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
887 stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
888 }
889 }
890
891 /* Compute stack adjustments for all blocks by traversing the DFS tree.
892 Return true when the adjustments on all incoming edges are consistent.
893 Heavily borrowed from pre_and_rev_post_order_compute. */
894
895 static bool
896 vt_stack_adjustments (void)
897 {
898 edge_iterator *stack;
899 int sp;
900
901 /* Initialize entry block. */
902 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->visited = true;
903 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->in.stack_adjust
904 = INCOMING_FRAME_SP_OFFSET;
905 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out.stack_adjust
906 = INCOMING_FRAME_SP_OFFSET;
907
908 /* Allocate stack for back-tracking up CFG. */
909 stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun) + 1);
910 sp = 0;
911
912 /* Push the first edge on to the stack. */
913 stack[sp++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
914
915 while (sp)
916 {
917 edge_iterator ei;
918 basic_block src;
919 basic_block dest;
920
921 /* Look at the edge on the top of the stack. */
922 ei = stack[sp - 1];
923 src = ei_edge (ei)->src;
924 dest = ei_edge (ei)->dest;
925
926 /* Check if the edge destination has been visited yet. */
927 if (!VTI (dest)->visited)
928 {
929 rtx_insn *insn;
930 HOST_WIDE_INT pre, post, offset;
931 VTI (dest)->visited = true;
932 VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust;
933
934 if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
935 for (insn = BB_HEAD (dest);
936 insn != NEXT_INSN (BB_END (dest));
937 insn = NEXT_INSN (insn))
938 if (INSN_P (insn))
939 {
940 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
941 offset += pre + post;
942 }
943
944 VTI (dest)->out.stack_adjust = offset;
945
946 if (EDGE_COUNT (dest->succs) > 0)
947 /* Since the DEST node has been visited for the first
948 time, check its successors. */
949 stack[sp++] = ei_start (dest->succs);
950 }
951 else
952 {
953 /* We can end up with different stack adjustments for the exit block
954 of a shrink-wrapped function if stack_adjust_offset_pre_post
955 doesn't understand the rtx pattern used to restore the stack
956 pointer in the epilogue. For example, on s390(x), the stack
957 pointer is often restored via a load-multiple instruction
958 and so no stack_adjust offset is recorded for it. This means
959 that the stack offset at the end of the epilogue block is the
960 same as the offset before the epilogue, whereas other paths
961 to the exit block will have the correct stack_adjust.
962
963 It is safe to ignore these differences because (a) we never
964 use the stack_adjust for the exit block in this pass and
965 (b) dwarf2cfi checks whether the CFA notes in a shrink-wrapped
966 function are correct.
967
968 We must check whether the adjustments on other edges are
969 the same though. */
970 if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
971 && VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
972 {
973 free (stack);
974 return false;
975 }
976
977 if (! ei_one_before_end_p (ei))
978 /* Go to the next edge. */
979 ei_next (&stack[sp - 1]);
980 else
981 /* Return to previous level if there are no more edges. */
982 sp--;
983 }
984 }
985
986 free (stack);
987 return true;
988 }
989
990 /* arg_pointer_rtx or frame_pointer_rtx, respectively, if stack_pointer_rtx
991 or hard_frame_pointer_rtx is being mapped to it, and the offset for it. */
992 static rtx cfa_base_rtx;
993 static HOST_WIDE_INT cfa_base_offset;
994
995 /* Compute a CFA-based value for an ADJUSTMENT made to stack_pointer_rtx
996 or hard_frame_pointer_rtx. */
997
998 static inline rtx
999 compute_cfa_pointer (HOST_WIDE_INT adjustment)
1000 {
1001 return plus_constant (Pmode, cfa_base_rtx, adjustment + cfa_base_offset);
1002 }
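/* Editor's illustrative sketch: compute_cfa_pointer rebases a
   stack-pointer-relative address onto the stable CFA base register.  With
   hypothetical numbers: if cfa_base_offset == -16 and the stack
   adjustment at the use is 48, an address sp + 8 becomes
   cfa_base + (48 + -16) + 8 (guarded out of the build): */
#if 0
static long
toy_rebase_sp_offset (long sp_relative, long stack_adjust,
                      long cfa_base_offset)
{
  /* Mirrors plus_constant (Pmode, cfa_base_rtx,
                            adjustment + cfa_base_offset) above, with the
     original sp-relative displacement surviving unchanged.  */
  return stack_adjust + cfa_base_offset + sp_relative;
}
#endif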
1003
1004 /* Adjustment from hard_frame_pointer_rtx to the cfa base reg,
1005 or -1 if the replacement shouldn't be done. */
1006 static HOST_WIDE_INT hard_frame_pointer_adjustment = -1;
1007
1008 /* Data for adjust_mems callback. */
1009
1010 struct adjust_mem_data
1011 {
1012 bool store;
1013 machine_mode mem_mode;
1014 HOST_WIDE_INT stack_adjust;
1015 rtx_expr_list *side_effects;
1016 };
1017
1018 /* Helper for adjust_mems. Return true if X is suitable for
1019 transformation of wider mode arithmetic to narrower mode. */
1020
1021 static bool
1022 use_narrower_mode_test (rtx x, const_rtx subreg)
1023 {
1024 subrtx_var_iterator::array_type array;
1025 FOR_EACH_SUBRTX_VAR (iter, array, x, NONCONST)
1026 {
1027 rtx x = *iter;
1028 if (CONSTANT_P (x))
1029 iter.skip_subrtxes ();
1030 else
1031 switch (GET_CODE (x))
1032 {
1033 case REG:
1034 if (cselib_lookup (x, GET_MODE (SUBREG_REG (subreg)), 0, VOIDmode))
1035 return false;
1036 if (!validate_subreg (GET_MODE (subreg), GET_MODE (x), x,
1037 subreg_lowpart_offset (GET_MODE (subreg),
1038 GET_MODE (x))))
1039 return false;
1040 break;
1041 case PLUS:
1042 case MINUS:
1043 case MULT:
1044 break;
1045 case ASHIFT:
1046 iter.substitute (XEXP (x, 0));
1047 break;
1048 default:
1049 return false;
1050 }
1051 }
1052 return true;
1053 }
1054
1055 /* Transform X into narrower mode MODE from wider mode WMODE. */
1056
1057 static rtx
1058 use_narrower_mode (rtx x, machine_mode mode, machine_mode wmode)
1059 {
1060 rtx op0, op1;
1061 if (CONSTANT_P (x))
1062 return lowpart_subreg (mode, x, wmode);
1063 switch (GET_CODE (x))
1064 {
1065 case REG:
1066 return lowpart_subreg (mode, x, wmode);
1067 case PLUS:
1068 case MINUS:
1069 case MULT:
1070 op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
1071 op1 = use_narrower_mode (XEXP (x, 1), mode, wmode);
1072 return simplify_gen_binary (GET_CODE (x), mode, op0, op1);
1073 case ASHIFT:
1074 op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
1075 op1 = XEXP (x, 1);
1076 /* Ensure shift amount is not wider than mode. */
1077 if (GET_MODE (op1) == VOIDmode)
1078 op1 = lowpart_subreg (mode, op1, wmode);
1079 else if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (GET_MODE (op1)))
1080 op1 = lowpart_subreg (mode, op1, GET_MODE (op1));
1081 return simplify_gen_binary (ASHIFT, mode, op0, op1);
1082 default:
1083 gcc_unreachable ();
1084 }
1085 }
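/* Editor's illustrative sketch: the rewrite above is valid because
   truncation to a narrower mode distributes over PLUS, MINUS and MULT
   (but not over, say, division).  The same fact in plain C++ (guarded out
   of the build): */
#if 0
#include <cassert>
#include <cstdint>

static void
toy_narrow_demo (void)
{
  uint64_t a = 0x1234567890abcdefULL, b = 0x0fedcba098765432ULL;

  /* Narrow-after-op equals op-after-narrow for +, - and *.  */
  assert ((uint32_t) (a + b) == (uint32_t) ((uint32_t) a + (uint32_t) b));
  assert ((uint32_t) (a - b) == (uint32_t) ((uint32_t) a - (uint32_t) b));
  assert ((uint32_t) (a * b) == (uint32_t) ((uint32_t) a * (uint32_t) b));
}
#endif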
1086
1087 /* Helper function for adjusting used MEMs. */
1088
1089 static rtx
1090 adjust_mems (rtx loc, const_rtx old_rtx, void *data)
1091 {
1092 struct adjust_mem_data *amd = (struct adjust_mem_data *) data;
1093 rtx mem, addr = loc, tem;
1094 machine_mode mem_mode_save;
1095 bool store_save;
1096 switch (GET_CODE (loc))
1097 {
1098 case REG:
1099 /* Don't do any sp or fp replacements outside of MEM addresses
1100 on the LHS. */
1101 if (amd->mem_mode == VOIDmode && amd->store)
1102 return loc;
1103 if (loc == stack_pointer_rtx
1104 && !frame_pointer_needed
1105 && cfa_base_rtx)
1106 return compute_cfa_pointer (amd->stack_adjust);
1107 else if (loc == hard_frame_pointer_rtx
1108 && frame_pointer_needed
1109 && hard_frame_pointer_adjustment != -1
1110 && cfa_base_rtx)
1111 return compute_cfa_pointer (hard_frame_pointer_adjustment);
1112 gcc_checking_assert (loc != virtual_incoming_args_rtx);
1113 return loc;
1114 case MEM:
1115 mem = loc;
1116 if (!amd->store)
1117 {
1118 mem = targetm.delegitimize_address (mem);
1119 if (mem != loc && !MEM_P (mem))
1120 return simplify_replace_fn_rtx (mem, old_rtx, adjust_mems, data);
1121 }
1122
1123 addr = XEXP (mem, 0);
1124 mem_mode_save = amd->mem_mode;
1125 amd->mem_mode = GET_MODE (mem);
1126 store_save = amd->store;
1127 amd->store = false;
1128 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
1129 amd->store = store_save;
1130 amd->mem_mode = mem_mode_save;
1131 if (mem == loc)
1132 addr = targetm.delegitimize_address (addr);
1133 if (addr != XEXP (mem, 0))
1134 mem = replace_equiv_address_nv (mem, addr);
1135 if (!amd->store)
1136 mem = avoid_constant_pool_reference (mem);
1137 return mem;
1138 case PRE_INC:
1139 case PRE_DEC:
1140 addr = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
1141 gen_int_mode (GET_CODE (loc) == PRE_INC
1142 ? GET_MODE_SIZE (amd->mem_mode)
1143 : -GET_MODE_SIZE (amd->mem_mode),
1144 GET_MODE (loc))); /* FALLTHRU */
1145 case POST_INC:
1146 case POST_DEC:
1147 if (addr == loc)
1148 addr = XEXP (loc, 0);
1149 gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode);
1150 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
1151 tem = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
1152 gen_int_mode ((GET_CODE (loc) == PRE_INC
1153 || GET_CODE (loc) == POST_INC)
1154 ? GET_MODE_SIZE (amd->mem_mode)
1155 : -GET_MODE_SIZE (amd->mem_mode),
1156 GET_MODE (loc)));
1157 store_save = amd->store;
1158 amd->store = false;
1159 tem = simplify_replace_fn_rtx (tem, old_rtx, adjust_mems, data);
1160 amd->store = store_save;
1161 amd->side_effects = alloc_EXPR_LIST (0,
1162 gen_rtx_SET (XEXP (loc, 0), tem),
1163 amd->side_effects);
1164 return addr;
1165 case PRE_MODIFY:
1166 addr = XEXP (loc, 1); /* FALLTHRU */
1167 case POST_MODIFY:
1168 if (addr == loc)
1169 addr = XEXP (loc, 0);
1170 gcc_assert (amd->mem_mode != VOIDmode);
1171 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
1172 store_save = amd->store;
1173 amd->store = false;
1174 tem = simplify_replace_fn_rtx (XEXP (loc, 1), old_rtx,
1175 adjust_mems, data);
1176 amd->store = store_save;
1177 amd->side_effects = alloc_EXPR_LIST (0,
1178 gen_rtx_SET (XEXP (loc, 0), tem),
1179 amd->side_effects);
1180 return addr;
1181 case SUBREG:
1182 /* First try without delegitimization of whole MEMs and
1183 avoid_constant_pool_reference, which is more likely to succeed. */
1184 store_save = amd->store;
1185 amd->store = true;
1186 addr = simplify_replace_fn_rtx (SUBREG_REG (loc), old_rtx, adjust_mems,
1187 data);
1188 amd->store = store_save;
1189 mem = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
1190 if (mem == SUBREG_REG (loc))
1191 {
1192 tem = loc;
1193 goto finish_subreg;
1194 }
1195 tem = simplify_gen_subreg (GET_MODE (loc), mem,
1196 GET_MODE (SUBREG_REG (loc)),
1197 SUBREG_BYTE (loc));
1198 if (tem)
1199 goto finish_subreg;
1200 tem = simplify_gen_subreg (GET_MODE (loc), addr,
1201 GET_MODE (SUBREG_REG (loc)),
1202 SUBREG_BYTE (loc));
1203 if (tem == NULL_RTX)
1204 tem = gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc));
1205 finish_subreg:
1206 if (MAY_HAVE_DEBUG_INSNS
1207 && GET_CODE (tem) == SUBREG
1208 && (GET_CODE (SUBREG_REG (tem)) == PLUS
1209 || GET_CODE (SUBREG_REG (tem)) == MINUS
1210 || GET_CODE (SUBREG_REG (tem)) == MULT
1211 || GET_CODE (SUBREG_REG (tem)) == ASHIFT)
1212 && (GET_MODE_CLASS (GET_MODE (tem)) == MODE_INT
1213 || GET_MODE_CLASS (GET_MODE (tem)) == MODE_PARTIAL_INT)
1214 && (GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_INT
1215 || GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_PARTIAL_INT)
1216 && GET_MODE_PRECISION (GET_MODE (tem))
1217 < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (tem)))
1218 && subreg_lowpart_p (tem)
1219 && use_narrower_mode_test (SUBREG_REG (tem), tem))
1220 return use_narrower_mode (SUBREG_REG (tem), GET_MODE (tem),
1221 GET_MODE (SUBREG_REG (tem)));
1222 return tem;
1223 case ASM_OPERANDS:
1224 /* Don't do any replacements in the second and following
1225 ASM_OPERANDS of an inline-asm with multiple sets.
1226 ASM_OPERANDS_INPUT_VEC, ASM_OPERANDS_INPUT_CONSTRAINT_VEC
1227 and ASM_OPERANDS_LABEL_VEC need to be equal between
1228 all the ASM_OPERANDs in the insn and adjust_insn will
1229 fix this up. */
1230 if (ASM_OPERANDS_OUTPUT_IDX (loc) != 0)
1231 return loc;
1232 break;
1233 default:
1234 break;
1235 }
1236 return NULL_RTX;
1237 }
1238
1239 /* Helper function for replacement of uses. */
1240
1241 static void
1242 adjust_mem_uses (rtx *x, void *data)
1243 {
1244 rtx new_x = simplify_replace_fn_rtx (*x, NULL_RTX, adjust_mems, data);
1245 if (new_x != *x)
1246 validate_change (NULL_RTX, x, new_x, true);
1247 }
1248
1249 /* Helper function for replacement of stores. */
1250
1251 static void
1252 adjust_mem_stores (rtx loc, const_rtx expr, void *data)
1253 {
1254 if (MEM_P (loc))
1255 {
1256 rtx new_dest = simplify_replace_fn_rtx (SET_DEST (expr), NULL_RTX,
1257 adjust_mems, data);
1258 if (new_dest != SET_DEST (expr))
1259 {
1260 rtx xexpr = CONST_CAST_RTX (expr);
1261 validate_change (NULL_RTX, &SET_DEST (xexpr), new_dest, true);
1262 }
1263 }
1264 }
1265
1266 /* Simplify INSN. Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
1267 replace them with their value in the insn and add the side-effects
1268 as other sets to the insn. */
1269
1270 static void
1271 adjust_insn (basic_block bb, rtx_insn *insn)
1272 {
1273 struct adjust_mem_data amd;
1274 rtx set;
1275
1276 #ifdef HAVE_window_save
1277 /* If the target machine has an explicit window save instruction, the
1278 transformation OUTGOING_REGNO -> INCOMING_REGNO is done there. */
1279 if (RTX_FRAME_RELATED_P (insn)
1280 && find_reg_note (insn, REG_CFA_WINDOW_SAVE, NULL_RTX))
1281 {
1282 unsigned int i, nregs = vec_safe_length (windowed_parm_regs);
1283 rtx rtl = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nregs * 2));
1284 parm_reg_t *p;
1285
1286 FOR_EACH_VEC_SAFE_ELT (windowed_parm_regs, i, p)
1287 {
1288 XVECEXP (rtl, 0, i * 2)
1289 = gen_rtx_SET (p->incoming, p->outgoing);
1290 /* Do not clobber the attached DECL, but only the REG. */
1291 XVECEXP (rtl, 0, i * 2 + 1)
1292 = gen_rtx_CLOBBER (GET_MODE (p->outgoing),
1293 gen_raw_REG (GET_MODE (p->outgoing),
1294 REGNO (p->outgoing)));
1295 }
1296
1297 validate_change (NULL_RTX, &PATTERN (insn), rtl, true);
1298 return;
1299 }
1300 #endif
1301
1302 amd.mem_mode = VOIDmode;
1303 amd.stack_adjust = -VTI (bb)->out.stack_adjust;
1304 amd.side_effects = NULL;
1305
1306 amd.store = true;
1307 note_stores (PATTERN (insn), adjust_mem_stores, &amd);
1308
1309 amd.store = false;
1310 if (GET_CODE (PATTERN (insn)) == PARALLEL
1311 && asm_noperands (PATTERN (insn)) > 0
1312 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1313 {
1314 rtx body, set0;
1315 int i;
1316
1317 /* An inline-asm with multiple sets is a tiny bit more complicated,
1318 because the 3 vectors in ASM_OPERANDS need to be shared between
1319 all ASM_OPERANDS in the instruction. adjust_mems will
1320 not touch ASM_OPERANDS other than the first one; the asm_noperands
1321 test above needs to be done before that (otherwise it would fail)
1322 and afterwards this code fixes it up. */
1323 note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
1324 body = PATTERN (insn);
1325 set0 = XVECEXP (body, 0, 0);
1326 gcc_checking_assert (GET_CODE (set0) == SET
1327 && GET_CODE (SET_SRC (set0)) == ASM_OPERANDS
1328 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set0)) == 0);
1329 for (i = 1; i < XVECLEN (body, 0); i++)
1330 if (GET_CODE (XVECEXP (body, 0, i)) != SET)
1331 break;
1332 else
1333 {
1334 set = XVECEXP (body, 0, i);
1335 gcc_checking_assert (GET_CODE (SET_SRC (set)) == ASM_OPERANDS
1336 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set))
1337 == i);
1338 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (set))
1339 != ASM_OPERANDS_INPUT_VEC (SET_SRC (set0))
1340 || ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set))
1341 != ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0))
1342 || ASM_OPERANDS_LABEL_VEC (SET_SRC (set))
1343 != ASM_OPERANDS_LABEL_VEC (SET_SRC (set0)))
1344 {
1345 rtx newsrc = shallow_copy_rtx (SET_SRC (set));
1346 ASM_OPERANDS_INPUT_VEC (newsrc)
1347 = ASM_OPERANDS_INPUT_VEC (SET_SRC (set0));
1348 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (newsrc)
1349 = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0));
1350 ASM_OPERANDS_LABEL_VEC (newsrc)
1351 = ASM_OPERANDS_LABEL_VEC (SET_SRC (set0));
1352 validate_change (NULL_RTX, &SET_SRC (set), newsrc, true);
1353 }
1354 }
1355 }
1356 else
1357 note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
1358
1359 /* For read-only MEMs containing some constant, prefer those
1360 constants. */
1361 set = single_set (insn);
1362 if (set && MEM_P (SET_SRC (set)) && MEM_READONLY_P (SET_SRC (set)))
1363 {
1364 rtx note = find_reg_equal_equiv_note (insn);
1365
1366 if (note && CONSTANT_P (XEXP (note, 0)))
1367 validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true);
1368 }
1369
1370 if (amd.side_effects)
1371 {
1372 rtx *pat, new_pat, s;
1373 int i, oldn, newn;
1374
1375 pat = &PATTERN (insn);
1376 if (GET_CODE (*pat) == COND_EXEC)
1377 pat = &COND_EXEC_CODE (*pat);
1378 if (GET_CODE (*pat) == PARALLEL)
1379 oldn = XVECLEN (*pat, 0);
1380 else
1381 oldn = 1;
1382 for (s = amd.side_effects, newn = 0; s; newn++)
1383 s = XEXP (s, 1);
1384 new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn));
1385 if (GET_CODE (*pat) == PARALLEL)
1386 for (i = 0; i < oldn; i++)
1387 XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i);
1388 else
1389 XVECEXP (new_pat, 0, 0) = *pat;
1390 for (s = amd.side_effects, i = oldn; i < oldn + newn; i++, s = XEXP (s, 1))
1391 XVECEXP (new_pat, 0, i) = XEXP (s, 0);
1392 free_EXPR_LIST_list (&amd.side_effects);
1393 validate_change (NULL_RTX, pat, new_pat, true);
1394 }
1395 }
1396
1397 /* Return the DEBUG_EXPR of a DEBUG_EXPR_DECL or the VALUE in DV. */
1398 static inline rtx
1399 dv_as_rtx (decl_or_value dv)
1400 {
1401 tree decl;
1402
1403 if (dv_is_value_p (dv))
1404 return dv_as_value (dv);
1405
1406 decl = dv_as_decl (dv);
1407
1408 gcc_checking_assert (TREE_CODE (decl) == DEBUG_EXPR_DECL);
1409 return DECL_RTL_KNOWN_SET (decl);
1410 }
1411
1412 /* Return nonzero if a decl_or_value must not have more than one
1413 variable part. The returned value discriminates among various
1414 kinds of one-part DVs according to enum onepart_enum. */
1415 static inline onepart_enum_t
1416 dv_onepart_p (decl_or_value dv)
1417 {
1418 tree decl;
1419
1420 if (!MAY_HAVE_DEBUG_INSNS)
1421 return NOT_ONEPART;
1422
1423 if (dv_is_value_p (dv))
1424 return ONEPART_VALUE;
1425
1426 decl = dv_as_decl (dv);
1427
1428 if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
1429 return ONEPART_DEXPR;
1430
1431 if (target_for_debug_bind (decl) != NULL_TREE)
1432 return ONEPART_VDECL;
1433
1434 return NOT_ONEPART;
1435 }
1436
1437 /* Return the variable pool to be used for a dv of type ONEPART. */
1438 static inline pool_allocator <variable_def> &
1439 onepart_pool (onepart_enum_t onepart)
1440 {
1441 return onepart ? valvar_pool : var_pool;
1442 }
1443
1444 /* Build a decl_or_value out of a decl. */
1445 static inline decl_or_value
1446 dv_from_decl (tree decl)
1447 {
1448 decl_or_value dv;
1449 dv = decl;
1450 gcc_checking_assert (dv_is_decl_p (dv));
1451 return dv;
1452 }
1453
1454 /* Build a decl_or_value out of a value. */
1455 static inline decl_or_value
1456 dv_from_value (rtx value)
1457 {
1458 decl_or_value dv;
1459 dv = value;
1460 gcc_checking_assert (dv_is_value_p (dv));
1461 return dv;
1462 }
1463
1464 /* Return a value or the decl of a debug_expr as a decl_or_value. */
1465 static inline decl_or_value
1466 dv_from_rtx (rtx x)
1467 {
1468 decl_or_value dv;
1469
1470 switch (GET_CODE (x))
1471 {
1472 case DEBUG_EXPR:
1473 dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x));
1474 gcc_checking_assert (DECL_RTL_KNOWN_SET (DEBUG_EXPR_TREE_DECL (x)) == x);
1475 break;
1476
1477 case VALUE:
1478 dv = dv_from_value (x);
1479 break;
1480
1481 default:
1482 gcc_unreachable ();
1483 }
1484
1485 return dv;
1486 }
1487
1488 extern void debug_dv (decl_or_value dv);
1489
1490 DEBUG_FUNCTION void
1491 debug_dv (decl_or_value dv)
1492 {
1493 if (dv_is_value_p (dv))
1494 debug_rtx (dv_as_value (dv));
1495 else
1496 debug_generic_stmt (dv_as_decl (dv));
1497 }
1498
1499 static void loc_exp_dep_clear (variable var);
1500
1501 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
1502
1503 static void
1504 variable_htab_free (void *elem)
1505 {
1506 int i;
1507 variable var = (variable) elem;
1508 location_chain node, next;
1509
1510 gcc_checking_assert (var->refcount > 0);
1511
1512 var->refcount--;
1513 if (var->refcount > 0)
1514 return;
1515
1516 for (i = 0; i < var->n_var_parts; i++)
1517 {
1518 for (node = var->var_part[i].loc_chain; node; node = next)
1519 {
1520 next = node->next;
1521 delete node;
1522 }
1523 var->var_part[i].loc_chain = NULL;
1524 }
1525 if (var->onepart && VAR_LOC_1PAUX (var))
1526 {
1527 loc_exp_dep_clear (var);
1528 if (VAR_LOC_DEP_LST (var))
1529 VAR_LOC_DEP_LST (var)->pprev = NULL;
1530 XDELETE (VAR_LOC_1PAUX (var));
1531 /* These may be reused across functions, so reset
1532 e.g. NO_LOC_P. */
1533 if (var->onepart == ONEPART_DEXPR)
1534 set_dv_changed (var->dv, true);
1535 }
1536 onepart_pool (var->onepart).remove (var);
1537 }
1538
1539 /* Initialize the set (array) SET of attrs to empty lists. */
1540
1541 static void
1542 init_attrs_list_set (attrs *set)
1543 {
1544 int i;
1545
1546 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1547 set[i] = NULL;
1548 }
1549
1550 /* Make the list *LISTP empty. */
1551
1552 static void
1553 attrs_list_clear (attrs *listp)
1554 {
1555 attrs list, next;
1556
1557 for (list = *listp; list; list = next)
1558 {
1559 next = list->next;
1560 delete list;
1561 }
1562 *listp = NULL;
1563 }
1564
1565 /* Return the member of LIST matching DV and OFFSET, or NULL if none. */
1566
1567 static attrs
1568 attrs_list_member (attrs list, decl_or_value dv, HOST_WIDE_INT offset)
1569 {
1570 for (; list; list = list->next)
1571 if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
1572 return list;
1573 return NULL;
1574 }
1575
1576 /* Insert the triplet DV, OFFSET, LOC into the list *LISTP. */
1577
1578 static void
1579 attrs_list_insert (attrs *listp, decl_or_value dv,
1580 HOST_WIDE_INT offset, rtx loc)
1581 {
1582 attrs list = new attrs_def;
1583 list->loc = loc;
1584 list->dv = dv;
1585 list->offset = offset;
1586 list->next = *listp;
1587 *listp = list;
1588 }
1589
1590 /* Copy all nodes from SRC and create a list *DSTP of the copies. */
1591
1592 static void
1593 attrs_list_copy (attrs *dstp, attrs src)
1594 {
1595 attrs_list_clear (dstp);
1596 for (; src; src = src->next)
1597 {
1598 attrs n = new attrs_def;
1599 n->loc = src->loc;
1600 n->dv = src->dv;
1601 n->offset = src->offset;
1602 n->next = *dstp;
1603 *dstp = n;
1604 }
1605 }
1606
1607 /* Add all nodes from SRC which are not in *DSTP to *DSTP. */
1608
1609 static void
1610 attrs_list_union (attrs *dstp, attrs src)
1611 {
1612 for (; src; src = src->next)
1613 {
1614 if (!attrs_list_member (*dstp, src->dv, src->offset))
1615 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1616 }
1617 }
1618
1619 /* Combine nodes that are not onepart nodes from SRC and SRC2 into
1620 *DSTP. */
1621
1622 static void
1623 attrs_list_mpdv_union (attrs *dstp, attrs src, attrs src2)
1624 {
1625 gcc_assert (!*dstp);
1626 for (; src; src = src->next)
1627 {
1628 if (!dv_onepart_p (src->dv))
1629 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1630 }
1631 for (src = src2; src; src = src->next)
1632 {
1633 if (!dv_onepart_p (src->dv)
1634 && !attrs_list_member (*dstp, src->dv, src->offset))
1635 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1636 }
1637 }
1638
1639 /* Shared hashtable support. */
1640
1641 /* Return true if VARS is shared. */
1642
1643 static inline bool
1644 shared_hash_shared (shared_hash vars)
1645 {
1646 return vars->refcount > 1;
1647 }
1648
1649 /* Return the hash table for VARS. */
1650
1651 static inline variable_table_type *
1652 shared_hash_htab (shared_hash vars)
1653 {
1654 return vars->htab;
1655 }
1656
1657 /* Return true if VAR is shared, possibly because VARS is shared. */
1658
1659 static inline bool
1660 shared_var_p (variable var, shared_hash vars)
1661 {
1662 /* Don't count an entry in the changed_variables table as a duplicate. */
1663 return ((var->refcount > 1 + (int) var->in_changed_variables)
1664 || shared_hash_shared (vars));
1665 }
1666
1667 /* Copy variables into a new hash table. */
1668
1669 static shared_hash
1670 shared_hash_unshare (shared_hash vars)
1671 {
1672 shared_hash new_vars = new shared_hash_def;
1673 gcc_assert (vars->refcount > 1);
1674 new_vars->refcount = 1;
1675 new_vars->htab = new variable_table_type (vars->htab->elements () + 3);
1676 vars_copy (new_vars->htab, vars->htab);
1677 vars->refcount--;
1678 return new_vars;
1679 }
1680
1681 /* Increment reference counter on VARS and return it. */
1682
1683 static inline shared_hash
1684 shared_hash_copy (shared_hash vars)
1685 {
1686 vars->refcount++;
1687 return vars;
1688 }
1689
1690 /* Decrement reference counter and destroy hash table if not shared
1691 anymore. */
1692
1693 static void
1694 shared_hash_destroy (shared_hash vars)
1695 {
1696 gcc_checking_assert (vars->refcount > 0);
1697 if (--vars->refcount == 0)
1698 {
1699 delete vars->htab;
1700 delete vars;
1701 }
1702 }
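/* Editor's illustrative sketch: a self-contained miniature of the
   copy-on-write refcounting implemented by shared_hash above; writes must
   go through toy_unshare first, just as callers use
   shared_hash_find_slot_unshare.  toy_* names are hypothetical (guarded
   out of the build): */
#if 0
#include <cassert>
#include <set>

struct toy_shared_set
{
  int refcount;
  std::set<int> items;
};

static toy_shared_set *
toy_copy (toy_shared_set *s)      /* Like shared_hash_copy.  */
{
  s->refcount++;
  return s;
}

static toy_shared_set *
toy_unshare (toy_shared_set *s)   /* Like shared_hash_unshare.  */
{
  assert (s->refcount > 1);
  toy_shared_set *ns = new toy_shared_set;
  ns->refcount = 1;
  ns->items = s->items;
  s->refcount--;
  return ns;
}

static void
toy_destroy (toy_shared_set *s)   /* Like shared_hash_destroy.  */
{
  if (--s->refcount == 0)
    delete s;
}
#endif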
1703
1704 /* Unshare *PVARS if shared and return slot for DV. If INS is
1705 INSERT, insert it if not already present. */
1706
1707 static inline variable_def **
1708 shared_hash_find_slot_unshare_1 (shared_hash *pvars, decl_or_value dv,
1709 hashval_t dvhash, enum insert_option ins)
1710 {
1711 if (shared_hash_shared (*pvars))
1712 *pvars = shared_hash_unshare (*pvars);
1713 return shared_hash_htab (*pvars)->find_slot_with_hash (dv, dvhash, ins);
1714 }
1715
1716 static inline variable_def **
1717 shared_hash_find_slot_unshare (shared_hash *pvars, decl_or_value dv,
1718 enum insert_option ins)
1719 {
1720 return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
1721 }
1722
1723 /* Return slot for DV, if it is already present in the hash table.
1724 If it is not present, insert it only if VARS is not shared; otherwise
1725 return NULL. */
1726
1727 static inline variable_def **
1728 shared_hash_find_slot_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1729 {
1730 return shared_hash_htab (vars)->find_slot_with_hash (dv, dvhash,
1731 shared_hash_shared (vars)
1732 ? NO_INSERT : INSERT);
1733 }
1734
1735 static inline variable_def **
1736 shared_hash_find_slot (shared_hash vars, decl_or_value dv)
1737 {
1738 return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
1739 }
1740
1741 /* Return slot for DV only if it is already present in the hash table. */
1742
1743 static inline variable_def **
1744 shared_hash_find_slot_noinsert_1 (shared_hash vars, decl_or_value dv,
1745 hashval_t dvhash)
1746 {
1747 return shared_hash_htab (vars)->find_slot_with_hash (dv, dvhash, NO_INSERT);
1748 }
1749
1750 static inline variable_def **
1751 shared_hash_find_slot_noinsert (shared_hash vars, decl_or_value dv)
1752 {
1753 return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
1754 }
1755
1756 /* Return variable for DV or NULL if not already present in the hash
1757 table. */
1758
1759 static inline variable
1760 shared_hash_find_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1761 {
1762 return shared_hash_htab (vars)->find_with_hash (dv, dvhash);
1763 }
1764
1765 static inline variable
1766 shared_hash_find (shared_hash vars, decl_or_value dv)
1767 {
1768 return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
1769 }
1770
1771 /* Return true if TVAL is better than CVAL as a canonical value. We
1772 choose lowest-numbered VALUEs, using the RTX address as a
1773 tie-breaker. The idea is to arrange them into a star topology,
1774 such that all of them are at most one step away from the canonical
1775 value, and the canonical value has backlinks to all of them, in
1776 addition to all the actual locations. We don't enforce this
1777 topology throughout the entire dataflow analysis, though.
1778 */
1779
1780 static inline bool
1781 canon_value_cmp (rtx tval, rtx cval)
1782 {
1783 return !cval
1784 || CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
1785 }
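/* Editor's illustrative sketch: with canon_value_cmp preferring the
   lowest-numbered VALUE, an equivalence class of values forms a star
   whose center is the oldest one.  A hypothetical miniature that picks
   the canonical representative by minimal uid (guarded out of the
   build): */
#if 0
#include <vector>

struct toy_value { unsigned uid; };

static toy_value *
toy_pick_canonical (const std::vector<toy_value *> &equiv_class)
{
  toy_value *cval = 0;
  for (unsigned i = 0; i < equiv_class.size (); i++)
    /* Same test as canon_value_cmp: TVAL wins if there is no canonical
       value yet, or if TVAL has the smaller uid.  */
    if (!cval || equiv_class[i]->uid < cval->uid)
      cval = equiv_class[i];
  return cval;
}
#endif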
1786
1787 static bool dst_can_be_shared;
1788
1789 /* Return a copy of a variable VAR and insert it into dataflow set SET. */
1790
1791 static variable_def **
1792 unshare_variable (dataflow_set *set, variable_def **slot, variable var,
1793 enum var_init_status initialized)
1794 {
1795 variable new_var;
1796 int i;
1797
1798 new_var = onepart_pool (var->onepart).allocate ();
1799 new_var->dv = var->dv;
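/* Editorial note: the copy starts with one reference of its own,
   while VAR gives up the reference held by the caller's slot,
   which is redirected to NEW_VAR below.  */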
1800 new_var->refcount = 1;
1801 var->refcount--;
1802 new_var->n_var_parts = var->n_var_parts;
1803 new_var->onepart = var->onepart;
1804 new_var->in_changed_variables = false;
1805
1806 if (! flag_var_tracking_uninit)
1807 initialized = VAR_INIT_STATUS_INITIALIZED;
1808
1809 for (i = 0; i < var->n_var_parts; i++)
1810 {
1811 location_chain node;
1812 location_chain *nextp;
1813
1814 if (i == 0 && var->onepart)
1815 {
1816 /* One-part auxiliary data is only used while emitting
1817 notes, so propagate it to the new variable in the active
1818 dataflow set. If we're not emitting notes, this will be
1819 a no-op. */
1820 gcc_checking_assert (!VAR_LOC_1PAUX (var) || emit_notes);
1821 VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (var);
1822 VAR_LOC_1PAUX (var) = NULL;
1823 }
1824 else
1825 VAR_PART_OFFSET (new_var, i) = VAR_PART_OFFSET (var, i);
1826 nextp = &new_var->var_part[i].loc_chain;
1827 for (node = var->var_part[i].loc_chain; node; node = node->next)
1828 {
1829 location_chain new_lc;
1830
1831 new_lc = new location_chain_def;
1832 new_lc->next = NULL;
1833 if (node->init > initialized)
1834 new_lc->init = node->init;
1835 else
1836 new_lc->init = initialized;
1837 if (node->set_src && !(MEM_P (node->set_src)))
1838 new_lc->set_src = node->set_src;
1839 else
1840 new_lc->set_src = NULL;
1841 new_lc->loc = node->loc;
1842
1843 *nextp = new_lc;
1844 nextp = &new_lc->next;
1845 }
1846
1847 new_var->var_part[i].cur_loc = var->var_part[i].cur_loc;
1848 }
1849
1850 dst_can_be_shared = false;
1851 if (shared_hash_shared (set->vars))
1852 slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
1853 else if (set->traversed_vars && set->vars != set->traversed_vars)
1854 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
1855 *slot = new_var;
1856 if (var->in_changed_variables)
1857 {
1858 variable_def **cslot
1859 = changed_variables->find_slot_with_hash (var->dv,
1860 dv_htab_hash (var->dv),
1861 NO_INSERT);
1862 gcc_assert (*cslot == (void *) var);
1863 var->in_changed_variables = false;
1864 variable_htab_free (var);
1865 *cslot = new_var;
1866 new_var->in_changed_variables = true;
1867 }
1868 return slot;
1869 }
1870
1871 /* Copy all variables from hash table SRC to hash table DST. */
1872
1873 static void
1874 vars_copy (variable_table_type *dst, variable_table_type *src)
1875 {
1876 variable_iterator_type hi;
1877 variable var;
1878
1879 FOR_EACH_HASH_TABLE_ELEMENT (*src, var, variable, hi)
1880 {
1881 variable_def **dstp;
1882 var->refcount++;
1883 dstp = dst->find_slot_with_hash (var->dv, dv_htab_hash (var->dv),
1884 INSERT);
1885 *dstp = var;
1886 }
1887 }
1888
1889 /* Map a decl to its main debug decl. */
1890
1891 static inline tree
1892 var_debug_decl (tree decl)
1893 {
1894 if (decl && TREE_CODE (decl) == VAR_DECL
1895 && DECL_HAS_DEBUG_EXPR_P (decl))
1896 {
1897 tree debugdecl = DECL_DEBUG_EXPR (decl);
1898 if (DECL_P (debugdecl))
1899 decl = debugdecl;
1900 }
1901
1902 return decl;
1903 }
1904
1905 /* Set the register LOC to contain DV, OFFSET. */
1906
1907 static void
1908 var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1909 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1910 enum insert_option iopt)
1911 {
1912 attrs node;
1913 bool decl_p = dv_is_decl_p (dv);
1914
1915 if (decl_p)
1916 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1917
1918 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1919 if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
1920 && node->offset == offset)
1921 break;
1922 if (!node)
1923 attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
1924 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1925 }
1926
1927 /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
1928
1929 static void
1930 var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1931 rtx set_src)
1932 {
1933 tree decl = REG_EXPR (loc);
1934 HOST_WIDE_INT offset = REG_OFFSET (loc);
1935
1936 var_reg_decl_set (set, loc, initialized,
1937 dv_from_decl (decl), offset, set_src, INSERT);
1938 }
1939
1940 static enum var_init_status
1941 get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
1942 {
1943 variable var;
1944 int i;
1945 enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;
1946
1947 if (! flag_var_tracking_uninit)
1948 return VAR_INIT_STATUS_INITIALIZED;
1949
1950 var = shared_hash_find (set->vars, dv);
1951 if (var)
1952 {
1953 for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
1954 {
1955 location_chain nextp;
1956 for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
1957 if (rtx_equal_p (nextp->loc, loc))
1958 {
1959 ret_val = nextp->init;
1960 break;
1961 }
1962 }
1963 }
1964
1965 return ret_val;
1966 }
1967
1968 /* Delete current content of register LOC in dataflow set SET and set
1969 the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
1970 MODIFY is true, any other live copies of the same variable part are
1971 also deleted from the dataflow set, otherwise the variable part is
1972 assumed to be copied from another location holding the same
1973 part. */
1974
1975 static void
1976 var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1977 enum var_init_status initialized, rtx set_src)
1978 {
1979 tree decl = REG_EXPR (loc);
1980 HOST_WIDE_INT offset = REG_OFFSET (loc);
1981 attrs node, next;
1982 attrs *nextp;
1983
1984 decl = var_debug_decl (decl);
1985
1986 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1987 initialized = get_init_value (set, loc, dv_from_decl (decl));
1988
1989 nextp = &set->regs[REGNO (loc)];
1990 for (node = *nextp; node; node = next)
1991 {
1992 next = node->next;
1993 if (dv_as_opaque (node->dv) != decl || node->offset != offset)
1994 {
1995 delete_variable_part (set, node->loc, node->dv, node->offset);
1996 delete node;
1997 *nextp = next;
1998 }
1999 else
2000 {
2001 node->loc = loc;
2002 nextp = &node->next;
2003 }
2004 }
2005 if (modify)
2006 clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
2007 var_reg_set (set, loc, initialized, set_src);
2008 }
2009
2010 /* Delete the association of register LOC in dataflow set SET with any
2011 variables that aren't onepart. If CLOBBER is true, also delete any
2012 other live copies of the same variable part, and delete the
2013 association with onepart dvs too. */
2014
2015 static void
2016 var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
2017 {
2018 attrs *nextp = &set->regs[REGNO (loc)];
2019 attrs node, next;
2020
2021 if (clobber)
2022 {
2023 tree decl = REG_EXPR (loc);
2024 HOST_WIDE_INT offset = REG_OFFSET (loc);
2025
2026 decl = var_debug_decl (decl);
2027
2028 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
2029 }
2030
2031 for (node = *nextp; node; node = next)
2032 {
2033 next = node->next;
2034 if (clobber || !dv_onepart_p (node->dv))
2035 {
2036 delete_variable_part (set, node->loc, node->dv, node->offset);
2037 delete node;
2038 *nextp = next;
2039 }
2040 else
2041 nextp = &node->next;
2042 }
2043 }
2044
2045 /* Delete content of register with number REGNO in dataflow set SET. */
2046
2047 static void
2048 var_regno_delete (dataflow_set *set, int regno)
2049 {
2050 attrs *reg = &set->regs[regno];
2051 attrs node, next;
2052
2053 for (node = *reg; node; node = next)
2054 {
2055 next = node->next;
2056 delete_variable_part (set, node->loc, node->dv, node->offset);
2057 delete node;
2058 }
2059 *reg = NULL;
2060 }
2061
2062 /* Return true if I is the negated value of a power of two. */
2063 static bool
2064 negative_power_of_two_p (HOST_WIDE_INT i)
2065 {
2066 unsigned HOST_WIDE_INT x = -(unsigned HOST_WIDE_INT)i;
2067 return x == (x & -x);
2068 }
2069
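/* Worked example (editorial): for i = -8, x = 8 and x & -x = 8,
   so the predicate holds; for i = -6, x = 6 but x & -x = 2, so it
   does not.  Such constants appear as AND masks in stack-alignment
   address arithmetic.  */
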
2070 /* Strip constant offsets and alignment masks off LOC. Return the base
2071 expression. */
2072
2073 static rtx
2074 vt_get_canonicalize_base (rtx loc)
2075 {
2076 while ((GET_CODE (loc) == PLUS
2077 || GET_CODE (loc) == AND)
2078 && GET_CODE (XEXP (loc, 1)) == CONST_INT
2079 && (GET_CODE (loc) != AND
2080 || negative_power_of_two_p (INTVAL (XEXP (loc, 1)))))
2081 loc = XEXP (loc, 0);
2082
2083 return loc;
2084 }
2085
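/* Editorial example: an address such as
   (and (plus (value V) (const_int 16)) (const_int -8))
   strips down to the base (value V); both the constant offset and
   the power-of-two alignment mask are peeled off.  */
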
2086 /* This caches canonicalized addresses for VALUEs, computed using
2087 information in the global cselib table. */
2088 static hash_map<rtx, rtx> *global_get_addr_cache;
2089
2090 /* This caches canonicalized addresses for VALUEs, computed using
2091 information from the global cache and information pertaining to a
2092 basic block being analyzed. */
2093 static hash_map<rtx, rtx> *local_get_addr_cache;
2094
2095 static rtx vt_canonicalize_addr (dataflow_set *, rtx);
2096
2097 /* Return the canonical address for LOC, which must be a VALUE, using a
2098 cached global equivalence or computing it and storing it in the
2099 global cache. */
2100
2101 static rtx
2102 get_addr_from_global_cache (rtx const loc)
2103 {
2104 rtx x;
2105
2106 gcc_checking_assert (GET_CODE (loc) == VALUE);
2107
2108 bool existed;
2109 rtx *slot = &global_get_addr_cache->get_or_insert (loc, &existed);
2110 if (existed)
2111 return *slot;
2112
2113 x = canon_rtx (get_addr (loc));
2114
2115 /* Tentative, avoiding infinite recursion. */
2116 *slot = x;
2117
2118 if (x != loc)
2119 {
2120 rtx nx = vt_canonicalize_addr (NULL, x);
2121 if (nx != x)
2122 {
2123 /* The table may have moved during recursion; recompute
2124 SLOT. */
2125 *global_get_addr_cache->get (loc) = x = nx;
2126 }
2127 }
2128
2129 return x;
2130 }
2131
2132 /* Return the canonical address for LOC, which must be a VALUE, using a
2133 cached local equivalence or computing it and storing it in the
2134 local cache. */
2135
2136 static rtx
2137 get_addr_from_local_cache (dataflow_set *set, rtx const loc)
2138 {
2139 rtx x;
2140 decl_or_value dv;
2141 variable var;
2142 location_chain l;
2143
2144 gcc_checking_assert (GET_CODE (loc) == VALUE);
2145
2146 bool existed;
2147 rtx *slot = &local_get_addr_cache->get_or_insert (loc, &existed);
2148 if (existed)
2149 return *slot;
2150
2151 x = get_addr_from_global_cache (loc);
2152
2153 /* Tentative, avoiding infinite recursion. */
2154 *slot = x;
2155
2156 /* Recurse to cache the local expansion of X, or to search for a
2157 VALUE within the expansion, if need be. */
2158 if (x != loc)
2159 {
2160 rtx nx = vt_canonicalize_addr (set, x);
2161 if (nx != x)
2162 {
2163 slot = local_get_addr_cache->get (loc);
2164 *slot = x = nx;
2165 }
2166 return x;
2167 }
2168
2169 dv = dv_from_rtx (x);
2170 var = shared_hash_find (set->vars, dv);
2171 if (!var)
2172 return x;
2173
2174 /* Look for an improved equivalent expression. */
2175 for (l = var->var_part[0].loc_chain; l; l = l->next)
2176 {
2177 rtx base = vt_get_canonicalize_base (l->loc);
2178 if (GET_CODE (base) == VALUE
2179 && canon_value_cmp (base, loc))
2180 {
2181 rtx nx = vt_canonicalize_addr (set, l->loc);
2182 if (x != nx)
2183 {
2184 slot = local_get_addr_cache->get (loc);
2185 *slot = x = nx;
2186 }
2187 break;
2188 }
2189 }
2190
2191 return x;
2192 }
2193
2194 /* Canonicalize LOC using equivalences from SET in addition to those
2195 in the cselib static table. It expects a VALUE-based expression,
2196 and it will only substitute VALUEs with other VALUEs or
2197 function-global equivalences, so that, if two addresses have base
2198 VALUEs that are locally or globally related in ways that
2199 memrefs_conflict_p cares about, they will both canonicalize to
2200 expressions that have the same base VALUE.
2201
2202 The use of VALUEs as canonical base addresses enables the canonical
2203 RTXs to remain unchanged globally, if they resolve to a constant,
2204 or throughout a basic block otherwise, so that they can be cached
2205 and the cache need not be invalidated when REGs, MEMs or such
2206 change. */
2207
2208 static rtx
2209 vt_canonicalize_addr (dataflow_set *set, rtx oloc)
2210 {
2211 HOST_WIDE_INT ofst = 0;
2212 machine_mode mode = GET_MODE (oloc);
2213 rtx loc = oloc;
2214 rtx x;
2215 bool retry = true;
2216
2217 while (retry)
2218 {
2219 while (GET_CODE (loc) == PLUS
2220 && GET_CODE (XEXP (loc, 1)) == CONST_INT)
2221 {
2222 ofst += INTVAL (XEXP (loc, 1));
2223 loc = XEXP (loc, 0);
2224 }
2225
2226 /* Alignment operations can't normally be combined, so just
2227 canonicalize the base and we're done. We'll normally have
2228 only one stack alignment anyway. */
2229 if (GET_CODE (loc) == AND
2230 && GET_CODE (XEXP (loc, 1)) == CONST_INT
2231 && negative_power_of_two_p (INTVAL (XEXP (loc, 1))))
2232 {
2233 x = vt_canonicalize_addr (set, XEXP (loc, 0));
2234 if (x != XEXP (loc, 0))
2235 loc = gen_rtx_AND (mode, x, XEXP (loc, 1));
2236 retry = false;
2237 }
2238
2239 if (GET_CODE (loc) == VALUE)
2240 {
2241 if (set)
2242 loc = get_addr_from_local_cache (set, loc);
2243 else
2244 loc = get_addr_from_global_cache (loc);
2245
2246 /* Consolidate plus_constants. */
2247 while (ofst && GET_CODE (loc) == PLUS
2248 && GET_CODE (XEXP (loc, 1)) == CONST_INT)
2249 {
2250 ofst += INTVAL (XEXP (loc, 1));
2251 loc = XEXP (loc, 0);
2252 }
2253
2254 retry = false;
2255 }
2256 else
2257 {
2258 x = canon_rtx (loc);
2259 if (retry)
2260 retry = (x != loc);
2261 loc = x;
2262 }
2263 }
2264
2265 /* Add OFST back in. */
2266 if (ofst)
2267 {
2268 /* Don't build new RTL if we can help it. */
2269 if (GET_CODE (oloc) == PLUS
2270 && XEXP (oloc, 0) == loc
2271 && INTVAL (XEXP (oloc, 1)) == ofst)
2272 return oloc;
2273
2274 loc = plus_constant (mode, loc, ofst);
2275 }
2276
2277 return loc;
2278 }
2279
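/* Editorial sketch of the effect: if VALUE V1 resolves through
   the caches to (plus (value V0) (const_int 8)), then
   (plus (value V1) (const_int 4)) canonicalizes to
   (plus (value V0) (const_int 12)); offsets are accumulated in
   OFST and re-applied once at the end, so both addresses share
   the base VALUE V0.  */
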
2280 /* Return true iff there's a true dependence between MLOC and LOC.
2281 MADDR must be a canonicalized version of MLOC's address. */
2282
2283 static inline bool
2284 vt_canon_true_dep (dataflow_set *set, rtx mloc, rtx maddr, rtx loc)
2285 {
2286 if (GET_CODE (loc) != MEM)
2287 return false;
2288
2289 rtx addr = vt_canonicalize_addr (set, XEXP (loc, 0));
2290 if (!canon_true_dependence (mloc, GET_MODE (mloc), maddr, loc, addr))
2291 return false;
2292
2293 return true;
2294 }
2295
2296 /* Hold parameters for the hashtab traversal function
2297 drop_overlapping_mem_locs; see below. */
2298
2299 struct overlapping_mems
2300 {
2301 dataflow_set *set;
2302 rtx loc, addr;
2303 };
2304
2305 /* Remove all MEMs that overlap with COMS->LOC from the location list
2306 of a hash table entry for a value. COMS->ADDR must be a
2307 canonicalized form of COMS->LOC's address, and COMS->LOC must be
2308 canonicalized itself. */
2309
2310 int
2311 drop_overlapping_mem_locs (variable_def **slot, overlapping_mems *coms)
2312 {
2313 dataflow_set *set = coms->set;
2314 rtx mloc = coms->loc, addr = coms->addr;
2315 variable var = *slot;
2316
2317 if (var->onepart == ONEPART_VALUE)
2318 {
2319 location_chain loc, *locp;
2320 bool changed = false;
2321 rtx cur_loc;
2322
2323 gcc_assert (var->n_var_parts == 1);
2324
2325 if (shared_var_p (var, set->vars))
2326 {
2327 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
2328 if (vt_canon_true_dep (set, mloc, addr, loc->loc))
2329 break;
2330
2331 if (!loc)
2332 return 1;
2333
2334 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
2335 var = *slot;
2336 gcc_assert (var->n_var_parts == 1);
2337 }
2338
2339 if (VAR_LOC_1PAUX (var))
2340 cur_loc = VAR_LOC_FROM (var);
2341 else
2342 cur_loc = var->var_part[0].cur_loc;
2343
2344 for (locp = &var->var_part[0].loc_chain, loc = *locp;
2345 loc; loc = *locp)
2346 {
2347 if (!vt_canon_true_dep (set, mloc, addr, loc->loc))
2348 {
2349 locp = &loc->next;
2350 continue;
2351 }
2352
2353 *locp = loc->next;
2354 /* If we have deleted the location which was last emitted,
2355 we have to emit a new location, so add the variable to the
2356 set of changed variables. */
2357 if (cur_loc == loc->loc)
2358 {
2359 changed = true;
2360 var->var_part[0].cur_loc = NULL;
2361 if (VAR_LOC_1PAUX (var))
2362 VAR_LOC_FROM (var) = NULL;
2363 }
2364 delete loc;
2365 }
2366
2367 if (!var->var_part[0].loc_chain)
2368 {
2369 var->n_var_parts--;
2370 changed = true;
2371 }
2372 if (changed)
2373 variable_was_changed (var, set);
2374 }
2375
2376 return 1;
2377 }
2378
2379 /* Remove from SET all VALUE bindings to MEMs that overlap with LOC. */
2380
2381 static void
2382 clobber_overlapping_mems (dataflow_set *set, rtx loc)
2383 {
2384 struct overlapping_mems coms;
2385
2386 gcc_checking_assert (GET_CODE (loc) == MEM);
2387
2388 coms.set = set;
2389 coms.loc = canon_rtx (loc);
2390 coms.addr = vt_canonicalize_addr (set, XEXP (loc, 0));
2391
2392 set->traversed_vars = set->vars;
2393 shared_hash_htab (set->vars)
2394 ->traverse <overlapping_mems*, drop_overlapping_mem_locs> (&coms);
2395 set->traversed_vars = NULL;
2396 }
2397
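/* Editorial note: recording traversed_vars around the traversal
   lets unshare_variable detect that set->vars was unshared behind
   the iterator's back; when the two differ, a slot pointing into
   the traversed table is stale and is looked up again in the new
   table.  */
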
2398 /* Set the location of DV, OFFSET as the MEM LOC. */
2399
2400 static void
2401 var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2402 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
2403 enum insert_option iopt)
2404 {
2405 if (dv_is_decl_p (dv))
2406 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
2407
2408 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
2409 }
2410
2411 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
2412 SET to LOC.
2413 Adjust the address first if it is stack pointer based. */
2414
2415 static void
2416 var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2417 rtx set_src)
2418 {
2419 tree decl = MEM_EXPR (loc);
2420 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2421
2422 var_mem_decl_set (set, loc, initialized,
2423 dv_from_decl (decl), offset, set_src, INSERT);
2424 }
2425
2426 /* Delete and set the location part of variable MEM_EXPR (LOC) in
2427 dataflow set SET to LOC. If MODIFY is true, any other live copies
2428 of the same variable part are also deleted from the dataflow set,
2429 otherwise the variable part is assumed to be copied from another
2430 location holding the same part.
2431 Adjust the address first if it is stack pointer based. */
2432
2433 static void
2434 var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
2435 enum var_init_status initialized, rtx set_src)
2436 {
2437 tree decl = MEM_EXPR (loc);
2438 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2439
2440 clobber_overlapping_mems (set, loc);
2441 decl = var_debug_decl (decl);
2442
2443 if (initialized == VAR_INIT_STATUS_UNKNOWN)
2444 initialized = get_init_value (set, loc, dv_from_decl (decl));
2445
2446 if (modify)
2447 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
2448 var_mem_set (set, loc, initialized, set_src);
2449 }
2450
2451 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
2452 true, also delete any other live copies of the same variable part.
2453 Adjust the address first if it is stack pointer based. */
2454
2455 static void
2456 var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
2457 {
2458 tree decl = MEM_EXPR (loc);
2459 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2460
2461 clobber_overlapping_mems (set, loc);
2462 decl = var_debug_decl (decl);
2463 if (clobber)
2464 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
2465 delete_variable_part (set, loc, dv_from_decl (decl), offset);
2466 }
2467
2468 /* Return true if LOC should not be expanded for location expressions,
2469 or used in them. */
2470
2471 static inline bool
2472 unsuitable_loc (rtx loc)
2473 {
2474 switch (GET_CODE (loc))
2475 {
2476 case PC:
2477 case SCRATCH:
2478 case CC0:
2479 case ASM_INPUT:
2480 case ASM_OPERANDS:
2481 return true;
2482
2483 default:
2484 return false;
2485 }
2486 }
2487
2488 /* Bind VAL to LOC in SET. If MODIFIED, detach LOC from any values
2489 bound to it. */
2490
2491 static inline void
2492 val_bind (dataflow_set *set, rtx val, rtx loc, bool modified)
2493 {
2494 if (REG_P (loc))
2495 {
2496 if (modified)
2497 var_regno_delete (set, REGNO (loc));
2498 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2499 dv_from_value (val), 0, NULL_RTX, INSERT);
2500 }
2501 else if (MEM_P (loc))
2502 {
2503 struct elt_loc_list *l = CSELIB_VAL_PTR (val)->locs;
2504
2505 if (modified)
2506 clobber_overlapping_mems (set, loc);
2507
2508 if (l && GET_CODE (l->loc) == VALUE)
2509 l = canonical_cselib_val (CSELIB_VAL_PTR (l->loc))->locs;
2510
2511 /* If this MEM is a global constant, we don't need it in the
2512 dynamic tables. ??? We should test this before emitting the
2513 micro-op in the first place. */
2514 while (l)
2515 if (GET_CODE (l->loc) == MEM && XEXP (l->loc, 0) == XEXP (loc, 0))
2516 break;
2517 else
2518 l = l->next;
2519
2520 if (!l)
2521 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2522 dv_from_value (val), 0, NULL_RTX, INSERT);
2523 }
2524 else
2525 {
2526 /* Other kinds of equivalences are necessarily static, at least
2527 so long as we do not perform substitutions while merging
2528 expressions. */
2529 gcc_unreachable ();
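/* Editorial note: the call below is unreachable behind the assert
   above; it documents how such an equivalence would be recorded
   if it ever became reachable.  */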
2530 set_variable_part (set, loc, dv_from_value (val), 0,
2531 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2532 }
2533 }
2534
2535 /* Bind a value to a location it was just stored in. If MODIFIED
2536 holds, assume the location was modified, detaching it from any
2537 values bound to it. */
2538
2539 static void
2540 val_store (dataflow_set *set, rtx val, rtx loc, rtx_insn *insn,
2541 bool modified)
2542 {
2543 cselib_val *v = CSELIB_VAL_PTR (val);
2544
2545 gcc_assert (cselib_preserved_value_p (v));
2546
2547 if (dump_file)
2548 {
2549 fprintf (dump_file, "%i: ", insn ? INSN_UID (insn) : 0);
2550 print_inline_rtx (dump_file, loc, 0);
2551 fprintf (dump_file, " evaluates to ");
2552 print_inline_rtx (dump_file, val, 0);
2553 if (v->locs)
2554 {
2555 struct elt_loc_list *l;
2556 for (l = v->locs; l; l = l->next)
2557 {
2558 fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
2559 print_inline_rtx (dump_file, l->loc, 0);
2560 }
2561 }
2562 fprintf (dump_file, "\n");
2563 }
2564
2565 gcc_checking_assert (!unsuitable_loc (loc));
2566
2567 val_bind (set, val, loc, modified);
2568 }
2569
2570 /* Clear (canonical address) slots that reference X. */
2571
2572 bool
2573 local_get_addr_clear_given_value (rtx const &, rtx *slot, rtx x)
2574 {
2575 if (vt_get_canonicalize_base (*slot) == x)
2576 *slot = NULL;
2577 return true;
2578 }
2579
2580 /* Reset this node, detaching all its equivalences and redirecting
2581 them to the canonical value, if there is one. */
2582
2583 static void
2584 val_reset (dataflow_set *set, decl_or_value dv)
2585 {
2586 variable var = shared_hash_find (set->vars, dv);
2587 location_chain node;
2588 rtx cval;
2589
2590 if (!var || !var->n_var_parts)
2591 return;
2592
2593 gcc_assert (var->n_var_parts == 1);
2594
2595 if (var->onepart == ONEPART_VALUE)
2596 {
2597 rtx x = dv_as_value (dv);
2598
2599 /* Relationships in the global cache don't change, so reset the
2600 local cache entry only. */
2601 rtx *slot = local_get_addr_cache->get (x);
2602 if (slot)
2603 {
2604 /* If the value resolved back to itself, odds are that other
2605 values may have cached it too. These entries now refer
2606 to the old X, so detach them too. Entries that used the
2607 old X but resolved to something else remain ok as long as
2608 that something else isn't also reset. */
2609 if (*slot == x)
2610 local_get_addr_cache
2611 ->traverse<rtx, local_get_addr_clear_given_value> (x);
2612 *slot = NULL;
2613 }
2614 }
2615
2616 cval = NULL;
2617 for (node = var->var_part[0].loc_chain; node; node = node->next)
2618 if (GET_CODE (node->loc) == VALUE
2619 && canon_value_cmp (node->loc, cval))
2620 cval = node->loc;
2621
2622 for (node = var->var_part[0].loc_chain; node; node = node->next)
2623 if (GET_CODE (node->loc) == VALUE && cval != node->loc)
2624 {
2625 /* Redirect the equivalence link to the new canonical
2626 value, or simply remove it if it would point at
2627 itself. */
2628 if (cval)
2629 set_variable_part (set, cval, dv_from_value (node->loc),
2630 0, node->init, node->set_src, NO_INSERT);
2631 delete_variable_part (set, dv_as_value (dv),
2632 dv_from_value (node->loc), 0);
2633 }
2634
2635 if (cval)
2636 {
2637 decl_or_value cdv = dv_from_value (cval);
2638
2639 /* Keep the remaining values connected, accumulating links
2640 in the canonical value. */
2641 for (node = var->var_part[0].loc_chain; node; node = node->next)
2642 {
2643 if (node->loc == cval)
2644 continue;
2645 else if (GET_CODE (node->loc) == REG)
2646 var_reg_decl_set (set, node->loc, node->init, cdv, 0,
2647 node->set_src, NO_INSERT);
2648 else if (GET_CODE (node->loc) == MEM)
2649 var_mem_decl_set (set, node->loc, node->init, cdv, 0,
2650 node->set_src, NO_INSERT);
2651 else
2652 set_variable_part (set, node->loc, cdv, 0,
2653 node->init, node->set_src, NO_INSERT);
2654 }
2655 }
2656
2657 /* We remove this last, to make sure that the canonical value is not
2658 removed to the point of requiring reinsertion. */
2659 if (cval)
2660 delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);
2661
2662 clobber_variable_part (set, NULL, dv, 0, NULL);
2663 }
2664
2665 /* Find the values in a given location and map the value to another
2666 value, if it is unique, or add the location as one holding the
2667 value. */
2668
2669 static void
2670 val_resolve (dataflow_set *set, rtx val, rtx loc, rtx_insn *insn)
2671 {
2672 decl_or_value dv = dv_from_value (val);
2673
2674 if (dump_file && (dump_flags & TDF_DETAILS))
2675 {
2676 if (insn)
2677 fprintf (dump_file, "%i: ", INSN_UID (insn));
2678 else
2679 fprintf (dump_file, "head: ");
2680 print_inline_rtx (dump_file, val, 0);
2681 fputs (" is at ", dump_file);
2682 print_inline_rtx (dump_file, loc, 0);
2683 fputc ('\n', dump_file);
2684 }
2685
2686 val_reset (set, dv);
2687
2688 gcc_checking_assert (!unsuitable_loc (loc));
2689
2690 if (REG_P (loc))
2691 {
2692 attrs node, found = NULL;
2693
2694 for (node = set->regs[REGNO (loc)]; node; node = node->next)
2695 if (dv_is_value_p (node->dv)
2696 && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
2697 {
2698 found = node;
2699
2700 /* Map incoming equivalences. ??? Wouldn't it be nice if
2701 we just started sharing the location lists? Maybe a
2702 circular list ending at the value itself or some
2703 such. */
2704 set_variable_part (set, dv_as_value (node->dv),
2705 dv_from_value (val), node->offset,
2706 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2707 set_variable_part (set, val, node->dv, node->offset,
2708 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2709 }
2710
2711 /* If we didn't find any equivalence, we need to remember that
2712 this value is held in the named register. */
2713 if (found)
2714 return;
2715 }
2716 /* ??? Attempt to find and merge equivalent MEMs or other
2717 expressions too. */
2718
2719 val_bind (set, val, loc, false);
2720 }
2721
2722 /* Initialize dataflow set SET to be empty. */
2724
2725 static void
2726 dataflow_set_init (dataflow_set *set)
2727 {
2728 init_attrs_list_set (set->regs);
2729 set->vars = shared_hash_copy (empty_shared_hash);
2730 set->stack_adjust = 0;
2731 set->traversed_vars = NULL;
2732 }
2733
2734 /* Delete the contents of dataflow set SET. */
2735
2736 static void
2737 dataflow_set_clear (dataflow_set *set)
2738 {
2739 int i;
2740
2741 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2742 attrs_list_clear (&set->regs[i]);
2743
2744 shared_hash_destroy (set->vars);
2745 set->vars = shared_hash_copy (empty_shared_hash);
2746 }
2747
2748 /* Copy the contents of dataflow set SRC to DST. */
2749
2750 static void
2751 dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
2752 {
2753 int i;
2754
2755 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2756 attrs_list_copy (&dst->regs[i], src->regs[i]);
2757
2758 shared_hash_destroy (dst->vars);
2759 dst->vars = shared_hash_copy (src->vars);
2760 dst->stack_adjust = src->stack_adjust;
2761 }
2762
2763 /* Information for merging lists of locations for a given offset
2764 of a variable. */
2765 struct variable_union_info
2766 {
2767 /* Node of the location chain. */
2768 location_chain lc;
2769
2770 /* The sum of positions in the input chains. */
2771 int pos;
2772
2773 /* The position in the chain of DST dataflow set. */
2774 int pos_dst;
2775 };
2776
2777 /* Buffer for location list sorting and its allocated size. */
2778 static struct variable_union_info *vui_vec;
2779 static int vui_allocated;
2780
2781 /* Compare function for qsort, ordering the structures by their POS element. */
2782
2783 static int
2784 variable_union_info_cmp_pos (const void *n1, const void *n2)
2785 {
2786 const struct variable_union_info *const i1 =
2787 (const struct variable_union_info *) n1;
2788 const struct variable_union_info *const i2 =
2789 (const struct variable_union_info *) n2;
2790
2791 if (i1->pos != i2->pos)
2792 return i1->pos - i2->pos;
2793
2794 return (i1->pos_dst - i2->pos_dst);
2795 }
2796
2797 /* Compute the union of the location parts of variable SRC and the
2798 same variable in dataflow set SET. Compute a "sorted" union of the
2799 location chains for common offsets, i.e. the locations of a variable
2800 part are sorted by a priority, where the priority is the sum of their
2801 positions in the two chains (if a location is only in one list, its
2802 position in the other is defined to be larger than the length of the
2803 chains).
2804 When we update the location parts, the newest location is at the
2805 beginning of the chain, so this "sorted" union keeps the newest locations at the beginning. */
2806
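/* Editorial example: with SRC chain [A, B] and DST chain [B, C]
   (src_l = dst_l = 2), the common B gets pos = 0 + 1 = 1, A gets
   pos = 0 + 2 + 2 = 4 and C keeps pos = 1 + 2 + 2 = 5, so the
   sorted result is [B, A, C]; positions from DST break ties.  */
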
2807 static int
2808 variable_union (variable src, dataflow_set *set)
2809 {
2810 variable dst;
2811 variable_def **dstp;
2812 int i, j, k;
2813
2814 dstp = shared_hash_find_slot (set->vars, src->dv);
2815 if (!dstp || !*dstp)
2816 {
2817 src->refcount++;
2818
2819 dst_can_be_shared = false;
2820 if (!dstp)
2821 dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);
2822
2823 *dstp = src;
2824
2825 /* Continue traversing the hash table. */
2826 return 1;
2827 }
2828 else
2829 dst = *dstp;
2830
2831 gcc_assert (src->n_var_parts);
2832 gcc_checking_assert (src->onepart == dst->onepart);
2833
2834 /* We can combine one-part variables very efficiently, because their
2835 entries are in canonical order. */
2836 if (src->onepart)
2837 {
2838 location_chain *nodep, dnode, snode;
2839
2840 gcc_assert (src->n_var_parts == 1
2841 && dst->n_var_parts == 1);
2842
2843 snode = src->var_part[0].loc_chain;
2844 gcc_assert (snode);
2845
2846 restart_onepart_unshared:
2847 nodep = &dst->var_part[0].loc_chain;
2848 dnode = *nodep;
2849 gcc_assert (dnode);
2850
2851 while (snode)
2852 {
2853 int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;
2854
2855 if (r > 0)
2856 {
2857 location_chain nnode;
2858
2859 if (shared_var_p (dst, set->vars))
2860 {
2861 dstp = unshare_variable (set, dstp, dst,
2862 VAR_INIT_STATUS_INITIALIZED);
2863 dst = *dstp;
2864 goto restart_onepart_unshared;
2865 }
2866
2867 *nodep = nnode = new location_chain_def;
2868 nnode->loc = snode->loc;
2869 nnode->init = snode->init;
2870 if (!snode->set_src || MEM_P (snode->set_src))
2871 nnode->set_src = NULL;
2872 else
2873 nnode->set_src = snode->set_src;
2874 nnode->next = dnode;
2875 dnode = nnode;
2876 }
2877 else if (r == 0)
2878 gcc_checking_assert (rtx_equal_p (dnode->loc, snode->loc));
2879
2880 if (r >= 0)
2881 snode = snode->next;
2882
2883 nodep = &dnode->next;
2884 dnode = *nodep;
2885 }
2886
2887 return 1;
2888 }
2889
2890 gcc_checking_assert (!src->onepart);
2891
2892 /* Count the number of location parts; the result is K. */
2893 for (i = 0, j = 0, k = 0;
2894 i < src->n_var_parts && j < dst->n_var_parts; k++)
2895 {
2896 if (VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
2897 {
2898 i++;
2899 j++;
2900 }
2901 else if (VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
2902 i++;
2903 else
2904 j++;
2905 }
2906 k += src->n_var_parts - i;
2907 k += dst->n_var_parts - j;
2908
2909 /* We track only variables whose size is <= MAX_VAR_PARTS bytes,
2910 thus there are at most MAX_VAR_PARTS different offsets. */
2911 gcc_checking_assert (dst->onepart ? k == 1 : k <= MAX_VAR_PARTS);
2912
2913 if (dst->n_var_parts != k && shared_var_p (dst, set->vars))
2914 {
2915 dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
2916 dst = *dstp;
2917 }
2918
2919 i = src->n_var_parts - 1;
2920 j = dst->n_var_parts - 1;
2921 dst->n_var_parts = k;
2922
2923 for (k--; k >= 0; k--)
2924 {
2925 location_chain node, node2;
2926
2927 if (i >= 0 && j >= 0
2928 && VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
2929 {
2930 /* Compute the "sorted" union of the chains, i.e. the locations which
2931 are in both chains go first, sorted by the sum of their positions
2932 in the chains. */
2933 int dst_l, src_l;
2934 int ii, jj, n;
2935 struct variable_union_info *vui;
2936
2937 /* If DST is shared, compare the location chains.
2938 If they are different, we will most likely modify the chain in
2939 DST, so make a copy of DST. */
2940 if (shared_var_p (dst, set->vars))
2941 {
2942 for (node = src->var_part[i].loc_chain,
2943 node2 = dst->var_part[j].loc_chain; node && node2;
2944 node = node->next, node2 = node2->next)
2945 {
2946 if (!((REG_P (node2->loc)
2947 && REG_P (node->loc)
2948 && REGNO (node2->loc) == REGNO (node->loc))
2949 || rtx_equal_p (node2->loc, node->loc)))
2950 {
2951 if (node2->init < node->init)
2952 node2->init = node->init;
2953 break;
2954 }
2955 }
2956 if (node || node2)
2957 {
2958 dstp = unshare_variable (set, dstp, dst,
2959 VAR_INIT_STATUS_UNKNOWN);
2960 dst = (variable)*dstp;
2961 }
2962 }
2963
2964 src_l = 0;
2965 for (node = src->var_part[i].loc_chain; node; node = node->next)
2966 src_l++;
2967 dst_l = 0;
2968 for (node = dst->var_part[j].loc_chain; node; node = node->next)
2969 dst_l++;
2970
2971 if (dst_l == 1)
2972 {
2973 /* The most common case, and much simpler: no qsort is needed. */
2974 location_chain dstnode = dst->var_part[j].loc_chain;
2975 dst->var_part[k].loc_chain = dstnode;
2976 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
2977 node2 = dstnode;
2978 for (node = src->var_part[i].loc_chain; node; node = node->next)
2979 if (!((REG_P (dstnode->loc)
2980 && REG_P (node->loc)
2981 && REGNO (dstnode->loc) == REGNO (node->loc))
2982 || rtx_equal_p (dstnode->loc, node->loc)))
2983 {
2984 location_chain new_node;
2985
2986 /* Copy the location from SRC. */
2987 new_node = new location_chain_def;
2988 new_node->loc = node->loc;
2989 new_node->init = node->init;
2990 if (!node->set_src || MEM_P (node->set_src))
2991 new_node->set_src = NULL;
2992 else
2993 new_node->set_src = node->set_src;
2994 node2->next = new_node;
2995 node2 = new_node;
2996 }
2997 node2->next = NULL;
2998 }
2999 else
3000 {
3001 if (src_l + dst_l > vui_allocated)
3002 {
3003 vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
3004 vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
3005 vui_allocated);
3006 }
3007 vui = vui_vec;
3008
3009 /* Fill in the locations from DST. */
3010 for (node = dst->var_part[j].loc_chain, jj = 0; node;
3011 node = node->next, jj++)
3012 {
3013 vui[jj].lc = node;
3014 vui[jj].pos_dst = jj;
3015
3016 /* Start with a position larger than the sum of any two valid positions, so DST entries not matched in SRC sort after the common ones. */
3017 vui[jj].pos = jj + src_l + dst_l;
3018 }
3019
3020 /* Fill in the locations from SRC. */
3021 n = dst_l;
3022 for (node = src->var_part[i].loc_chain, ii = 0; node;
3023 node = node->next, ii++)
3024 {
3025 /* Find location from NODE. */
3026 for (jj = 0; jj < dst_l; jj++)
3027 {
3028 if ((REG_P (vui[jj].lc->loc)
3029 && REG_P (node->loc)
3030 && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
3031 || rtx_equal_p (vui[jj].lc->loc, node->loc))
3032 {
3033 vui[jj].pos = jj + ii;
3034 break;
3035 }
3036 }
3037 if (jj >= dst_l) /* The location has not been found. */
3038 {
3039 location_chain new_node;
3040
3041 /* Copy the location from SRC. */
3042 new_node = new location_chain_def;
3043 new_node->loc = node->loc;
3044 new_node->init = node->init;
3045 if (!node->set_src || MEM_P (node->set_src))
3046 new_node->set_src = NULL;
3047 else
3048 new_node->set_src = node->set_src;
3049 vui[n].lc = new_node;
3050 vui[n].pos_dst = src_l + dst_l;
3051 vui[n].pos = ii + src_l + dst_l;
3052 n++;
3053 }
3054 }
3055
3056 if (dst_l == 2)
3057 {
3058 /* A special case of a still very common case. For dst_l == 2,
3059 all entries dst_l ... n-1 are sorted, since for i >= dst_l
3060 vui[i].pos == i + src_l + dst_l. */
3061 if (vui[0].pos > vui[1].pos)
3062 {
3063 /* Order should be 1, 0, 2... */
3064 dst->var_part[k].loc_chain = vui[1].lc;
3065 vui[1].lc->next = vui[0].lc;
3066 if (n >= 3)
3067 {
3068 vui[0].lc->next = vui[2].lc;
3069 vui[n - 1].lc->next = NULL;
3070 }
3071 else
3072 vui[0].lc->next = NULL;
3073 ii = 3;
3074 }
3075 else
3076 {
3077 dst->var_part[k].loc_chain = vui[0].lc;
3078 if (n >= 3 && vui[2].pos < vui[1].pos)
3079 {
3080 /* Order should be 0, 2, 1, 3... */
3081 vui[0].lc->next = vui[2].lc;
3082 vui[2].lc->next = vui[1].lc;
3083 if (n >= 4)
3084 {
3085 vui[1].lc->next = vui[3].lc;
3086 vui[n - 1].lc->next = NULL;
3087 }
3088 else
3089 vui[1].lc->next = NULL;
3090 ii = 4;
3091 }
3092 else
3093 {
3094 /* Order should be 0, 1, 2... */
3095 ii = 1;
3096 vui[n - 1].lc->next = NULL;
3097 }
3098 }
3099 for (; ii < n; ii++)
3100 vui[ii - 1].lc->next = vui[ii].lc;
3101 }
3102 else
3103 {
3104 qsort (vui, n, sizeof (struct variable_union_info),
3105 variable_union_info_cmp_pos);
3106
3107 /* Reconnect the nodes in sorted order. */
3108 for (ii = 1; ii < n; ii++)
3109 vui[ii - 1].lc->next = vui[ii].lc;
3110 vui[n - 1].lc->next = NULL;
3111 dst->var_part[k].loc_chain = vui[0].lc;
3112 }
3113
3114 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
3115 }
3116 i--;
3117 j--;
3118 }
3119 else if ((i >= 0 && j >= 0
3120 && VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
3121 || i < 0)
3122 {
3123 dst->var_part[k] = dst->var_part[j];
3124 j--;
3125 }
3126 else if ((i >= 0 && j >= 0
3127 && VAR_PART_OFFSET (src, i) > VAR_PART_OFFSET (dst, j))
3128 || j < 0)
3129 {
3130 location_chain *nextp;
3131
3132 /* Copy the chain from SRC. */
3133 nextp = &dst->var_part[k].loc_chain;
3134 for (node = src->var_part[i].loc_chain; node; node = node->next)
3135 {
3136 location_chain new_lc;
3137
3138 new_lc = new location_chain_def;
3139 new_lc->next = NULL;
3140 new_lc->init = node->init;
3141 if (!node->set_src || MEM_P (node->set_src))
3142 new_lc->set_src = NULL;
3143 else
3144 new_lc->set_src = node->set_src;
3145 new_lc->loc = node->loc;
3146
3147 *nextp = new_lc;
3148 nextp = &new_lc->next;
3149 }
3150
3151 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (src, i);
3152 i--;
3153 }
3154 dst->var_part[k].cur_loc = NULL;
3155 }
3156
3157 if (flag_var_tracking_uninit)
3158 for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
3159 {
3160 location_chain node, node2;
3161 for (node = src->var_part[i].loc_chain; node; node = node->next)
3162 for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
3163 if (rtx_equal_p (node->loc, node2->loc))
3164 {
3165 if (node->init > node2->init)
3166 node2->init = node->init;
3167 }
3168 }
3169
3170 /* Continue traversing the hash table. */
3171 return 1;
3172 }
3173
3174 /* Compute union of dataflow sets SRC and DST and store it to DST. */
3175
3176 static void
3177 dataflow_set_union (dataflow_set *dst, dataflow_set *src)
3178 {
3179 int i;
3180
3181 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3182 attrs_list_union (&dst->regs[i], src->regs[i]);
3183
3184 if (dst->vars == empty_shared_hash)
3185 {
3186 shared_hash_destroy (dst->vars);
3187 dst->vars = shared_hash_copy (src->vars);
3188 }
3189 else
3190 {
3191 variable_iterator_type hi;
3192 variable var;
3193
3194 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (src->vars),
3195 var, variable, hi)
3196 variable_union (var, dst);
3197 }
3198 }
3199
3200 /* Whether the value is currently being expanded. */
3201 #define VALUE_RECURSED_INTO(x) \
3202 (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
3203
3204 /* Whether no expansion was found, which saves useless lookups.
3205 It must only be set when VALUE_CHANGED is clear. */
3206 #define NO_LOC_P(x) \
3207 (RTL_FLAG_CHECK2 ("NO_LOC_P", (x), VALUE, DEBUG_EXPR)->return_val)
3208
3209 /* Whether cur_loc in the value needs to be (re)computed. */
3210 #define VALUE_CHANGED(x) \
3211 (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
3212 /* Whether cur_loc in the decl needs to be (re)computed. */
3213 #define DECL_CHANGED(x) TREE_VISITED (x)
3214
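/* Editorial note: these flags piggy-back on otherwise unused
   bits -- the RTL "used", "return_val" and "frame_related" flags
   on VALUEs and DEBUG_EXPRs, and TREE_VISITED on decls -- as the
   accessors above spell out.  */
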
3215 /* Record (if NEWV) that DV needs to have its cur_loc recomputed. For
3216 user DECLs, this means they're in changed_variables. Values and
3217 debug exprs may be left with this flag set if no user variable
3218 requires them to be evaluated. */
3219
3220 static inline void
3221 set_dv_changed (decl_or_value dv, bool newv)
3222 {
3223 switch (dv_onepart_p (dv))
3224 {
3225 case ONEPART_VALUE:
3226 if (newv)
3227 NO_LOC_P (dv_as_value (dv)) = false;
3228 VALUE_CHANGED (dv_as_value (dv)) = newv;
3229 break;
3230
3231 case ONEPART_DEXPR:
3232 if (newv)
3233 NO_LOC_P (DECL_RTL_KNOWN_SET (dv_as_decl (dv))) = false;
3234 /* Fall through... */
3235
3236 default:
3237 DECL_CHANGED (dv_as_decl (dv)) = newv;
3238 break;
3239 }
3240 }
3241
3242 /* Return true if DV needs to have its cur_loc recomputed. */
3243
3244 static inline bool
3245 dv_changed_p (decl_or_value dv)
3246 {
3247 return (dv_is_value_p (dv)
3248 ? VALUE_CHANGED (dv_as_value (dv))
3249 : DECL_CHANGED (dv_as_decl (dv)));
3250 }
3251
3252 /* Return a location list node whose loc is rtx_equal to LOC, in the
3253 location list of a one-part variable or value VAR, or in that of
3254 any values recursively mentioned in the location lists. VARS must
3255 be in star-canonical form. */
3256
3257 static location_chain
3258 find_loc_in_1pdv (rtx loc, variable var, variable_table_type *vars)
3259 {
3260 location_chain node;
3261 enum rtx_code loc_code;
3262
3263 if (!var)
3264 return NULL;
3265
3266 gcc_checking_assert (var->onepart);
3267
3268 if (!var->n_var_parts)
3269 return NULL;
3270
3271 gcc_checking_assert (loc != dv_as_opaque (var->dv));
3272
3273 loc_code = GET_CODE (loc);
3274 for (node = var->var_part[0].loc_chain; node; node = node->next)
3275 {
3276 decl_or_value dv;
3277 variable rvar;
3278
3279 if (GET_CODE (node->loc) != loc_code)
3280 {
3281 if (GET_CODE (node->loc) != VALUE)
3282 continue;
3283 }
3284 else if (loc == node->loc)
3285 return node;
3286 else if (loc_code != VALUE)
3287 {
3288 if (rtx_equal_p (loc, node->loc))
3289 return node;
3290 continue;
3291 }
3292
3293 /* Since we're in star-canonical form, we don't need to visit
3294 non-canonical nodes: one-part variables and non-canonical
3295 values would only point back to the canonical node. */
3296 if (dv_is_value_p (var->dv)
3297 && !canon_value_cmp (node->loc, dv_as_value (var->dv)))
3298 {
3299 /* Skip all subsequent VALUEs. */
3300 while (node->next && GET_CODE (node->next->loc) == VALUE)
3301 {
3302 node = node->next;
3303 gcc_checking_assert (!canon_value_cmp (node->loc,
3304 dv_as_value (var->dv)));
3305 if (loc == node->loc)
3306 return node;
3307 }
3308 continue;
3309 }
3310
3311 gcc_checking_assert (node == var->var_part[0].loc_chain);
3312 gcc_checking_assert (!node->next);
3313
3314 dv = dv_from_value (node->loc);
3315 rvar = vars->find_with_hash (dv, dv_htab_hash (dv));
3316 return find_loc_in_1pdv (loc, rvar, vars);
3317 }
3318
3319 /* ??? Gotta look in cselib_val locations too. */
3320
3321 return NULL;
3322 }
3323
3324 /* Hash table iteration argument passed to the variable merge traversals (variable_merge_over_cur and variable_merge_over_src). */
3325 struct dfset_merge
3326 {
3327 /* The set in which the merge is to be inserted. */
3328 dataflow_set *dst;
3329 /* The set that we're iterating in. */
3330 dataflow_set *cur;
3331 /* The set that may contain the other dv we are to merge with. */
3332 dataflow_set *src;
3333 /* Number of onepart dvs in src. */
3334 int src_onepart_cnt;
3335 };
3336
3337 /* Insert LOC in *NODEP, if it's not there yet. The list must be in
3338 loc_cmp order, and it is maintained as such. */
3339
3340 static void
3341 insert_into_intersection (location_chain *nodep, rtx loc,
3342 enum var_init_status status)
3343 {
3344 location_chain node;
3345 int r;
3346
3347 for (node = *nodep; node; nodep = &node->next, node = *nodep)
3348 if ((r = loc_cmp (node->loc, loc)) == 0)
3349 {
3350 node->init = MIN (node->init, status);
3351 return;
3352 }
3353 else if (r > 0)
3354 break;
3355
3356 node = new location_chain_def;
3357
3358 node->loc = loc;
3359 node->set_src = NULL;
3360 node->init = status;
3361 node->next = *nodep;
3362 *nodep = node;
3363 }
3364
3365 /* Insert in DEST the intersection of the locations present in both
3366 S1NODE and S2VAR, directly or indirectly. S1NODE is from a
3367 variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in
3368 DSM->dst. */
3369
3370 static void
3371 intersect_loc_chains (rtx val, location_chain *dest, struct dfset_merge *dsm,
3372 location_chain s1node, variable s2var)
3373 {
3374 dataflow_set *s1set = dsm->cur;
3375 dataflow_set *s2set = dsm->src;
3376 location_chain found;
3377
3378 if (s2var)
3379 {
3380 location_chain s2node;
3381
3382 gcc_checking_assert (s2var->onepart);
3383
3384 if (s2var->n_var_parts)
3385 {
3386 s2node = s2var->var_part[0].loc_chain;
3387
3388 for (; s1node && s2node;
3389 s1node = s1node->next, s2node = s2node->next)
3390 if (s1node->loc != s2node->loc)
3391 break;
3392 else if (s1node->loc == val)
3393 continue;
3394 else
3395 insert_into_intersection (dest, s1node->loc,
3396 MIN (s1node->init, s2node->init));
3397 }
3398 }
3399
3400 for (; s1node; s1node = s1node->next)
3401 {
3402 if (s1node->loc == val)
3403 continue;
3404
3405 if ((found = find_loc_in_1pdv (s1node->loc, s2var,
3406 shared_hash_htab (s2set->vars))))
3407 {
3408 insert_into_intersection (dest, s1node->loc,
3409 MIN (s1node->init, found->init));
3410 continue;
3411 }
3412
3413 if (GET_CODE (s1node->loc) == VALUE
3414 && !VALUE_RECURSED_INTO (s1node->loc))
3415 {
3416 decl_or_value dv = dv_from_value (s1node->loc);
3417 variable svar = shared_hash_find (s1set->vars, dv);
3418 if (svar)
3419 {
3420 if (svar->n_var_parts == 1)
3421 {
3422 VALUE_RECURSED_INTO (s1node->loc) = true;
3423 intersect_loc_chains (val, dest, dsm,
3424 svar->var_part[0].loc_chain,
3425 s2var);
3426 VALUE_RECURSED_INTO (s1node->loc) = false;
3427 }
3428 }
3429 }
3430
3431 /* ??? Gotta look in cselib_val locations too. */
3432
3433 /* ??? if the location is equivalent to any location in src,
3434 searched recursively
3435
3436 add to dst the values needed to represent the equivalence
3437
3438 telling whether location S is equivalent to another dv's
3439 location list:
3440
3441 for each location D in the list
3442
3443 if S and D satisfy rtx_equal_p, then it is present
3444
3445 else if D is a value, recurse without cycles
3446
3447 else if S and D have the same CODE and MODE
3448
3449 for each operand oS and the corresponding oD
3450
3451 if oS and oD are not equivalent, then S and D are not equivalent
3452
3453 else if they are RTX vectors
3454
3455 if any vector oS element is not equivalent to its respective oD,
3456 then S and D are not equivalent
3457
3458 */
3459
3460
3461 }
3462 }
3463
3464 /* Return -1 if X should be before Y in a location list for a 1-part
3465 variable, 1 if Y should be before X, and 0 if they're equivalent
3466 and should not appear in the list. */
3467
3468 static int
3469 loc_cmp (rtx x, rtx y)
3470 {
3471 int i, j, r;
3472 RTX_CODE code = GET_CODE (x);
3473 const char *fmt;
3474
3475 if (x == y)
3476 return 0;
3477
3478 if (REG_P (x))
3479 {
3480 if (!REG_P (y))
3481 return -1;
3482 gcc_assert (GET_MODE (x) == GET_MODE (y));
3483 if (REGNO (x) == REGNO (y))
3484 return 0;
3485 else if (REGNO (x) < REGNO (y))
3486 return -1;
3487 else
3488 return 1;
3489 }
3490
3491 if (REG_P (y))
3492 return 1;
3493
3494 if (MEM_P (x))
3495 {
3496 if (!MEM_P (y))
3497 return -1;
3498 gcc_assert (GET_MODE (x) == GET_MODE (y));
3499 return loc_cmp (XEXP (x, 0), XEXP (y, 0));
3500 }
3501
3502 if (MEM_P (y))
3503 return 1;
3504
3505 if (GET_CODE (x) == VALUE)
3506 {
3507 if (GET_CODE (y) != VALUE)
3508 return -1;
3509 /* Don't assert the modes are the same; that is true only
3510 when not recursing. (subreg:QI (value:SI 1:1) 0)
3511 and (subreg:QI (value:DI 2:2) 0) can be compared,
3512 even when the modes are different. */
3513 if (canon_value_cmp (x, y))
3514 return -1;
3515 else
3516 return 1;
3517 }
3518
3519 if (GET_CODE (y) == VALUE)
3520 return 1;
3521
3522 /* Entry value is the least preferable kind of expression. */
3523 if (GET_CODE (x) == ENTRY_VALUE)
3524 {
3525 if (GET_CODE (y) != ENTRY_VALUE)
3526 return 1;
3527 gcc_assert (GET_MODE (x) == GET_MODE (y));
3528 return loc_cmp (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));
3529 }
3530
3531 if (GET_CODE (y) == ENTRY_VALUE)
3532 return -1;
3533
3534 if (GET_CODE (x) == GET_CODE (y))
3535 /* Compare operands below. */;
3536 else if (GET_CODE (x) < GET_CODE (y))
3537 return -1;
3538 else
3539 return 1;
3540
3541 gcc_assert (GET_MODE (x) == GET_MODE (y));
3542
3543 if (GET_CODE (x) == DEBUG_EXPR)
3544 {
3545 if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3546 < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)))
3547 return -1;
3548 gcc_checking_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3549 > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)));
3550 return 1;
3551 }
3552
3553 fmt = GET_RTX_FORMAT (code);
3554 for (i = 0; i < GET_RTX_LENGTH (code); i++)
3555 switch (fmt[i])
3556 {
3557 case 'w':
3558 if (XWINT (x, i) == XWINT (y, i))
3559 break;
3560 else if (XWINT (x, i) < XWINT (y, i))
3561 return -1;
3562 else
3563 return 1;
3564
3565 case 'n':
3566 case 'i':
3567 if (XINT (x, i) == XINT (y, i))
3568 break;
3569 else if (XINT (x, i) < XINT (y, i))
3570 return -1;
3571 else
3572 return 1;
3573
3574 case 'V':
3575 case 'E':
3576 /* Compare the vector length first. */
3577 if (XVECLEN (x, i) == XVECLEN (y, i))
3578 /* Compare the vector elements. */;
3579 else if (XVECLEN (x, i) < XVECLEN (y, i))
3580 return -1;
3581 else
3582 return 1;
3583
3584 for (j = 0; j < XVECLEN (x, i); j++)
3585 if ((r = loc_cmp (XVECEXP (x, i, j),
3586 XVECEXP (y, i, j))))
3587 return r;
3588 break;
3589
3590 case 'e':
3591 if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
3592 return r;
3593 break;
3594
3595 case 'S':
3596 case 's':
3597 if (XSTR (x, i) == XSTR (y, i))
3598 break;
3599 if (!XSTR (x, i))
3600 return -1;
3601 if (!XSTR (y, i))
3602 return 1;
3603 if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
3604 break;
3605 else if (r < 0)
3606 return -1;
3607 else
3608 return 1;
3609
3610 case 'u':
3611 /* These are just backpointers, so they don't matter. */
3612 break;
3613
3614 case '0':
3615 case 't':
3616 break;
3617
3618 /* It is believed that rtx's at this level will never
3619 contain anything but integers and other rtx's,
3620 except within LABEL_REFs and SYMBOL_REFs. */
3621 default:
3622 gcc_unreachable ();
3623 }
3624 if (CONST_WIDE_INT_P (x))
3625 {
3626 /* Compare the vector length first. */
3627 if (CONST_WIDE_INT_NUNITS (x) > CONST_WIDE_INT_NUNITS (y))
3628 return 1;
3629 else if (CONST_WIDE_INT_NUNITS (x) < CONST_WIDE_INT_NUNITS (y))
3630 return -1;
3631
3632 /* Compare the vector elements. */
3633 for (j = CONST_WIDE_INT_NUNITS (x) - 1; j >= 0 ; j--)
3634 {
3635 if (CONST_WIDE_INT_ELT (x, j) < CONST_WIDE_INT_ELT (y, j))
3636 return -1;
3637 if (CONST_WIDE_INT_ELT (x, j) > CONST_WIDE_INT_ELT (y, j))
3638 return 1;
3639 }
3640 }
3641
3642 return 0;
3643 }
3644
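/* Editorial summary: loc_cmp defines a total order on location
   RTXs: REGs first (by register number), then MEMs (by address),
   then VALUEs (more canonical first), then the remaining codes
   ordered by code, mode and operands, with ENTRY_VALUEs after all
   of those.  One-part location chains are kept sorted in this
   order, which the checker below verifies.  */
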
3645 #if ENABLE_CHECKING
3646 /* Check the order of entries in one-part variables. */
3647
3648 int
3649 canonicalize_loc_order_check (variable_def **slot,
3650 dataflow_set *data ATTRIBUTE_UNUSED)
3651 {
3652 variable var = *slot;
3653 location_chain node, next;
3654
3655 #ifdef ENABLE_RTL_CHECKING
3656 int i;
3657 for (i = 0; i < var->n_var_parts; i++)
3658 gcc_assert (var->var_part[i].cur_loc == NULL);
3659 gcc_assert (!var->in_changed_variables);
3660 #endif
3661
3662 if (!var->onepart)
3663 return 1;
3664
3665 gcc_assert (var->n_var_parts == 1);
3666 node = var->var_part[0].loc_chain;
3667 gcc_assert (node);
3668
3669 while ((next = node->next))
3670 {
3671 gcc_assert (loc_cmp (node->loc, next->loc) < 0);
3672 node = next;
3673 }
3674
3675 return 1;
3676 }
3677 #endif
3678
3679 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
3680 more likely to be chosen as canonical for an equivalence set.
3681 Ensure less likely values can reach more likely neighbors, making
3682 the connections bidirectional. */
3683
3684 int
3685 canonicalize_values_mark (variable_def **slot, dataflow_set *set)
3686 {
3687 variable var = *slot;
3688 decl_or_value dv = var->dv;
3689 rtx val;
3690 location_chain node;
3691
3692 if (!dv_is_value_p (dv))
3693 return 1;
3694
3695 gcc_checking_assert (var->n_var_parts == 1);
3696
3697 val = dv_as_value (dv);
3698
3699 for (node = var->var_part[0].loc_chain; node; node = node->next)
3700 if (GET_CODE (node->loc) == VALUE)
3701 {
3702 if (canon_value_cmp (node->loc, val))
3703 VALUE_RECURSED_INTO (val) = true;
3704 else
3705 {
3706 decl_or_value odv = dv_from_value (node->loc);
3707 variable_def **oslot;
3708 oslot = shared_hash_find_slot_noinsert (set->vars, odv);
3709
3710 set_slot_part (set, val, oslot, odv, 0,
3711 node->init, NULL_RTX);
3712
3713 VALUE_RECURSED_INTO (node->loc) = true;
3714 }
3715 }
3716
3717 return 1;
3718 }
3719
3720 /* Remove redundant entries from equivalence lists in onepart
3721 variables, canonicalizing equivalence sets into star shapes. */
3722
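/* Editorial example: equivalences recorded as a chain
   V3 -> V2 -> V1, where V1 is the lowest-numbered and hence
   canonical VALUE, come out as a star with hub V1: V2 and V3 each
   point at V1 directly, and V1 lists both back, in addition to
   the concrete locations.  */
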
3723 int
3724 canonicalize_values_star (variable_def **slot, dataflow_set *set)
3725 {
3726 variable var = *slot;
3727 decl_or_value dv = var->dv;
3728 location_chain node;
3729 decl_or_value cdv;
3730 rtx val, cval;
3731 variable_def **cslot;
3732 bool has_value;
3733 bool has_marks;
3734
3735 if (!var->onepart)
3736 return 1;
3737
3738 gcc_checking_assert (var->n_var_parts == 1);
3739
3740 if (dv_is_value_p (dv))
3741 {
3742 cval = dv_as_value (dv);
3743 if (!VALUE_RECURSED_INTO (cval))
3744 return 1;
3745 VALUE_RECURSED_INTO (cval) = false;
3746 }
3747 else
3748 cval = NULL_RTX;
3749
3750 restart:
3751 val = cval;
3752 has_value = false;
3753 has_marks = false;
3754
3755 gcc_assert (var->n_var_parts == 1);
3756
3757 for (node = var->var_part[0].loc_chain; node; node = node->next)
3758 if (GET_CODE (node->loc) == VALUE)
3759 {
3760 has_value = true;
3761 if (VALUE_RECURSED_INTO (node->loc))
3762 has_marks = true;
3763 if (canon_value_cmp (node->loc, cval))
3764 cval = node->loc;
3765 }
3766
3767 if (!has_value)
3768 return 1;
3769
3770 if (cval == val)
3771 {
3772 if (!has_marks || dv_is_decl_p (dv))
3773 return 1;
3774
3775 /* Keep it marked so that we revisit it, either after visiting a
3776 child node, or after visiting a new parent that might be
3777 discovered later. */
3778 VALUE_RECURSED_INTO (val) = true;
3779
3780 for (node = var->var_part[0].loc_chain; node; node = node->next)
3781 if (GET_CODE (node->loc) == VALUE
3782 && VALUE_RECURSED_INTO (node->loc))
3783 {
3784 cval = node->loc;
3785 restart_with_cval:
3786 VALUE_RECURSED_INTO (cval) = false;
3787 dv = dv_from_value (cval);
3788 slot = shared_hash_find_slot_noinsert (set->vars, dv);
3789 if (!slot)
3790 {
3791 gcc_assert (dv_is_decl_p (var->dv));
3792 /* The canonical value was reset and dropped.
3793 Remove it. */
3794 clobber_variable_part (set, NULL, var->dv, 0, NULL);
3795 return 1;
3796 }
3797 var = *slot;
3798 gcc_assert (dv_is_value_p (var->dv));
3799 if (var->n_var_parts == 0)
3800 return 1;
3801 gcc_assert (var->n_var_parts == 1);
3802 goto restart;
3803 }
3804
3805 VALUE_RECURSED_INTO (val) = false;
3806
3807 return 1;
3808 }
3809
3810 /* Push values to the canonical one. */
3811 cdv = dv_from_value (cval);
3812 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3813
3814 for (node = var->var_part[0].loc_chain; node; node = node->next)
3815 if (node->loc != cval)
3816 {
3817 cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
3818 node->init, NULL_RTX);
3819 if (GET_CODE (node->loc) == VALUE)
3820 {
3821 decl_or_value ndv = dv_from_value (node->loc);
3822
3823 set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
3824 NO_INSERT);
3825
3826 if (canon_value_cmp (node->loc, val))
3827 {
3828 /* If it could have been a local minimum, it's not any more,
3829 since it is now a neighbor of cval, so it may have to push
3830 to it. Conversely, if it wouldn't have prevailed over
3831 val, then whatever mark it has is fine: if it was to
3832 push, it will now push to a more canonical node, but if
3833 it wasn't, then it has already pushed any values it might
3834 have to. */
3835 VALUE_RECURSED_INTO (node->loc) = true;
3836 /* Make sure we visit node->loc by ensuring that cval is
3837 visited too. */
3838 VALUE_RECURSED_INTO (cval) = true;
3839 }
3840 else if (!VALUE_RECURSED_INTO (node->loc))
3841 /* If we have no need to "recurse" into this node, it's
3842 already "canonicalized", so drop the link to the old
3843 parent. */
3844 clobber_variable_part (set, cval, ndv, 0, NULL);
3845 }
3846 else if (GET_CODE (node->loc) == REG)
3847 {
3848 attrs list = set->regs[REGNO (node->loc)], *listp;
3849
3850 /* Change an existing attribute referring to dv so that it
3851 refers to cdv, removing any duplicate this might
3852 introduce, and checking that no previous duplicates
3853 existed, all in a single pass. */
3854
3855 while (list)
3856 {
3857 if (list->offset == 0
3858 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3859 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3860 break;
3861
3862 list = list->next;
3863 }
3864
3865 gcc_assert (list);
3866 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3867 {
3868 list->dv = cdv;
3869 for (listp = &list->next; (list = *listp); listp = &list->next)
3870 {
3871 if (list->offset)
3872 continue;
3873
3874 if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3875 {
3876 *listp = list->next;
3877 delete list;
3878 list = *listp;
3879 break;
3880 }
3881
3882 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
3883 }
3884 }
3885 else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3886 {
3887 for (listp = &list->next; (list = *listp); listp = &list->next)
3888 {
3889 if (list->offset)
3890 continue;
3891
3892 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3893 {
3894 *listp = list->next;
3895 delete list;
3896 list = *listp;
3897 break;
3898 }
3899
3900 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
3901 }
3902 }
3903 else
3904 gcc_unreachable ();
3905
3906 #if ENABLE_CHECKING
3907 while (list)
3908 {
3909 if (list->offset == 0
3910 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3911 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3912 gcc_unreachable ();
3913
3914 list = list->next;
3915 }
3916 #endif
3917 }
3918 }
3919
3920 if (val)
3921 set_slot_part (set, val, cslot, cdv, 0,
3922 VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
3923
3924 slot = clobber_slot_part (set, cval, slot, 0, NULL);
3925
3926 /* Variable may have been unshared. */
3927 var = *slot;
3928 gcc_checking_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
3929 && var->var_part[0].loc_chain->next == NULL);
3930
3931 if (VALUE_RECURSED_INTO (cval))
3932 goto restart_with_cval;
3933
3934 return 1;
3935 }
3936
3937 /* Bind one-part variables to the canonical value in an equivalence
3938 set. Not doing this causes dataflow convergence failure in rare
3939 circumstances, see PR42873. Unfortunately we can't do this
3940 efficiently as part of canonicalize_values_star, since we may not
3941 have determined or even seen the canonical value of a set when we
3942 get to a variable that references another member of the set. */
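/* For example (with hypothetical names): if decl D's only location
is VALUE V2, and V2's own location list begins with a more canonical
VALUE V1, this pass rebinds D to V1, so that all members of the
equivalence set agree on the same canonical VALUE.  */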
3943
3944 int
3945 canonicalize_vars_star (variable_def **slot, dataflow_set *set)
3946 {
3947 variable var = *slot;
3948 decl_or_value dv = var->dv;
3949 location_chain node;
3950 rtx cval;
3951 decl_or_value cdv;
3952 variable_def **cslot;
3953 variable cvar;
3954 location_chain cnode;
3955
3956 if (!var->onepart || var->onepart == ONEPART_VALUE)
3957 return 1;
3958
3959 gcc_assert (var->n_var_parts == 1);
3960
3961 node = var->var_part[0].loc_chain;
3962
3963 if (GET_CODE (node->loc) != VALUE)
3964 return 1;
3965
3966 gcc_assert (!node->next);
3967 cval = node->loc;
3968
3969 /* Push values to the canonical one. */
3970 cdv = dv_from_value (cval);
3971 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3972 if (!cslot)
3973 return 1;
3974 cvar = *cslot;
3975 gcc_assert (cvar->n_var_parts == 1);
3976
3977 cnode = cvar->var_part[0].loc_chain;
3978
3979 /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
3980 that are not "more canonical" than it. */
3981 if (GET_CODE (cnode->loc) != VALUE
3982 || !canon_value_cmp (cnode->loc, cval))
3983 return 1;
3984
3985 /* CVAL was found to be non-canonical. Change the variable to point
3986 to the canonical VALUE. */
3987 gcc_assert (!cnode->next);
3988 cval = cnode->loc;
3989
3990 slot = set_slot_part (set, cval, slot, dv, 0,
3991 node->init, node->set_src);
3992 clobber_slot_part (set, cval, slot, 0, node->set_src);
3993
3994 return 1;
3995 }
3996
3997 /* Combine variable or value in *S1SLOT (in DSM->cur) with the
3998 corresponding entry in DSM->src. Multi-part variables are combined
3999 with variable_union, whereas onepart dvs are combined with
4000 intersection. */
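/* A sketch of the difference, using made-up locations: a multi-part
variable found in (reg:SI 0) in one predecessor and in (mem:SI ...)
in the other keeps both locations after the union, whereas a onepart
dv with location chains {V1, V2} and {V2, V3} keeps only V2 after
the intersection, since only V2 is known to hold the value on every
incoming path.  */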
4001
4002 static int
4003 variable_merge_over_cur (variable s1var, struct dfset_merge *dsm)
4004 {
4005 dataflow_set *dst = dsm->dst;
4006 variable_def **dstslot;
4007 variable s2var, dvar = NULL;
4008 decl_or_value dv = s1var->dv;
4009 onepart_enum_t onepart = s1var->onepart;
4010 rtx val;
4011 hashval_t dvhash;
4012 location_chain node, *nodep;
4013
4014 /* If the incoming onepart variable has an empty location list, then
4015 the intersection will be just as empty. For other variables,
4016 it's always union. */
4017 gcc_checking_assert (s1var->n_var_parts
4018 && s1var->var_part[0].loc_chain);
4019
4020 if (!onepart)
4021 return variable_union (s1var, dst);
4022
4023 gcc_checking_assert (s1var->n_var_parts == 1);
4024
4025 dvhash = dv_htab_hash (dv);
4026 if (dv_is_value_p (dv))
4027 val = dv_as_value (dv);
4028 else
4029 val = NULL;
4030
4031 s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
4032 if (!s2var)
4033 {
4034 dst_can_be_shared = false;
4035 return 1;
4036 }
4037
4038 dsm->src_onepart_cnt--;
4039 gcc_assert (s2var->var_part[0].loc_chain
4040 && s2var->onepart == onepart
4041 && s2var->n_var_parts == 1);
4042
4043 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4044 if (dstslot)
4045 {
4046 dvar = *dstslot;
4047 gcc_assert (dvar->refcount == 1
4048 && dvar->onepart == onepart
4049 && dvar->n_var_parts == 1);
4050 nodep = &dvar->var_part[0].loc_chain;
4051 }
4052 else
4053 {
4054 nodep = &node;
4055 node = NULL;
4056 }
4057
4058 if (!dstslot && !onepart_variable_different_p (s1var, s2var))
4059 {
4060 dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
4061 dvhash, INSERT);
4062 *dstslot = dvar = s2var;
4063 dvar->refcount++;
4064 }
4065 else
4066 {
4067 dst_can_be_shared = false;
4068
4069 intersect_loc_chains (val, nodep, dsm,
4070 s1var->var_part[0].loc_chain, s2var);
4071
4072 if (!dstslot)
4073 {
4074 if (node)
4075 {
4076 dvar = onepart_pool (onepart).allocate ();
4077 dvar->dv = dv;
4078 dvar->refcount = 1;
4079 dvar->n_var_parts = 1;
4080 dvar->onepart = onepart;
4081 dvar->in_changed_variables = false;
4082 dvar->var_part[0].loc_chain = node;
4083 dvar->var_part[0].cur_loc = NULL;
4084 if (onepart)
4085 VAR_LOC_1PAUX (dvar) = NULL;
4086 else
4087 VAR_PART_OFFSET (dvar, 0) = 0;
4088
4089 dstslot
4090 = shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
4091 INSERT);
4092 gcc_assert (!*dstslot);
4093 *dstslot = dvar;
4094 }
4095 else
4096 return 1;
4097 }
4098 }
4099
4100 nodep = &dvar->var_part[0].loc_chain;
4101 while ((node = *nodep))
4102 {
4103 location_chain *nextp = &node->next;
4104
4105 if (GET_CODE (node->loc) == REG)
4106 {
4107 attrs list;
4108
4109 for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
4110 if (GET_MODE (node->loc) == GET_MODE (list->loc)
4111 && dv_is_value_p (list->dv))
4112 break;
4113
4114 if (!list)
4115 attrs_list_insert (&dst->regs[REGNO (node->loc)],
4116 dv, 0, node->loc);
4117 /* If this value became canonical for another value that had
4118 this register, we want to leave it alone. */
4119 else if (dv_as_value (list->dv) != val)
4120 {
4121 dstslot = set_slot_part (dst, dv_as_value (list->dv),
4122 dstslot, dv, 0,
4123 node->init, NULL_RTX);
4124 dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
4125
4126 /* Since nextp points into the removed node, we can't
4127 use it. The pointer to the next node moved to nodep.
4128 However, if the variable we're walking is unshared
4129 during our walk, we'll keep walking the location list
4130 of the previously-shared variable, in which case the
4131 node won't have been removed, and we'll want to skip
4132 it. That's why we test *nodep here. */
4133 if (*nodep != node)
4134 nextp = nodep;
4135 }
4136 }
4137 else
4138 /* Canonicalization puts registers first, so we don't have to
4139 walk it all. */
4140 break;
4141 nodep = nextp;
4142 }
4143
4144 if (dvar != *dstslot)
4145 dvar = *dstslot;
4146 nodep = &dvar->var_part[0].loc_chain;
4147
4148 if (val)
4149 {
4150 /* Mark all referenced nodes for canonicalization, and make sure
4151 we have mutual equivalence links. */
4152 VALUE_RECURSED_INTO (val) = true;
4153 for (node = *nodep; node; node = node->next)
4154 if (GET_CODE (node->loc) == VALUE)
4155 {
4156 VALUE_RECURSED_INTO (node->loc) = true;
4157 set_variable_part (dst, val, dv_from_value (node->loc), 0,
4158 node->init, NULL, INSERT);
4159 }
4160
4161 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4162 gcc_assert (*dstslot == dvar);
4163 canonicalize_values_star (dstslot, dst);
4164 gcc_checking_assert (dstslot
4165 == shared_hash_find_slot_noinsert_1 (dst->vars,
4166 dv, dvhash));
4167 dvar = *dstslot;
4168 }
4169 else
4170 {
4171 bool has_value = false, has_other = false;
4172
4173 /* If we have one value and anything else, we're going to
4174 canonicalize this, so make sure all values have an entry in
4175 the table and are marked for canonicalization. */
4176 for (node = *nodep; node; node = node->next)
4177 {
4178 if (GET_CODE (node->loc) == VALUE)
4179 {
4180 /* If this was marked during register canonicalization,
4181 we know we have to canonicalize values. */
4182 if (has_value)
4183 has_other = true;
4184 has_value = true;
4185 if (has_other)
4186 break;
4187 }
4188 else
4189 {
4190 has_other = true;
4191 if (has_value)
4192 break;
4193 }
4194 }
4195
4196 if (has_value && has_other)
4197 {
4198 for (node = *nodep; node; node = node->next)
4199 {
4200 if (GET_CODE (node->loc) == VALUE)
4201 {
4202 decl_or_value dv = dv_from_value (node->loc);
4203 variable_def **slot = NULL;
4204
4205 if (shared_hash_shared (dst->vars))
4206 slot = shared_hash_find_slot_noinsert (dst->vars, dv);
4207 if (!slot)
4208 slot = shared_hash_find_slot_unshare (&dst->vars, dv,
4209 INSERT);
4210 if (!*slot)
4211 {
4212 variable var = onepart_pool (ONEPART_VALUE).allocate ();
4213 var->dv = dv;
4214 var->refcount = 1;
4215 var->n_var_parts = 1;
4216 var->onepart = ONEPART_VALUE;
4217 var->in_changed_variables = false;
4218 var->var_part[0].loc_chain = NULL;
4219 var->var_part[0].cur_loc = NULL;
4220 VAR_LOC_1PAUX (var) = NULL;
4221 *slot = var;
4222 }
4223
4224 VALUE_RECURSED_INTO (node->loc) = true;
4225 }
4226 }
4227
4228 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4229 gcc_assert (*dstslot == dvar);
4230 canonicalize_values_star (dstslot, dst);
4231 gcc_checking_assert (dstslot
4232 == shared_hash_find_slot_noinsert_1 (dst->vars,
4233 dv, dvhash));
4234 dvar = *dstslot;
4235 }
4236 }
4237
4238 if (!onepart_variable_different_p (dvar, s2var))
4239 {
4240 variable_htab_free (dvar);
4241 *dstslot = dvar = s2var;
4242 dvar->refcount++;
4243 }
4244 else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
4245 {
4246 variable_htab_free (dvar);
4247 *dstslot = dvar = s1var;
4248 dvar->refcount++;
4249 dst_can_be_shared = false;
4250 }
4251 else
4252 dst_can_be_shared = false;
4253
4254 return 1;
4255 }
4256
4257 /* Copy s2slot (in DSM->src) to DSM->dst if the variable is a
4258 multi-part variable. Unions of multi-part variables and
4259 intersections of one-part ones will be handled in
4260 variable_merge_over_cur(). */
4261
4262 static int
4263 variable_merge_over_src (variable s2var, struct dfset_merge *dsm)
4264 {
4265 dataflow_set *dst = dsm->dst;
4266 decl_or_value dv = s2var->dv;
4267
4268 if (!s2var->onepart)
4269 {
4270 variable_def **dstp = shared_hash_find_slot (dst->vars, dv);
4271 *dstp = s2var;
4272 s2var->refcount++;
4273 return 1;
4274 }
4275
4276 dsm->src_onepart_cnt++;
4277 return 1;
4278 }
4279
4280 /* Combine dataflow set information from SRC2 into DST, merging
4281 SRC2 with DST's previous contents. */
4282
4283 static void
4284 dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
4285 {
4286 dataflow_set cur = *dst;
4287 dataflow_set *src1 = &cur;
4288 struct dfset_merge dsm;
4289 int i;
4290 size_t src1_elems, src2_elems;
4291 variable_iterator_type hi;
4292 variable var;
4293
4294 src1_elems = shared_hash_htab (src1->vars)->elements ();
4295 src2_elems = shared_hash_htab (src2->vars)->elements ();
4296 dataflow_set_init (dst);
4297 dst->stack_adjust = cur.stack_adjust;
4298 shared_hash_destroy (dst->vars);
4299 dst->vars = new shared_hash_def;
4300 dst->vars->refcount = 1;
4301 dst->vars->htab = new variable_table_type (MAX (src1_elems, src2_elems));
4302
4303 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4304 attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]);
4305
4306 dsm.dst = dst;
4307 dsm.src = src2;
4308 dsm.cur = src1;
4309 dsm.src_onepart_cnt = 0;
4310
4311 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm.src->vars),
4312 var, variable, hi)
4313 variable_merge_over_src (var, &dsm);
4314 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm.cur->vars),
4315 var, variable, hi)
4316 variable_merge_over_cur (var, &dsm);
4317
4318 if (dsm.src_onepart_cnt)
4319 dst_can_be_shared = false;
4320
4321 dataflow_set_destroy (src1);
4322 }
4323
4324 /* Mark register equivalences. */
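/* The idea, roughly: if a register holds two VALUEs of the same
mode at offset 0, those VALUEs must be equal, so mutual equivalence
links are recorded between them and the set is re-canonicalized with
canonicalize_values_star.  */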
4325
4326 static void
4327 dataflow_set_equiv_regs (dataflow_set *set)
4328 {
4329 int i;
4330 attrs list, *listp;
4331
4332 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4333 {
4334 rtx canon[NUM_MACHINE_MODES];
4335
4336 /* If the list is empty or has a single entry, there is no need
4337 to canonicalize anything. */
4338 if (set->regs[i] == NULL || set->regs[i]->next == NULL)
4339 continue;
4340
4341 memset (canon, 0, sizeof (canon));
4342
4343 for (list = set->regs[i]; list; list = list->next)
4344 if (list->offset == 0 && dv_is_value_p (list->dv))
4345 {
4346 rtx val = dv_as_value (list->dv);
4347 rtx *cvalp = &canon[(int)GET_MODE (val)];
4348 rtx cval = *cvalp;
4349
4350 if (canon_value_cmp (val, cval))
4351 *cvalp = val;
4352 }
4353
4354 for (list = set->regs[i]; list; list = list->next)
4355 if (list->offset == 0 && dv_onepart_p (list->dv))
4356 {
4357 rtx cval = canon[(int)GET_MODE (list->loc)];
4358
4359 if (!cval)
4360 continue;
4361
4362 if (dv_is_value_p (list->dv))
4363 {
4364 rtx val = dv_as_value (list->dv);
4365
4366 if (val == cval)
4367 continue;
4368
4369 VALUE_RECURSED_INTO (val) = true;
4370 set_variable_part (set, val, dv_from_value (cval), 0,
4371 VAR_INIT_STATUS_INITIALIZED,
4372 NULL, NO_INSERT);
4373 }
4374
4375 VALUE_RECURSED_INTO (cval) = true;
4376 set_variable_part (set, cval, list->dv, 0,
4377 VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
4378 }
4379
4380 for (listp = &set->regs[i]; (list = *listp);
4381 listp = list ? &list->next : listp)
4382 if (list->offset == 0 && dv_onepart_p (list->dv))
4383 {
4384 rtx cval = canon[(int)GET_MODE (list->loc)];
4385 variable_def **slot;
4386
4387 if (!cval)
4388 continue;
4389
4390 if (dv_is_value_p (list->dv))
4391 {
4392 rtx val = dv_as_value (list->dv);
4393 if (!VALUE_RECURSED_INTO (val))
4394 continue;
4395 }
4396
4397 slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
4398 canonicalize_values_star (slot, set);
4399 if (*listp != list)
4400 list = NULL;
4401 }
4402 }
4403 }
4404
4405 /* Remove any redundant values in the location list of VAR, which must
4406 be unshared and 1-part. */
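/* VALUE_RECURSED_INTO is reused below as a "seen" bit: the first
walk deletes any VALUE node whose bit is already set and marks the
rest; the second walk clears the bits again.  */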
4407
4408 static void
4409 remove_duplicate_values (variable var)
4410 {
4411 location_chain node, *nodep;
4412
4413 gcc_assert (var->onepart);
4414 gcc_assert (var->n_var_parts == 1);
4415 gcc_assert (var->refcount == 1);
4416
4417 for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
4418 {
4419 if (GET_CODE (node->loc) == VALUE)
4420 {
4421 if (VALUE_RECURSED_INTO (node->loc))
4422 {
4423 /* Remove duplicate value node. */
4424 *nodep = node->next;
4425 delete node;
4426 continue;
4427 }
4428 else
4429 VALUE_RECURSED_INTO (node->loc) = true;
4430 }
4431 nodep = &node->next;
4432 }
4433
4434 for (node = var->var_part[0].loc_chain; node; node = node->next)
4435 if (GET_CODE (node->loc) == VALUE)
4436 {
4437 gcc_assert (VALUE_RECURSED_INTO (node->loc));
4438 VALUE_RECURSED_INTO (node->loc) = false;
4439 }
4440 }
4441
4442
4443 /* Hash table iteration argument passed to variable_post_merge. */
4444 struct dfset_post_merge
4445 {
4446 /* The new input set for the current block. */
4447 dataflow_set *set;
4448 /* Pointer to the permanent input set for the current block, or
4449 NULL. */
4450 dataflow_set **permp;
4451 };
4452
4453 /* Create values for incoming expressions associated with one-part
4454 variables that don't have value numbers for them. */
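/* For instance (a hypothetical scenario): if decl D is only known
to live in (reg:SI 3) after a merge and no VALUE is associated with
that register yet, a fresh preserved VALUE is created and recorded
in the permanent set, and D's location is rewritten to that VALUE so
that later rounds find and reuse it.  */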
4455
4456 int
4457 variable_post_merge_new_vals (variable_def **slot, dfset_post_merge *dfpm)
4458 {
4459 dataflow_set *set = dfpm->set;
4460 variable var = *slot;
4461 location_chain node;
4462
4463 if (!var->onepart || !var->n_var_parts)
4464 return 1;
4465
4466 gcc_assert (var->n_var_parts == 1);
4467
4468 if (dv_is_decl_p (var->dv))
4469 {
4470 bool check_dupes = false;
4471
4472 restart:
4473 for (node = var->var_part[0].loc_chain; node; node = node->next)
4474 {
4475 if (GET_CODE (node->loc) == VALUE)
4476 gcc_assert (!VALUE_RECURSED_INTO (node->loc));
4477 else if (GET_CODE (node->loc) == REG)
4478 {
4479 attrs att, *attp, *curp = NULL;
4480
4481 if (var->refcount != 1)
4482 {
4483 slot = unshare_variable (set, slot, var,
4484 VAR_INIT_STATUS_INITIALIZED);
4485 var = *slot;
4486 goto restart;
4487 }
4488
4489 for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
4490 attp = &att->next)
4491 if (att->offset == 0
4492 && GET_MODE (att->loc) == GET_MODE (node->loc))
4493 {
4494 if (dv_is_value_p (att->dv))
4495 {
4496 rtx cval = dv_as_value (att->dv);
4497 node->loc = cval;
4498 check_dupes = true;
4499 break;
4500 }
4501 else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
4502 curp = attp;
4503 }
4504
4505 if (!curp)
4506 {
4507 curp = attp;
4508 while (*curp)
4509 if ((*curp)->offset == 0
4510 && GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
4511 && dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
4512 break;
4513 else
4514 curp = &(*curp)->next;
4515 gcc_assert (*curp);
4516 }
4517
4518 if (!att)
4519 {
4520 decl_or_value cdv;
4521 rtx cval;
4522
4523 if (!*dfpm->permp)
4524 {
4525 *dfpm->permp = XNEW (dataflow_set);
4526 dataflow_set_init (*dfpm->permp);
4527 }
4528
4529 for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
4530 att; att = att->next)
4531 if (GET_MODE (att->loc) == GET_MODE (node->loc))
4532 {
4533 gcc_assert (att->offset == 0
4534 && dv_is_value_p (att->dv));
4535 val_reset (set, att->dv);
4536 break;
4537 }
4538
4539 if (att)
4540 {
4541 cdv = att->dv;
4542 cval = dv_as_value (cdv);
4543 }
4544 else
4545 {
4546 /* Create a unique value to hold this register;
4547 it ought to be found and reused in
4548 subsequent rounds. */
4549 cselib_val *v;
4550 gcc_assert (!cselib_lookup (node->loc,
4551 GET_MODE (node->loc), 0,
4552 VOIDmode));
4553 v = cselib_lookup (node->loc, GET_MODE (node->loc), 1,
4554 VOIDmode);
4555 cselib_preserve_value (v);
4556 cselib_invalidate_rtx (node->loc);
4557 cval = v->val_rtx;
4558 cdv = dv_from_value (cval);
4559 if (dump_file)
4560 fprintf (dump_file,
4561 "Created new value %u:%u for reg %i\n",
4562 v->uid, v->hash, REGNO (node->loc));
4563 }
4564
4565 var_reg_decl_set (*dfpm->permp, node->loc,
4566 VAR_INIT_STATUS_INITIALIZED,
4567 cdv, 0, NULL, INSERT);
4568
4569 node->loc = cval;
4570 check_dupes = true;
4571 }
4572
4573 /* Remove the attribute referring to the decl, which now
4574 uses the value for the register, whether that value already
4575 exists or is to be added when we bring the permanent set in. */
4576 att = *curp;
4577 *curp = att->next;
4578 delete att;
4579 }
4580 }
4581
4582 if (check_dupes)
4583 remove_duplicate_values (var);
4584 }
4585
4586 return 1;
4587 }
4588
4589 /* Reset values in the permanent set that are not associated with the
4590 chosen expression. */
4591
4592 int
4593 variable_post_merge_perm_vals (variable_def **pslot, dfset_post_merge *dfpm)
4594 {
4595 dataflow_set *set = dfpm->set;
4596 variable pvar = *pslot, var;
4597 location_chain pnode;
4598 decl_or_value dv;
4599 attrs att;
4600
4601 gcc_assert (dv_is_value_p (pvar->dv)
4602 && pvar->n_var_parts == 1);
4603 pnode = pvar->var_part[0].loc_chain;
4604 gcc_assert (pnode
4605 && !pnode->next
4606 && REG_P (pnode->loc));
4607
4608 dv = pvar->dv;
4609
4610 var = shared_hash_find (set->vars, dv);
4611 if (var)
4612 {
4613 /* Although variable_post_merge_new_vals may have made decls
4614 non-star-canonical, values that pre-existed in canonical form
4615 remain canonical, and newly-created values reference a single
4616 REG, so they are canonical as well. Since VAR has the
4617 location list for a VALUE, using find_loc_in_1pdv for it is
4618 fine, since VALUEs don't map back to DECLs. */
4619 if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
4620 return 1;
4621 val_reset (set, dv);
4622 }
4623
4624 for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
4625 if (att->offset == 0
4626 && GET_MODE (att->loc) == GET_MODE (pnode->loc)
4627 && dv_is_value_p (att->dv))
4628 break;
4629
4630 /* If there is a value associated with this register already, create
4631 an equivalence. */
4632 if (att && dv_as_value (att->dv) != dv_as_value (dv))
4633 {
4634 rtx cval = dv_as_value (att->dv);
4635 set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
4636 set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
4637 NULL, INSERT);
4638 }
4639 else if (!att)
4640 {
4641 attrs_list_insert (&set->regs[REGNO (pnode->loc)],
4642 dv, 0, pnode->loc);
4643 variable_union (pvar, set);
4644 }
4645
4646 return 1;
4647 }
4648
4649 /* Adjust SET after a merge: create values for incoming expressions,
4650 reset stale permanent values, and canonicalize values and variables. */
4651
4652 static void
4653 dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
4654 {
4655 struct dfset_post_merge dfpm;
4656
4657 dfpm.set = set;
4658 dfpm.permp = permp;
4659
4660 shared_hash_htab (set->vars)
4661 ->traverse <dfset_post_merge*, variable_post_merge_new_vals> (&dfpm);
4662 if (*permp)
4663 shared_hash_htab ((*permp)->vars)
4664 ->traverse <dfset_post_merge*, variable_post_merge_perm_vals> (&dfpm);
4665 shared_hash_htab (set->vars)
4666 ->traverse <dataflow_set *, canonicalize_values_star> (set);
4667 shared_hash_htab (set->vars)
4668 ->traverse <dataflow_set *, canonicalize_vars_star> (set);
4669 }
4670
4671 /* Return a node whose loc is a MEM that refers to EXPR in the
4672 location list of the one-part value VAL, or in that of
4673 any values recursively mentioned in the location lists. */
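/* VALUE_RECURSED_INTO serves as a visited marker during the
recursion below, so that cycles among mutually-referencing VALUEs
terminate; the flag is cleared again before returning.  */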
4674
4675 static location_chain
4676 find_mem_expr_in_1pdv (tree expr, rtx val, variable_table_type *vars)
4677 {
4678 location_chain node;
4679 decl_or_value dv;
4680 variable var;
4681 location_chain where = NULL;
4682
4683 if (!val)
4684 return NULL;
4685
4686 gcc_assert (GET_CODE (val) == VALUE
4687 && !VALUE_RECURSED_INTO (val));
4688
4689 dv = dv_from_value (val);
4690 var = vars->find_with_hash (dv, dv_htab_hash (dv));
4691
4692 if (!var)
4693 return NULL;
4694
4695 gcc_assert (var->onepart);
4696
4697 if (!var->n_var_parts)
4698 return NULL;
4699
4700 VALUE_RECURSED_INTO (val) = true;
4701
4702 for (node = var->var_part[0].loc_chain; node; node = node->next)
4703 if (MEM_P (node->loc)
4704 && MEM_EXPR (node->loc) == expr
4705 && INT_MEM_OFFSET (node->loc) == 0)
4706 {
4707 where = node;
4708 break;
4709 }
4710 else if (GET_CODE (node->loc) == VALUE
4711 && !VALUE_RECURSED_INTO (node->loc)
4712 && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
4713 break;
4714
4715 VALUE_RECURSED_INTO (val) = false;
4716
4717 return where;
4718 }
4719
4720 /* Return TRUE if the value of MEM may vary across a call. */
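/* For example, a MEM whose base decl may be aliased, or a
non-TREE_READONLY global, can be overwritten by the callee and so
dies at the call; a read-only local whose address never escapes
cannot.  */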
4721
4722 static bool
4723 mem_dies_at_call (rtx mem)
4724 {
4725 tree expr = MEM_EXPR (mem);
4726 tree decl;
4727
4728 if (!expr)
4729 return true;
4730
4731 decl = get_base_address (expr);
4732
4733 if (!decl)
4734 return true;
4735
4736 if (!DECL_P (decl))
4737 return true;
4738
4739 return (may_be_aliased (decl)
4740 || (!TREE_READONLY (decl) && is_global_var (decl)));
4741 }
4742
4743 /* Remove all MEMs from the location list of a hash table entry for a
4744 one-part variable, except those whose MEM attributes map back to
4745 the variable itself, directly or within a VALUE. */
4746
4747 int
4748 dataflow_set_preserve_mem_locs (variable_def **slot, dataflow_set *set)
4749 {
4750 variable var = *slot;
4751
4752 if (var->onepart == ONEPART_VDECL || var->onepart == ONEPART_DEXPR)
4753 {
4754 tree decl = dv_as_decl (var->dv);
4755 location_chain loc, *locp;
4756 bool changed = false;
4757
4758 if (!var->n_var_parts)
4759 return 1;
4760
4761 gcc_assert (var->n_var_parts == 1);
4762
4763 if (shared_var_p (var, set->vars))
4764 {
4765 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4766 {
4767 /* We want to remove dying MEMs that don't refer to DECL. */
4768 if (GET_CODE (loc->loc) == MEM
4769 && (MEM_EXPR (loc->loc) != decl
4770 || INT_MEM_OFFSET (loc->loc) != 0)
4771 && !mem_dies_at_call (loc->loc))
4772 break;
4773 /* We want to move MEMs that do refer to DECL here. */
4774 else if (GET_CODE (loc->loc) == VALUE
4775 && find_mem_expr_in_1pdv (decl, loc->loc,
4776 shared_hash_htab (set->vars)))
4777 break;
4778 }
4779
4780 if (!loc)
4781 return 1;
4782
4783 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4784 var = *slot;
4785 gcc_assert (var->n_var_parts == 1);
4786 }
4787
4788 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4789 loc; loc = *locp)
4790 {
4791 rtx old_loc = loc->loc;
4792 if (GET_CODE (old_loc) == VALUE)
4793 {
4794 location_chain mem_node
4795 = find_mem_expr_in_1pdv (decl, loc->loc,
4796 shared_hash_htab (set->vars));
4797
4798 /* ??? This picks up only one out of multiple MEMs that
4799 refer to the same variable. Do we ever need to be
4800 concerned about dealing with more than one, or, given
4801 that they should all map to the same variable
4802 location, will their addresses have been merged so
4803 that they are regarded as equivalent? */
4804 if (mem_node)
4805 {
4806 loc->loc = mem_node->loc;
4807 loc->set_src = mem_node->set_src;
4808 loc->init = MIN (loc->init, mem_node->init);
4809 }
4810 }
4811
4812 if (GET_CODE (loc->loc) != MEM
4813 || (MEM_EXPR (loc->loc) == decl
4814 && INT_MEM_OFFSET (loc->loc) == 0)
4815 || !mem_dies_at_call (loc->loc))
4816 {
4817 if (old_loc != loc->loc && emit_notes)
4818 {
4819 if (old_loc == var->var_part[0].cur_loc)
4820 {
4821 changed = true;
4822 var->var_part[0].cur_loc = NULL;
4823 }
4824 }
4825 locp = &loc->next;
4826 continue;
4827 }
4828
4829 if (emit_notes)
4830 {
4831 if (old_loc == var->var_part[0].cur_loc)
4832 {
4833 changed = true;
4834 var->var_part[0].cur_loc = NULL;
4835 }
4836 }
4837 *locp = loc->next;
4838 delete loc;
4839 }
4840
4841 if (!var->var_part[0].loc_chain)
4842 {
4843 var->n_var_parts--;
4844 changed = true;
4845 }
4846 if (changed)
4847 variable_was_changed (var, set);
4848 }
4849
4850 return 1;
4851 }
4852
4853 /* Remove all MEMs from the location list of a hash table entry for a
4854 value. */
4855
4856 int
4857 dataflow_set_remove_mem_locs (variable_def **slot, dataflow_set *set)
4858 {
4859 variable var = *slot;
4860
4861 if (var->onepart == ONEPART_VALUE)
4862 {
4863 location_chain loc, *locp;
4864 bool changed = false;
4865 rtx cur_loc;
4866
4867 gcc_assert (var->n_var_parts == 1);
4868
4869 if (shared_var_p (var, set->vars))
4870 {
4871 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4872 if (GET_CODE (loc->loc) == MEM
4873 && mem_dies_at_call (loc->loc))
4874 break;
4875
4876 if (!loc)
4877 return 1;
4878
4879 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4880 var = *slot;
4881 gcc_assert (var->n_var_parts == 1);
4882 }
4883
4884 if (VAR_LOC_1PAUX (var))
4885 cur_loc = VAR_LOC_FROM (var);
4886 else
4887 cur_loc = var->var_part[0].cur_loc;
4888
4889 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4890 loc; loc = *locp)
4891 {
4892 if (GET_CODE (loc->loc) != MEM
4893 || !mem_dies_at_call (loc->loc))
4894 {
4895 locp = &loc->next;
4896 continue;
4897 }
4898
4899 *locp = loc->next;
4900 /* If we have deleted the location that was last emitted,
4901 we have to emit a new location, so add the variable to the
4902 set of changed variables. */
4903 if (cur_loc == loc->loc)
4904 {
4905 changed = true;
4906 var->var_part[0].cur_loc = NULL;
4907 if (VAR_LOC_1PAUX (var))
4908 VAR_LOC_FROM (var) = NULL;
4909 }
4910 delete loc;
4911 }
4912
4913 if (!var->var_part[0].loc_chain)
4914 {
4915 var->n_var_parts--;
4916 changed = true;
4917 }
4918 if (changed)
4919 variable_was_changed (var, set);
4920 }
4921
4922 return 1;
4923 }
4924
4925 /* Remove all variable-location information about call-clobbered
4926 registers, as well as associations between MEMs and VALUEs. */
4927
4928 static void
4929 dataflow_set_clear_at_call (dataflow_set *set)
4930 {
4931 unsigned int r;
4932 hard_reg_set_iterator hrsi;
4933
4934 EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call, 0, r, hrsi)
4935 var_regno_delete (set, r);
4936
4937 if (MAY_HAVE_DEBUG_INSNS)
4938 {
4939 set->traversed_vars = set->vars;
4940 shared_hash_htab (set->vars)
4941 ->traverse <dataflow_set *, dataflow_set_preserve_mem_locs> (set);
4942 set->traversed_vars = set->vars;
4943 shared_hash_htab (set->vars)
4944 ->traverse <dataflow_set *, dataflow_set_remove_mem_locs> (set);
4945 set->traversed_vars = NULL;
4946 }
4947 }
4948
4949 static bool
4950 variable_part_different_p (variable_part *vp1, variable_part *vp2)
4951 {
4952 location_chain lc1, lc2;
4953
4954 for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
4955 {
4956 for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
4957 {
4958 if (REG_P (lc1->loc) && REG_P (lc2->loc))
4959 {
4960 if (REGNO (lc1->loc) == REGNO (lc2->loc))
4961 break;
4962 }
4963 if (rtx_equal_p (lc1->loc, lc2->loc))
4964 break;
4965 }
4966 if (!lc2)
4967 return true;
4968 }
4969 return false;
4970 }
4971
4972 /* Return true if one-part variables VAR1 and VAR2 are different.
4973 They must be in canonical order. */
4974
4975 static bool
4976 onepart_variable_different_p (variable var1, variable var2)
4977 {
4978 location_chain lc1, lc2;
4979
4980 if (var1 == var2)
4981 return false;
4982
4983 gcc_assert (var1->n_var_parts == 1
4984 && var2->n_var_parts == 1);
4985
4986 lc1 = var1->var_part[0].loc_chain;
4987 lc2 = var2->var_part[0].loc_chain;
4988
4989 gcc_assert (lc1 && lc2);
4990
4991 while (lc1 && lc2)
4992 {
4993 if (loc_cmp (lc1->loc, lc2->loc))
4994 return true;
4995 lc1 = lc1->next;
4996 lc2 = lc2->next;
4997 }
4998
4999 return lc1 != lc2;
5000 }
5001
5002 /* Return true if variables VAR1 and VAR2 are different. */
5003
5004 static bool
5005 variable_different_p (variable var1, variable var2)
5006 {
5007 int i;
5008
5009 if (var1 == var2)
5010 return false;
5011
5012 if (var1->onepart != var2->onepart)
5013 return true;
5014
5015 if (var1->n_var_parts != var2->n_var_parts)
5016 return true;
5017
5018 if (var1->onepart && var1->n_var_parts)
5019 {
5020 gcc_checking_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv)
5021 && var1->n_var_parts == 1);
5022 /* One-part values have locations in a canonical order. */
5023 return onepart_variable_different_p (var1, var2);
5024 }
5025
5026 for (i = 0; i < var1->n_var_parts; i++)
5027 {
5028 if (VAR_PART_OFFSET (var1, i) != VAR_PART_OFFSET (var2, i))
5029 return true;
5030 if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
5031 return true;
5032 if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
5033 return true;
5034 }
5035 return false;
5036 }
5037
5038 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
5039
5040 static bool
5041 dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
5042 {
5043 variable_iterator_type hi;
5044 variable var1;
5045
5046 if (old_set->vars == new_set->vars)
5047 return false;
5048
5049 if (shared_hash_htab (old_set->vars)->elements ()
5050 != shared_hash_htab (new_set->vars)->elements ())
5051 return true;
5052
5053 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (old_set->vars),
5054 var1, variable, hi)
5055 {
5056 variable_table_type *htab = shared_hash_htab (new_set->vars);
5057 variable var2 = htab->find_with_hash (var1->dv, dv_htab_hash (var1->dv));
5058 if (!var2)
5059 {
5060 if (dump_file && (dump_flags & TDF_DETAILS))
5061 {
5062 fprintf (dump_file, "dataflow difference found: removal of:\n");
5063 dump_var (var1);
5064 }
5065 return true;
5066 }
5067
5068 if (variable_different_p (var1, var2))
5069 {
5070 if (dump_file && (dump_flags & TDF_DETAILS))
5071 {
5072 fprintf (dump_file, "dataflow difference found: "
5073 "old and new follow:\n");
5074 dump_var (var1);
5075 dump_var (var2);
5076 }
5077 return true;
5078 }
5079 }
5080
5081 /* No need to traverse the second hashtab: both have the same number
5082 of elements, and since every entry of the first was found in the
5083 second, the second can't have any extra entries. */
5084 return false;
5085 }
5086
5087 /* Free the contents of dataflow set SET. */
5088
5089 static void
5090 dataflow_set_destroy (dataflow_set *set)
5091 {
5092 int i;
5093
5094 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5095 attrs_list_clear (&set->regs[i]);
5096
5097 shared_hash_destroy (set->vars);
5098 set->vars = NULL;
5099 }
5100
5101 /* Return true if RTL X contains a SYMBOL_REF. */
5102
5103 static bool
5104 contains_symbol_ref (rtx x)
5105 {
5106 const char *fmt;
5107 RTX_CODE code;
5108 int i;
5109
5110 if (!x)
5111 return false;
5112
5113 code = GET_CODE (x);
5114 if (code == SYMBOL_REF)
5115 return true;
5116
5117 fmt = GET_RTX_FORMAT (code);
5118 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5119 {
5120 if (fmt[i] == 'e')
5121 {
5122 if (contains_symbol_ref (XEXP (x, i)))
5123 return true;
5124 }
5125 else if (fmt[i] == 'E')
5126 {
5127 int j;
5128 for (j = 0; j < XVECLEN (x, i); j++)
5129 if (contains_symbol_ref (XVECEXP (x, i, j)))
5130 return true;
5131 }
5132 }
5133
5134 return false;
5135 }
5136
5137 /* Shall EXPR be tracked? */
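/* In short, the checks below reject anything that is not a named
VAR_DECL or PARM_DECL with RTL (DEBUG_EXPR_DECLs are handled
specially), declarations ignored for debugging, globals, DECLs whose
DECL_RTL contains a SYMBOL_REF, and memory-resident aggregates or
overly large objects.  */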
5138
5139 static bool
5140 track_expr_p (tree expr, bool need_rtl)
5141 {
5142 rtx decl_rtl;
5143 tree realdecl;
5144
5145 if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
5146 return DECL_RTL_SET_P (expr);
5147
5148 /* If EXPR is not a parameter or a variable do not track it. */
5149 if (TREE_CODE (expr) != VAR_DECL && TREE_CODE (expr) != PARM_DECL)
5150 return 0;
5151
5152 /* It also must have a name... */
5153 if (!DECL_NAME (expr) && need_rtl)
5154 return 0;
5155
5156 /* ... and an RTL assigned to it. */
5157 decl_rtl = DECL_RTL_IF_SET (expr);
5158 if (!decl_rtl && need_rtl)
5159 return 0;
5160
5161 /* If this expression is really a debug alias of some other declaration, we
5162 don't need to track this expression if the ultimate declaration is
5163 ignored. */
5164 realdecl = expr;
5165 if (TREE_CODE (realdecl) == VAR_DECL && DECL_HAS_DEBUG_EXPR_P (realdecl))
5166 {
5167 realdecl = DECL_DEBUG_EXPR (realdecl);
5168 if (!DECL_P (realdecl))
5169 {
5170 if (handled_component_p (realdecl)
5171 || (TREE_CODE (realdecl) == MEM_REF
5172 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
5173 {
5174 HOST_WIDE_INT bitsize, bitpos, maxsize;
5175 tree innerdecl
5176 = get_ref_base_and_extent (realdecl, &bitpos, &bitsize,
5177 &maxsize);
5178 if (!DECL_P (innerdecl)
5179 || DECL_IGNORED_P (innerdecl)
5180 /* Do not track declarations for parts of tracked parameters
5181 since we want to track them as a whole instead. */
5182 || (TREE_CODE (innerdecl) == PARM_DECL
5183 && DECL_MODE (innerdecl) != BLKmode
5184 && TREE_CODE (TREE_TYPE (innerdecl)) != UNION_TYPE)
5185 || TREE_STATIC (innerdecl)
5186 || bitsize <= 0
5187 || bitpos + bitsize > 256
5188 || bitsize != maxsize)
5189 return 0;
5190 else
5191 realdecl = expr;
5192 }
5193 else
5194 return 0;
5195 }
5196 }
5197
5198 /* Do not track EXPR if its REALDECL should be ignored for debugging
5199 purposes. */
5200 if (DECL_IGNORED_P (realdecl))
5201 return 0;
5202
5203 /* Do not track global variables until we are able to emit correct location
5204 list for them. */
5205 if (TREE_STATIC (realdecl))
5206 return 0;
5207
5208 /* When EXPR is a DECL for an alias of some variable (see the example
5209 below), the TREE_STATIC flag is not used. Disable tracking of all
5210 DECLs whose DECL_RTL contains a SYMBOL_REF.
5211
5212 Example:
5213 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
5214 char **_dl_argv;
5215 */
5216 if (decl_rtl && MEM_P (decl_rtl)
5217 && contains_symbol_ref (XEXP (decl_rtl, 0)))
5218 return 0;
5219
5220 /* If the RTX is a memory reference, it should not be very large
5221 (because that would be an array or a struct). */
5222 if (decl_rtl && MEM_P (decl_rtl))
5223 {
5224 /* Do not track structures and arrays. */
5225 if (GET_MODE (decl_rtl) == BLKmode
5226 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
5227 return 0;
5228 if (MEM_SIZE_KNOWN_P (decl_rtl)
5229 && MEM_SIZE (decl_rtl) > MAX_VAR_PARTS)
5230 return 0;
5231 }
5232
5233 DECL_CHANGED (expr) = 0;
5234 DECL_CHANGED (realdecl) = 0;
5235 return 1;
5236 }
5237
5238 /* Determine whether a given LOC refers to the same variable part as
5239 EXPR+OFFSET. */
5240
5241 static bool
5242 same_variable_part_p (rtx loc, tree expr, HOST_WIDE_INT offset)
5243 {
5244 tree expr2;
5245 HOST_WIDE_INT offset2;
5246
5247 if (! DECL_P (expr))
5248 return false;
5249
5250 if (REG_P (loc))
5251 {
5252 expr2 = REG_EXPR (loc);
5253 offset2 = REG_OFFSET (loc);
5254 }
5255 else if (MEM_P (loc))
5256 {
5257 expr2 = MEM_EXPR (loc);
5258 offset2 = INT_MEM_OFFSET (loc);
5259 }
5260 else
5261 return false;
5262
5263 if (! expr2 || ! DECL_P (expr2))
5264 return false;
5265
5266 expr = var_debug_decl (expr);
5267 expr2 = var_debug_decl (expr2);
5268
5269 return (expr == expr2 && offset == offset2);
5270 }
5271
5272 /* LOC is a REG or MEM that we would like to track if possible.
5273 If EXPR is null, we don't know what expression LOC refers to,
5274 otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
5275 LOC is an lvalue register.
5276
5277 Return true if EXPR is nonnull and if LOC, or some lowpart of it,
5278 is something we can track. When returning true, store the mode of
5279 the lowpart we can track in *MODE_OUT (if nonnull) and its offset
5280 from EXPR in *OFFSET_OUT (if nonnull). */
5281
5282 static bool
5283 track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p,
5284 machine_mode *mode_out, HOST_WIDE_INT *offset_out)
5285 {
5286 machine_mode mode;
5287
5288 if (expr == NULL || !track_expr_p (expr, true))
5289 return false;
5290
5291 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
5292 whole subreg, but only the old inner part is really relevant. */
5293 mode = GET_MODE (loc);
5294 if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
5295 {
5296 machine_mode pseudo_mode;
5297
5298 pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
5299 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (pseudo_mode))
5300 {
5301 offset += byte_lowpart_offset (pseudo_mode, mode);
5302 mode = pseudo_mode;
5303 }
5304 }
5305
5306 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
5307 Do the same if we are storing to a register and EXPR occupies
5308 the whole of register LOC; in that case, the whole of EXPR is
5309 being changed. We exclude complex modes from the second case
5310 because the real and imaginary parts are represented as separate
5311 pseudo registers, even if the whole complex value fits into one
5312 hard register. */
5313 if ((GET_MODE_SIZE (mode) > GET_MODE_SIZE (DECL_MODE (expr))
5314 || (store_reg_p
5315 && !COMPLEX_MODE_P (DECL_MODE (expr))
5316 && hard_regno_nregs[REGNO (loc)][DECL_MODE (expr)] == 1))
5317 && offset + byte_lowpart_offset (DECL_MODE (expr), mode) == 0)
5318 {
5319 mode = DECL_MODE (expr);
5320 offset = 0;
5321 }
5322
5323 if (offset < 0 || offset >= MAX_VAR_PARTS)
5324 return false;
5325
5326 if (mode_out)
5327 *mode_out = mode;
5328 if (offset_out)
5329 *offset_out = offset;
5330 return true;
5331 }
5332
5333 /* Return the MODE lowpart of LOC, or null if LOC is not something we
5334 want to track. When returning nonnull, make sure that the attributes
5335 on the returned value are updated. */
5336
5337 static rtx
5338 var_lowpart (machine_mode mode, rtx loc)
5339 {
5340 unsigned int offset, reg_offset, regno;
5341
5342 if (GET_MODE (loc) == mode)
5343 return loc;
5344
5345 if (!REG_P (loc) && !MEM_P (loc))
5346 return NULL;
5347
5348 offset = byte_lowpart_offset (mode, GET_MODE (loc));
5349
5350 if (MEM_P (loc))
5351 return adjust_address_nv (loc, mode, offset);
5352
5353 reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
5354 regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
5355 reg_offset, mode);
5356 return gen_rtx_REG_offset (loc, mode, regno, offset);
5357 }
5358
5359 /* Carry information about uses and stores while walking rtx. */
5360
5361 struct count_use_info
5362 {
5363 /* The insn where the RTX is. */
5364 rtx_insn *insn;
5365
5366 /* The basic block where insn is. */
5367 basic_block bb;
5368
5369 /* The array of n_sets sets in the insn, as determined by cselib. */
5370 struct cselib_set *sets;
5371 int n_sets;
5372
5373 /* True if we're counting stores, false otherwise. */
5374 bool store_p;
5375 };
5376
5377 /* Find a VALUE corresponding to X. */
5378
5379 static inline cselib_val *
5380 find_use_val (rtx x, machine_mode mode, struct count_use_info *cui)
5381 {
5382 int i;
5383
5384 if (cui->sets)
5385 {
5386 /* This is called after uses are set up and before stores are
5387 processed by cselib, so it's safe to look up srcs, but not
5388 dsts. So we look up expressions that appear in srcs or in
5389 dest expressions, but we search the sets array for dests of
5390 stores. */
5391 if (cui->store_p)
5392 {
5393 /* Some targets represent memset and memcpy patterns
5394 by (set (mem:BLK ...) (reg:[QHSD]I ...)) or
5395 (set (mem:BLK ...) (const_int ...)) or
5396 (set (mem:BLK ...) (mem:BLK ...)). Don't return anything
5397 in that case, otherwise we end up with mode mismatches. */
5398 if (mode == BLKmode && MEM_P (x))
5399 return NULL;
5400 for (i = 0; i < cui->n_sets; i++)
5401 if (cui->sets[i].dest == x)
5402 return cui->sets[i].src_elt;
5403 }
5404 else
5405 return cselib_lookup (x, mode, 0, VOIDmode);
5406 }
5407
5408 return NULL;
5409 }
5410
5411 /* Replace all registers and addresses in an expression with VALUE
5412 expressions that map back to them, unless the expression is a
5413 register. If no mapping is or can be performed, returns NULL. */
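/* Roughly, a memory reference such as

(mem:SI (plus:SI (reg:SI sp) (const_int 8)))

may be rewritten as

(mem:SI (value:SI V))

where V is the cselib VALUE of the address, so the recorded location
stays meaningful even after the registers making up the address are
reused.  */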
5414
5415 static rtx
5416 replace_expr_with_values (rtx loc)
5417 {
5418 if (REG_P (loc) || GET_CODE (loc) == ENTRY_VALUE)
5419 return NULL;
5420 else if (MEM_P (loc))
5421 {
5422 cselib_val *addr = cselib_lookup (XEXP (loc, 0),
5423 get_address_mode (loc), 0,
5424 GET_MODE (loc));
5425 if (addr)
5426 return replace_equiv_address_nv (loc, addr->val_rtx);
5427 else
5428 return NULL;
5429 }
5430 else
5431 return cselib_subst_to_values (loc, VOIDmode);
5432 }
5433
5434 /* Return true if X contains a DEBUG_EXPR. */
5435
5436 static bool
5437 rtx_debug_expr_p (const_rtx x)
5438 {
5439 subrtx_iterator::array_type array;
5440 FOR_EACH_SUBRTX (iter, array, x, ALL)
5441 if (GET_CODE (*iter) == DEBUG_EXPR)
5442 return true;
5443 return false;
5444 }
5445
5446 /* Determine what kind of micro operation to choose for a USE. Return
5447 MO_CLOBBER if no micro operation is to be generated. */
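/* Roughly, the mapping implemented below is: a tracked VAR_LOCATION
yields MO_VAL_LOC; a REG or MEM with a cselib VALUE yields MO_VAL_SET
for stores or MO_VAL_USE for uses of not-yet-preserved values; a
tracked REG or MEM without one yields MO_USE; an untracked register
yields MO_USE_NO_VAR; anything else is MO_CLOBBER.  */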
5448
5449 static enum micro_operation_type
5450 use_type (rtx loc, struct count_use_info *cui, machine_mode *modep)
5451 {
5452 tree expr;
5453
5454 if (cui && cui->sets)
5455 {
5456 if (GET_CODE (loc) == VAR_LOCATION)
5457 {
5458 if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
5459 {
5460 rtx ploc = PAT_VAR_LOCATION_LOC (loc);
5461 if (! VAR_LOC_UNKNOWN_P (ploc))
5462 {
5463 cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1,
5464 VOIDmode);
5465
5466 /* ??? flag_float_store and volatile mems are never
5467 given values, but we could in theory use them for
5468 locations. */
5469 gcc_assert (val || 1);
5470 }
5471 return MO_VAL_LOC;
5472 }
5473 else
5474 return MO_CLOBBER;
5475 }
5476
5477 if (REG_P (loc) || MEM_P (loc))
5478 {
5479 if (modep)
5480 *modep = GET_MODE (loc);
5481 if (cui->store_p)
5482 {
5483 if (REG_P (loc)
5484 || (find_use_val (loc, GET_MODE (loc), cui)
5485 && cselib_lookup (XEXP (loc, 0),
5486 get_address_mode (loc), 0,
5487 GET_MODE (loc))))
5488 return MO_VAL_SET;
5489 }
5490 else
5491 {
5492 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5493
5494 if (val && !cselib_preserved_value_p (val))
5495 return MO_VAL_USE;
5496 }
5497 }
5498 }
5499
5500 if (REG_P (loc))
5501 {
5502 gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);
5503
5504 if (loc == cfa_base_rtx)
5505 return MO_CLOBBER;
5506 expr = REG_EXPR (loc);
5507
5508 if (!expr)
5509 return MO_USE_NO_VAR;
5510 else if (target_for_debug_bind (var_debug_decl (expr)))
5511 return MO_CLOBBER;
5512 else if (track_loc_p (loc, expr, REG_OFFSET (loc),
5513 false, modep, NULL))
5514 return MO_USE;
5515 else
5516 return MO_USE_NO_VAR;
5517 }
5518 else if (MEM_P (loc))
5519 {
5520 expr = MEM_EXPR (loc);
5521
5522 if (!expr)
5523 return MO_CLOBBER;
5524 else if (target_for_debug_bind (var_debug_decl (expr)))
5525 return MO_CLOBBER;
5526 else if (track_loc_p (loc, expr, INT_MEM_OFFSET (loc),
5527 false, modep, NULL)
5528 /* Multi-part variables shouldn't refer to one-part
5529 variable names such as VALUEs (never happens) or
5530 DEBUG_EXPRs (only happens in the presence of debug
5531 insns). */
5532 && (!MAY_HAVE_DEBUG_INSNS
5533 || !rtx_debug_expr_p (XEXP (loc, 0))))
5534 return MO_USE;
5535 else
5536 return MO_CLOBBER;
5537 }
5538
5539 return MO_CLOBBER;
5540 }
5541
5542 /* Log to OUT information about micro-operation MOPT involving X in
5543 INSN of BB. */
5544
5545 static inline void
5546 log_op_type (rtx x, basic_block bb, rtx_insn *insn,
5547 enum micro_operation_type mopt, FILE *out)
5548 {
5549 fprintf (out, "bb %i op %i insn %i %s ",
5550 bb->index, VTI (bb)->mos.length (),
5551 INSN_UID (insn), micro_operation_type_name[mopt]);
5552 print_inline_rtx (out, x, 2);
5553 fputc ('\n', out);
5554 }
5555
5556 /* Tell whether the CONCAT used to hold a VALUE and its location
5557 needs value resolution, i.e., an attempt at mapping the location
5558 back to other incoming values. */
5559 #define VAL_NEEDS_RESOLUTION(x) \
5560 (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
5561 /* Whether the location in the CONCAT is a tracked expression, which
5562 should also be handled like a MO_USE. */
5563 #define VAL_HOLDS_TRACK_EXPR(x) \
5564 (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
5565 /* Whether the location in the CONCAT should be handled like a MO_COPY
5566 as well. */
5567 #define VAL_EXPR_IS_COPIED(x) \
5568 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
5569 /* Whether the location in the CONCAT should be handled like a
5570 MO_CLOBBER as well. */
5571 #define VAL_EXPR_IS_CLOBBERED(x) \
5572 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
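/* These four flags reuse existing RTL flag bits (volatil, used,
jump and unchanging), which are otherwise meaningless on CONCATs;
the RTL_FLAG_CHECK1 wrappers assert that the rtx really is a
CONCAT.  */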
5573
5574 /* All preserved VALUEs. */
5575 static vec<rtx> preserved_values;
5576
5577 /* Ensure VAL is preserved and remember it in a vector for vt_emit_notes. */
5578
5579 static void
5580 preserve_value (cselib_val *val)
5581 {
5582 cselib_preserve_value (val);
5583 preserved_values.safe_push (val->val_rtx);
5584 }
5585
5586 /* Helper function for MO_VAL_LOC handling. Return true if X
5587 contains any rtxes that are not suitable for CONST use and have
5588 not been replaced by VALUEs. */
5589
5590 static bool
5591 non_suitable_const (const_rtx x)
5592 {
5593 subrtx_iterator::array_type array;
5594 FOR_EACH_SUBRTX (iter, array, x, ALL)
5595 {
5596 const_rtx x = *iter;
5597 switch (GET_CODE (x))
5598 {
5599 case REG:
5600 case DEBUG_EXPR:
5601 case PC:
5602 case SCRATCH:
5603 case CC0:
5604 case ASM_INPUT:
5605 case ASM_OPERANDS:
5606 return true;
5607 case MEM:
5608 if (!MEM_READONLY_P (x))
5609 return true;
5610 break;
5611 default:
5612 break;
5613 }
5614 }
5615 return false;
5616 }
5617
5618 /* Add a use (a register or memory reference) LOC, if it is to be
5619 tracked, to VTI (bb)->mos. */
5620
5621 static void
5622 add_uses (rtx loc, struct count_use_info *cui)
5623 {
5624 machine_mode mode = VOIDmode;
5625 enum micro_operation_type type = use_type (loc, cui, &mode);
5626
5627 if (type != MO_CLOBBER)
5628 {
5629 basic_block bb = cui->bb;
5630 micro_operation mo;
5631
5632 mo.type = type;
5633 mo.u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
5634 mo.insn = cui->insn;
5635
5636 if (type == MO_VAL_LOC)
5637 {
5638 rtx oloc = loc;
5639 rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
5640 cselib_val *val;
5641
5642 gcc_assert (cui->sets);
5643
5644 if (MEM_P (vloc)
5645 && !REG_P (XEXP (vloc, 0))
5646 && !MEM_P (XEXP (vloc, 0)))
5647 {
5648 rtx mloc = vloc;
5649 machine_mode address_mode = get_address_mode (mloc);
5650 cselib_val *val
5651 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5652 GET_MODE (mloc));
5653
5654 if (val && !cselib_preserved_value_p (val))
5655 preserve_value (val);
5656 }
5657
5658 if (CONSTANT_P (vloc)
5659 && (GET_CODE (vloc) != CONST || non_suitable_const (vloc)))
5660 /* For constants don't look up any value. */;
5661 else if (!VAR_LOC_UNKNOWN_P (vloc) && !unsuitable_loc (vloc)
5662 && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
5663 {
5664 machine_mode mode2;
5665 enum micro_operation_type type2;
5666 rtx nloc = NULL;
5667 bool resolvable = REG_P (vloc) || MEM_P (vloc);
5668
5669 if (resolvable)
5670 nloc = replace_expr_with_values (vloc);
5671
5672 if (nloc)
5673 {
5674 oloc = shallow_copy_rtx (oloc);
5675 PAT_VAR_LOCATION_LOC (oloc) = nloc;
5676 }
5677
5678 oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);
5679
5680 type2 = use_type (vloc, 0, &mode2);
5681
5682 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5683 || type2 == MO_CLOBBER);
5684
5685 if (type2 == MO_CLOBBER
5686 && !cselib_preserved_value_p (val))
5687 {
5688 VAL_NEEDS_RESOLUTION (oloc) = resolvable;
5689 preserve_value (val);
5690 }
5691 }
5692 else if (!VAR_LOC_UNKNOWN_P (vloc))
5693 {
5694 oloc = shallow_copy_rtx (oloc);
5695 PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
5696 }
5697
5698 mo.u.loc = oloc;
5699 }
5700 else if (type == MO_VAL_USE)
5701 {
5702 machine_mode mode2 = VOIDmode;
5703 enum micro_operation_type type2;
5704 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5705 rtx vloc, oloc = loc, nloc;
5706
5707 gcc_assert (cui->sets);
5708
5709 if (MEM_P (oloc)
5710 && !REG_P (XEXP (oloc, 0))
5711 && !MEM_P (XEXP (oloc, 0)))
5712 {
5713 rtx mloc = oloc;
5714 machine_mode address_mode = get_address_mode (mloc);
5715 cselib_val *val
5716 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5717 GET_MODE (mloc));
5718
5719 if (val && !cselib_preserved_value_p (val))
5720 preserve_value (val);
5721 }
5722
5723 type2 = use_type (loc, 0, &mode2);
5724
5725 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5726 || type2 == MO_CLOBBER);
5727
5728 if (type2 == MO_USE)
5729 vloc = var_lowpart (mode2, loc);
5730 else
5731 vloc = oloc;
5732
5733 /* The loc of a MO_VAL_USE may have two forms:
5734
5735 (concat val src): val is at src, a value-based
5736 representation.
5737
5738 (concat (concat val use) src): same as above, with use as
5739 the MO_USE tracked value, if it differs from src.
5740
5741 */
5742
5743 gcc_checking_assert (REG_P (loc) || MEM_P (loc));
5744 nloc = replace_expr_with_values (loc);
5745 if (!nloc)
5746 nloc = oloc;
5747
5748 if (vloc != nloc)
5749 oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
5750 else
5751 oloc = val->val_rtx;
5752
5753 mo.u.loc = gen_rtx_CONCAT (mode, oloc, nloc);
5754
5755 if (type2 == MO_USE)
5756 VAL_HOLDS_TRACK_EXPR (mo.u.loc) = 1;
5757 if (!cselib_preserved_value_p (val))
5758 {
5759 VAL_NEEDS_RESOLUTION (mo.u.loc) = 1;
5760 preserve_value (val);
5761 }
5762 }
5763 else
5764 gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);
5765
5766 if (dump_file && (dump_flags & TDF_DETAILS))
5767 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
5768 VTI (bb)->mos.safe_push (mo);
5769 }
5770 }
5771
5772 /* Helper function for finding all uses of REG/MEM in X in insn INSN. */
5773
5774 static void
5775 add_uses_1 (rtx *x, void *cui)
5776 {
5777 subrtx_var_iterator::array_type array;
5778 FOR_EACH_SUBRTX_VAR (iter, array, *x, NONCONST)
5779 add_uses (*iter, (struct count_use_info *) cui);
5780 }
5781
5782 /* This is the value used during expansion of locations. We want it
5783 to be unbounded, so that variables expanded deep in a recursion
5784 nest are fully evaluated, so that their values are cached
5785 correctly. We avoid recursion cycles through other means, and we
5786 don't unshare RTL, so excess complexity is not a problem. */
5787 #define EXPR_DEPTH (INT_MAX)
5788 /* We use this to keep too-complex expressions from being emitted as
5789 location notes, and from there into debug information. Users can trade
5790 compile time for ridiculously complex expressions, although they're
5791 seldom useful, and they may often have to be discarded as not
5792 representable anyway. */
5793 #define EXPR_USE_DEPTH (PARAM_VALUE (PARAM_MAX_VARTRACK_EXPR_DEPTH))
5794
5795 /* Attempt to reverse the EXPR operation in the debug info and record
5796 it in the cselib table. Say for reg1 = reg2 + 6 even when reg2 is
5797 no longer live we can express its value as VAL - 6. */
5798
5799 static void
5800 reverse_op (rtx val, const_rtx expr, rtx_insn *insn)
5801 {
5802 rtx src, arg, ret;
5803 cselib_val *v;
5804 struct elt_loc_list *l;
5805 enum rtx_code code;
5806 int count;
5807
5808 if (GET_CODE (expr) != SET)
5809 return;
5810
5811 if (!REG_P (SET_DEST (expr)) || GET_MODE (val) != GET_MODE (SET_DEST (expr)))
5812 return;
5813
5814 src = SET_SRC (expr);
5815 switch (GET_CODE (src))
5816 {
5817 case PLUS:
5818 case MINUS:
5819 case XOR:
5820 case NOT:
5821 case NEG:
5822 if (!REG_P (XEXP (src, 0)))
5823 return;
5824 break;
5825 case SIGN_EXTEND:
5826 case ZERO_EXTEND:
5827 if (!REG_P (XEXP (src, 0)) && !MEM_P (XEXP (src, 0)))
5828 return;
5829 break;
5830 default:
5831 return;
5832 }
5833
5834 if (!SCALAR_INT_MODE_P (GET_MODE (src)) || XEXP (src, 0) == cfa_base_rtx)
5835 return;
5836
5837 v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0, VOIDmode);
5838 if (!v || !cselib_preserved_value_p (v))
5839 return;
5840
5841 /* Use canonical V to avoid creating multiple redundant expressions
5842 for different VALUES equivalent to V. */
5843 v = canonical_cselib_val (v);
5844
5845 /* Adding a reverse op isn't useful if V already has an always valid
5846 location. Ignore ENTRY_VALUE: while it is always constant, we should
5847 prefer non-ENTRY_VALUE locations whenever possible. */
5848 for (l = v->locs, count = 0; l; l = l->next, count++)
5849 if (CONSTANT_P (l->loc)
5850 && (GET_CODE (l->loc) != CONST || !references_value_p (l->loc, 0)))
5851 return;
5852 /* Avoid creating too large locs lists. */
5853 else if (count == PARAM_VALUE (PARAM_MAX_VARTRACK_REVERSE_OP_SIZE))
5854 return;
5855
5856 switch (GET_CODE (src))
5857 {
5858 case NOT:
5859 case NEG:
5860 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5861 return;
5862 ret = gen_rtx_fmt_e (GET_CODE (src), GET_MODE (val), val);
5863 break;
5864 case SIGN_EXTEND:
5865 case ZERO_EXTEND:
5866 ret = gen_lowpart_SUBREG (GET_MODE (v->val_rtx), val);
5867 break;
5868 case XOR:
5869 code = XOR;
5870 goto binary;
5871 case PLUS:
5872 code = MINUS;
5873 goto binary;
5874 case MINUS:
5875 code = PLUS;
5876 goto binary;
5877 binary:
5878 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5879 return;
5880 arg = XEXP (src, 1);
5881 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5882 {
5883 arg = cselib_expand_value_rtx (arg, scratch_regs, 5);
5884 if (arg == NULL_RTX)
5885 return;
5886 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5887 return;
5888 }
5889 ret = simplify_gen_binary (code, GET_MODE (val), val, arg);
5890 if (ret == val)
5891 /* Ensure ret isn't VALUE itself (which can happen e.g. for
5892 (plus (reg1) (reg2)) when reg2 is known to be 0), as that
5893 breaks a lot of routines during var-tracking. */
5894 ret = gen_rtx_fmt_ee (PLUS, GET_MODE (val), val, const0_rtx);
5895 break;
5896 default:
5897 gcc_unreachable ();
5898 }
5899
5900 cselib_add_permanent_equiv (v, ret, insn);
5901 }
5902
5903 /* Add a store (a register or memory reference) LOC, if it is to be
5904 tracked, to VTI (bb)->mos. EXPR is the RTL expression containing the
5905 store. CUIP->insn is the instruction of which LOC is a part. */
5906
5907 static void
5908 add_stores (rtx loc, const_rtx expr, void *cuip)
5909 {
5910 machine_mode mode = VOIDmode, mode2;
5911 struct count_use_info *cui = (struct count_use_info *)cuip;
5912 basic_block bb = cui->bb;
5913 micro_operation mo;
5914 rtx oloc = loc, nloc, src = NULL;
5915 enum micro_operation_type type = use_type (loc, cui, &mode);
5916 bool track_p = false;
5917 cselib_val *v;
5918 bool resolve, preserve;
5919
5920 if (type == MO_CLOBBER)
5921 return;
5922
5923 mode2 = mode;
5924
5925 if (REG_P (loc))
5926 {
5927 gcc_assert (loc != cfa_base_rtx);
5928 if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
5929 || !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
5930 || GET_CODE (expr) == CLOBBER)
5931 {
5932 mo.type = MO_CLOBBER;
5933 mo.u.loc = loc;
5934 if (GET_CODE (expr) == SET
5935 && SET_DEST (expr) == loc
5936 && !unsuitable_loc (SET_SRC (expr))
5937 && find_use_val (loc, mode, cui))
5938 {
5939 gcc_checking_assert (type == MO_VAL_SET);
5940 mo.u.loc = gen_rtx_SET (loc, SET_SRC (expr));
5941 }
5942 }
5943 else
5944 {
5945 if (GET_CODE (expr) == SET
5946 && SET_DEST (expr) == loc
5947 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
5948 src = var_lowpart (mode2, SET_SRC (expr));
5949 loc = var_lowpart (mode2, loc);
5950
5951 if (src == NULL)
5952 {
5953 mo.type = MO_SET;
5954 mo.u.loc = loc;
5955 }
5956 else
5957 {
5958 rtx xexpr = gen_rtx_SET (loc, src);
5959 if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
5960 {
5961 /* If this is an instruction copying (part of) a parameter
5962 passed by invisible reference to its register location,
5963 pretend it's a SET so that the initial memory location
5964 is discarded, as the parameter register can be reused
5965 for other purposes and we do not track locations based
5966 on generic registers. */
5967 if (MEM_P (src)
5968 && REG_EXPR (loc)
5969 && TREE_CODE (REG_EXPR (loc)) == PARM_DECL
5970 && DECL_MODE (REG_EXPR (loc)) != BLKmode
5971 && MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc)))
5972 && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc)), 0)
5973 != arg_pointer_rtx)
5974 mo.type = MO_SET;
5975 else
5976 mo.type = MO_COPY;
5977 }
5978 else
5979 mo.type = MO_SET;
5980 mo.u.loc = xexpr;
5981 }
5982 }
5983 mo.insn = cui->insn;
5984 }
5985 else if (MEM_P (loc)
5986 && ((track_p = use_type (loc, NULL, &mode2) == MO_USE)
5987 || cui->sets))
5988 {
5989 if (MEM_P (loc) && type == MO_VAL_SET
5990 && !REG_P (XEXP (loc, 0))
5991 && !MEM_P (XEXP (loc, 0)))
5992 {
5993 rtx mloc = loc;
5994 machine_mode address_mode = get_address_mode (mloc);
5995 cselib_val *val = cselib_lookup (XEXP (mloc, 0),
5996 address_mode, 0,
5997 GET_MODE (mloc));
5998
5999 if (val && !cselib_preserved_value_p (val))
6000 preserve_value (val);
6001 }
6002
6003 if (GET_CODE (expr) == CLOBBER || !track_p)
6004 {
6005 mo.type = MO_CLOBBER;
6006 mo.u.loc = track_p ? var_lowpart (mode2, loc) : loc;
6007 }
6008 else
6009 {
6010 if (GET_CODE (expr) == SET
6011 && SET_DEST (expr) == loc
6012 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
6013 src = var_lowpart (mode2, SET_SRC (expr));
6014 loc = var_lowpart (mode2, loc);
6015
6016 if (src == NULL)
6017 {
6018 mo.type = MO_SET;
6019 mo.u.loc = loc;
6020 }
6021 else
6022 {
6023 rtx xexpr = gen_rtx_SET (loc, src);
6024 if (same_variable_part_p (SET_SRC (xexpr),
6025 MEM_EXPR (loc),
6026 INT_MEM_OFFSET (loc)))
6027 mo.type = MO_COPY;
6028 else
6029 mo.type = MO_SET;
6030 mo.u.loc = xexpr;
6031 }
6032 }
6033 mo.insn = cui->insn;
6034 }
6035 else
6036 return;
6037
6038 if (type != MO_VAL_SET)
6039 goto log_and_return;
6040
6041 v = find_use_val (oloc, mode, cui);
6042
6043 if (!v)
6044 goto log_and_return;
6045
6046 resolve = preserve = !cselib_preserved_value_p (v);
6047
6048 /* We cannot track values for multiple-part variables, so we track only
6049 locations for tracked parameters passed either by invisible reference
6050 or directly in multiple locations. */
6051 if (track_p
6052 && REG_P (loc)
6053 && REG_EXPR (loc)
6054 && TREE_CODE (REG_EXPR (loc)) == PARM_DECL
6055 && DECL_MODE (REG_EXPR (loc)) != BLKmode
6056 && TREE_CODE (TREE_TYPE (REG_EXPR (loc))) != UNION_TYPE
6057 && ((MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc)))
6058 && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc)), 0) != arg_pointer_rtx)
6059 || (GET_CODE (DECL_INCOMING_RTL (REG_EXPR (loc))) == PARALLEL
6060 && XVECLEN (DECL_INCOMING_RTL (REG_EXPR (loc)), 0) > 1)))
6061 {
6062 /* Although we don't use the value here, it could be used later by the
6063 mere virtue of its existence as the operand of the reverse operation
6064 that gave rise to it (typically extension/truncation). Make sure it
6065 is preserved as required by vt_expand_var_loc_chain. */
6066 if (preserve)
6067 preserve_value (v);
6068 goto log_and_return;
6069 }
6070
6071 if (loc == stack_pointer_rtx
6072 && hard_frame_pointer_adjustment != -1
6073 && preserve)
6074 cselib_set_value_sp_based (v);
6075
6076 nloc = replace_expr_with_values (oloc);
6077 if (nloc)
6078 oloc = nloc;
6079
6080 if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
6081 {
6082 cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0, VOIDmode);
6083
6084 if (oval == v)
6085 return;
6086 gcc_assert (REG_P (oloc) || MEM_P (oloc));
6087
6088 if (oval && !cselib_preserved_value_p (oval))
6089 {
6090 micro_operation moa;
6091
6092 preserve_value (oval);
6093
6094 moa.type = MO_VAL_USE;
6095 moa.u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
6096 VAL_NEEDS_RESOLUTION (moa.u.loc) = 1;
6097 moa.insn = cui->insn;
6098
6099 if (dump_file && (dump_flags & TDF_DETAILS))
6100 log_op_type (moa.u.loc, cui->bb, cui->insn,
6101 moa.type, dump_file);
6102 VTI (bb)->mos.safe_push (moa);
6103 }
6104
6105 resolve = false;
6106 }
6107 else if (resolve && GET_CODE (mo.u.loc) == SET)
6108 {
6109 if (REG_P (SET_SRC (expr)) || MEM_P (SET_SRC (expr)))
6110 nloc = replace_expr_with_values (SET_SRC (expr));
6111 else
6112 nloc = NULL_RTX;
6113
6114 	  /* Avoid a mode mismatch between mo.u.loc's lowpart SET and EXPR.  */
6115 if (!nloc && mode != mode2)
6116 {
6117 nloc = SET_SRC (expr);
6118 gcc_assert (oloc == SET_DEST (expr));
6119 }
6120
6121 if (nloc && nloc != SET_SRC (mo.u.loc))
6122 oloc = gen_rtx_SET (oloc, nloc);
6123 else
6124 {
6125 if (oloc == SET_DEST (mo.u.loc))
6126 /* No point in duplicating. */
6127 oloc = mo.u.loc;
6128 if (!REG_P (SET_SRC (mo.u.loc)))
6129 resolve = false;
6130 }
6131 }
6132 else if (!resolve)
6133 {
6134 if (GET_CODE (mo.u.loc) == SET
6135 && oloc == SET_DEST (mo.u.loc))
6136 /* No point in duplicating. */
6137 oloc = mo.u.loc;
6138 }
6139 else
6140 resolve = false;
6141
6142 loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);
6143
6144 if (mo.u.loc != oloc)
6145 loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, mo.u.loc);
6146
6147 /* The loc of a MO_VAL_SET may have various forms:
6148
6149 (concat val dst): dst now holds val
6150
6151 (concat val (set dst src)): dst now holds val, copied from src
6152
6153 (concat (concat val dstv) dst): dst now holds val; dstv is dst
6154 after replacing mems and non-top-level regs with values.
6155
6156 (concat (concat val dstv) (set dst src)): dst now holds val,
6157 copied from src. dstv is a value-based representation of dst, if
6158 it differs from dst. If resolution is needed, src is a REG, and
6159 its mode is the same as that of val.
6160
6161 (concat (concat val (set dstv srcv)) (set dst src)): src
6162 copied to dst, holding val. dstv and srcv are value-based
6163 representations of dst and src, respectively.
6164
6165 */
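  /* As a concrete (hypothetical) instance of the second form above:
     for an insn (set (reg 1) (reg 2)) whose destination is known to
     hold VALUE V1, the micro operation loc would be schematically
     (concat V1 (set (reg 1) (reg 2))).  */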
6166
6167 if (GET_CODE (PATTERN (cui->insn)) != COND_EXEC)
6168 reverse_op (v->val_rtx, expr, cui->insn);
6169
6170 mo.u.loc = loc;
6171
6172 if (track_p)
6173 VAL_HOLDS_TRACK_EXPR (loc) = 1;
6174 if (preserve)
6175 {
6176 VAL_NEEDS_RESOLUTION (loc) = resolve;
6177 preserve_value (v);
6178 }
6179 if (mo.type == MO_CLOBBER)
6180 VAL_EXPR_IS_CLOBBERED (loc) = 1;
6181 if (mo.type == MO_COPY)
6182 VAL_EXPR_IS_COPIED (loc) = 1;
6183
6184 mo.type = MO_VAL_SET;
6185
6186 log_and_return:
6187 if (dump_file && (dump_flags & TDF_DETAILS))
6188 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
6189 VTI (bb)->mos.safe_push (mo);
6190 }
6191
6192 /* Arguments to the call. */
6193 static rtx call_arguments;
6194
6195 /* Compute call_arguments for the call insn INSN in basic block BB.  */
6196
6197 static void
6198 prepare_call_arguments (basic_block bb, rtx_insn *insn)
6199 {
6200 rtx link, x, call;
6201 rtx prev, cur, next;
6202 rtx this_arg = NULL_RTX;
6203 tree type = NULL_TREE, t, fndecl = NULL_TREE;
6204 tree obj_type_ref = NULL_TREE;
6205 CUMULATIVE_ARGS args_so_far_v;
6206 cumulative_args_t args_so_far;
6207
6208 memset (&args_so_far_v, 0, sizeof (args_so_far_v));
6209 args_so_far = pack_cumulative_args (&args_so_far_v);
6210 call = get_call_rtx_from (insn);
6211 if (call)
6212 {
6213 if (GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
6214 {
6215 rtx symbol = XEXP (XEXP (call, 0), 0);
6216 if (SYMBOL_REF_DECL (symbol))
6217 fndecl = SYMBOL_REF_DECL (symbol);
6218 }
6219 if (fndecl == NULL_TREE)
6220 fndecl = MEM_EXPR (XEXP (call, 0));
6221 if (fndecl
6222 && TREE_CODE (TREE_TYPE (fndecl)) != FUNCTION_TYPE
6223 && TREE_CODE (TREE_TYPE (fndecl)) != METHOD_TYPE)
6224 fndecl = NULL_TREE;
6225 if (fndecl && TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
6226 type = TREE_TYPE (fndecl);
6227 if (fndecl && TREE_CODE (fndecl) != FUNCTION_DECL)
6228 {
6229 if (TREE_CODE (fndecl) == INDIRECT_REF
6230 && TREE_CODE (TREE_OPERAND (fndecl, 0)) == OBJ_TYPE_REF)
6231 obj_type_ref = TREE_OPERAND (fndecl, 0);
6232 fndecl = NULL_TREE;
6233 }
6234 if (type)
6235 {
6236 for (t = TYPE_ARG_TYPES (type); t && t != void_list_node;
6237 t = TREE_CHAIN (t))
6238 if (TREE_CODE (TREE_VALUE (t)) == REFERENCE_TYPE
6239 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_VALUE (t))))
6240 break;
6241 if ((t == NULL || t == void_list_node) && obj_type_ref == NULL_TREE)
6242 type = NULL;
6243 else
6244 {
6245 int nargs ATTRIBUTE_UNUSED = list_length (TYPE_ARG_TYPES (type));
6246 link = CALL_INSN_FUNCTION_USAGE (insn);
6247 #ifndef PCC_STATIC_STRUCT_RETURN
6248 if (aggregate_value_p (TREE_TYPE (type), type)
6249 && targetm.calls.struct_value_rtx (type, 0) == 0)
6250 {
6251 tree struct_addr = build_pointer_type (TREE_TYPE (type));
6252 machine_mode mode = TYPE_MODE (struct_addr);
6253 rtx reg;
6254 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
6255 nargs + 1);
6256 reg = targetm.calls.function_arg (args_so_far, mode,
6257 struct_addr, true);
6258 targetm.calls.function_arg_advance (args_so_far, mode,
6259 struct_addr, true);
6260 if (reg == NULL_RTX)
6261 {
6262 for (; link; link = XEXP (link, 1))
6263 if (GET_CODE (XEXP (link, 0)) == USE
6264 && MEM_P (XEXP (XEXP (link, 0), 0)))
6265 {
6266 link = XEXP (link, 1);
6267 break;
6268 }
6269 }
6270 }
6271 else
6272 #endif
6273 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
6274 nargs);
6275 if (obj_type_ref && TYPE_ARG_TYPES (type) != void_list_node)
6276 {
6277 machine_mode mode;
6278 t = TYPE_ARG_TYPES (type);
6279 mode = TYPE_MODE (TREE_VALUE (t));
6280 this_arg = targetm.calls.function_arg (args_so_far, mode,
6281 TREE_VALUE (t), true);
6282 if (this_arg && !REG_P (this_arg))
6283 this_arg = NULL_RTX;
6284 else if (this_arg == NULL_RTX)
6285 {
6286 for (; link; link = XEXP (link, 1))
6287 if (GET_CODE (XEXP (link, 0)) == USE
6288 && MEM_P (XEXP (XEXP (link, 0), 0)))
6289 {
6290 this_arg = XEXP (XEXP (link, 0), 0);
6291 break;
6292 }
6293 }
6294 }
6295 }
6296 }
6297 }
6298 t = type ? TYPE_ARG_TYPES (type) : NULL_TREE;
6299
6300 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
6301 if (GET_CODE (XEXP (link, 0)) == USE)
6302 {
6303 rtx item = NULL_RTX;
6304 x = XEXP (XEXP (link, 0), 0);
6305 if (GET_MODE (link) == VOIDmode
6306 || GET_MODE (link) == BLKmode
6307 || (GET_MODE (link) != GET_MODE (x)
6308 && ((GET_MODE_CLASS (GET_MODE (link)) != MODE_INT
6309 && GET_MODE_CLASS (GET_MODE (link)) != MODE_PARTIAL_INT)
6310 || (GET_MODE_CLASS (GET_MODE (x)) != MODE_INT
6311 && GET_MODE_CLASS (GET_MODE (x)) != MODE_PARTIAL_INT))))
6312 /* Can't do anything for these, if the original type mode
6313 isn't known or can't be converted. */;
6314 else if (REG_P (x))
6315 {
6316 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6317 if (val && cselib_preserved_value_p (val))
6318 item = val->val_rtx;
6319 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
6320 || GET_MODE_CLASS (GET_MODE (x)) == MODE_PARTIAL_INT)
6321 {
6322 machine_mode mode = GET_MODE (x);
6323
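		  /* X itself has no preserved VALUE; as a fallback, try
		     progressively wider integer modes up to a word, in
		     case the same register was preserved in a wider
		     mode.  */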
6324 while ((mode = GET_MODE_WIDER_MODE (mode)) != VOIDmode
6325 && GET_MODE_BITSIZE (mode) <= BITS_PER_WORD)
6326 {
6327 rtx reg = simplify_subreg (mode, x, GET_MODE (x), 0);
6328
6329 if (reg == NULL_RTX || !REG_P (reg))
6330 continue;
6331 val = cselib_lookup (reg, mode, 0, VOIDmode);
6332 if (val && cselib_preserved_value_p (val))
6333 {
6334 item = val->val_rtx;
6335 break;
6336 }
6337 }
6338 }
6339 }
6340 else if (MEM_P (x))
6341 {
6342 rtx mem = x;
6343 cselib_val *val;
6344
6345 if (!frame_pointer_needed)
6346 {
6347 struct adjust_mem_data amd;
6348 amd.mem_mode = VOIDmode;
6349 amd.stack_adjust = -VTI (bb)->out.stack_adjust;
6350 amd.side_effects = NULL;
6351 amd.store = true;
6352 mem = simplify_replace_fn_rtx (mem, NULL_RTX, adjust_mems,
6353 &amd);
6354 gcc_assert (amd.side_effects == NULL_RTX);
6355 }
6356 val = cselib_lookup (mem, GET_MODE (mem), 0, VOIDmode);
6357 if (val && cselib_preserved_value_p (val))
6358 item = val->val_rtx;
6359 else if (GET_MODE_CLASS (GET_MODE (mem)) != MODE_INT
6360 && GET_MODE_CLASS (GET_MODE (mem)) != MODE_PARTIAL_INT)
6361 {
6362 		/* For a non-integer stack argument, also check whether
6363 		   it was initialized from an integer.  */
6364 machine_mode imode = int_mode_for_mode (GET_MODE (mem));
6365 if (imode != GET_MODE (mem) && imode != BLKmode)
6366 {
6367 val = cselib_lookup (adjust_address_nv (mem, imode, 0),
6368 imode, 0, VOIDmode);
6369 if (val && cselib_preserved_value_p (val))
6370 item = lowpart_subreg (GET_MODE (x), val->val_rtx,
6371 imode);
6372 }
6373 }
6374 }
6375 if (item)
6376 {
6377 rtx x2 = x;
6378 if (GET_MODE (item) != GET_MODE (link))
6379 item = lowpart_subreg (GET_MODE (link), item, GET_MODE (item));
6380 if (GET_MODE (x2) != GET_MODE (link))
6381 x2 = lowpart_subreg (GET_MODE (link), x2, GET_MODE (x2));
6382 item = gen_rtx_CONCAT (GET_MODE (link), x2, item);
6383 call_arguments
6384 = gen_rtx_EXPR_LIST (VOIDmode, item, call_arguments);
6385 }
6386 if (t && t != void_list_node)
6387 {
6388 tree argtype = TREE_VALUE (t);
6389 machine_mode mode = TYPE_MODE (argtype);
6390 rtx reg;
6391 if (pass_by_reference (&args_so_far_v, mode, argtype, true))
6392 {
6393 argtype = build_pointer_type (argtype);
6394 mode = TYPE_MODE (argtype);
6395 }
6396 reg = targetm.calls.function_arg (args_so_far, mode,
6397 argtype, true);
6398 if (TREE_CODE (argtype) == REFERENCE_TYPE
6399 && INTEGRAL_TYPE_P (TREE_TYPE (argtype))
6400 && reg
6401 && REG_P (reg)
6402 && GET_MODE (reg) == mode
6403 && (GET_MODE_CLASS (mode) == MODE_INT
6404 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
6405 && REG_P (x)
6406 && REGNO (x) == REGNO (reg)
6407 && GET_MODE (x) == mode
6408 && item)
6409 {
6410 machine_mode indmode
6411 = TYPE_MODE (TREE_TYPE (argtype));
6412 rtx mem = gen_rtx_MEM (indmode, x);
6413 cselib_val *val = cselib_lookup (mem, indmode, 0, VOIDmode);
6414 if (val && cselib_preserved_value_p (val))
6415 {
6416 item = gen_rtx_CONCAT (indmode, mem, val->val_rtx);
6417 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6418 call_arguments);
6419 }
6420 else
6421 {
6422 struct elt_loc_list *l;
6423 tree initial;
6424
6425 		  /* Try harder: when passing the address of a constant
6426 		     pool integer, it can easily be read back.  */
6427 item = XEXP (item, 1);
6428 if (GET_CODE (item) == SUBREG)
6429 item = SUBREG_REG (item);
6430 gcc_assert (GET_CODE (item) == VALUE);
6431 val = CSELIB_VAL_PTR (item);
6432 for (l = val->locs; l; l = l->next)
6433 if (GET_CODE (l->loc) == SYMBOL_REF
6434 && TREE_CONSTANT_POOL_ADDRESS_P (l->loc)
6435 && SYMBOL_REF_DECL (l->loc)
6436 && DECL_INITIAL (SYMBOL_REF_DECL (l->loc)))
6437 {
6438 initial = DECL_INITIAL (SYMBOL_REF_DECL (l->loc));
6439 if (tree_fits_shwi_p (initial))
6440 {
6441 item = GEN_INT (tree_to_shwi (initial));
6442 item = gen_rtx_CONCAT (indmode, mem, item);
6443 call_arguments
6444 = gen_rtx_EXPR_LIST (VOIDmode, item,
6445 call_arguments);
6446 }
6447 break;
6448 }
6449 }
6450 }
6451 targetm.calls.function_arg_advance (args_so_far, mode,
6452 argtype, true);
6453 t = TREE_CHAIN (t);
6454 }
6455 }
6456
6457 /* Add debug arguments. */
6458 if (fndecl
6459 && TREE_CODE (fndecl) == FUNCTION_DECL
6460 && DECL_HAS_DEBUG_ARGS_P (fndecl))
6461 {
6462 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (fndecl);
6463 if (debug_args)
6464 {
6465 unsigned int ix;
6466 tree param;
6467 for (ix = 0; vec_safe_iterate (*debug_args, ix, &param); ix += 2)
6468 {
6469 rtx item;
6470 tree dtemp = (**debug_args)[ix + 1];
6471 machine_mode mode = DECL_MODE (dtemp);
6472 item = gen_rtx_DEBUG_PARAMETER_REF (mode, param);
6473 item = gen_rtx_CONCAT (mode, item, DECL_RTL_KNOWN_SET (dtemp));
6474 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6475 call_arguments);
6476 }
6477 }
6478 }
6479
6480   /* Reverse the call_arguments chain back into argument order.  */
6481 prev = NULL_RTX;
6482 for (cur = call_arguments; cur; cur = next)
6483 {
6484 next = XEXP (cur, 1);
6485 XEXP (cur, 1) = prev;
6486 prev = cur;
6487 }
6488 call_arguments = prev;
6489
6490 x = get_call_rtx_from (insn);
6491 if (x)
6492 {
6493 x = XEXP (XEXP (x, 0), 0);
6494 if (GET_CODE (x) == SYMBOL_REF)
6495 /* Don't record anything. */;
6496 else if (CONSTANT_P (x))
6497 {
6498 x = gen_rtx_CONCAT (GET_MODE (x) == VOIDmode ? Pmode : GET_MODE (x),
6499 pc_rtx, x);
6500 call_arguments
6501 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6502 }
6503 else
6504 {
6505 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6506 if (val && cselib_preserved_value_p (val))
6507 {
6508 x = gen_rtx_CONCAT (GET_MODE (x), pc_rtx, val->val_rtx);
6509 call_arguments
6510 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6511 }
6512 }
6513 }
6514 if (this_arg)
6515 {
6516 machine_mode mode
6517 = TYPE_MODE (TREE_TYPE (OBJ_TYPE_REF_EXPR (obj_type_ref)));
6518 rtx clobbered = gen_rtx_MEM (mode, this_arg);
6519 HOST_WIDE_INT token
6520 = tree_to_shwi (OBJ_TYPE_REF_TOKEN (obj_type_ref));
6521 if (token)
6522 clobbered = plus_constant (mode, clobbered,
6523 token * GET_MODE_SIZE (mode));
6524 clobbered = gen_rtx_MEM (mode, clobbered);
6525 x = gen_rtx_CONCAT (mode, gen_rtx_CLOBBER (VOIDmode, pc_rtx), clobbered);
6526 call_arguments
6527 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6528 }
6529 }
6530
6531 /* Callback for cselib_record_sets_hook.  It records the uses and
6532    stores in an insn as micro operations, after cselib_record_sets
6533    has analyzed the sets in the insn but before it modifies the
6534    stored values in its internal tables.  When cselib is not being
6535    used in the first place, this function is called directly
6536    instead, in which case SETS and N_SETS will be 0.  */
6537
6538 static void
6539 add_with_sets (rtx_insn *insn, struct cselib_set *sets, int n_sets)
6540 {
6541 basic_block bb = BLOCK_FOR_INSN (insn);
6542 int n1, n2;
6543 struct count_use_info cui;
6544 micro_operation *mos;
6545
6546 cselib_hook_called = true;
6547
6548 cui.insn = insn;
6549 cui.bb = bb;
6550 cui.sets = sets;
6551 cui.n_sets = n_sets;
6552
6553 n1 = VTI (bb)->mos.length ();
6554 cui.store_p = false;
6555 note_uses (&PATTERN (insn), add_uses_1, &cui);
6556 n2 = VTI (bb)->mos.length () - 1;
6557 mos = VTI (bb)->mos.address ();
6558
6559 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
6560 MO_VAL_LOC last. */
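  /* A sketch of the reordering below: a two-pointer partition in
     which N1 advances past leading MO_USEs, N2 retreats past trailing
     non-MO_USEs, and out-of-place pairs are swapped; the second loop
     then partitions the same way to push MO_VAL_LOCs to the end.  */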
6561 while (n1 < n2)
6562 {
6563 while (n1 < n2 && mos[n1].type == MO_USE)
6564 n1++;
6565 while (n1 < n2 && mos[n2].type != MO_USE)
6566 n2--;
6567 if (n1 < n2)
6568 std::swap (mos[n1], mos[n2]);
6569 }
6570
6571 n2 = VTI (bb)->mos.length () - 1;
6572 while (n1 < n2)
6573 {
6574 while (n1 < n2 && mos[n1].type != MO_VAL_LOC)
6575 n1++;
6576 while (n1 < n2 && mos[n2].type == MO_VAL_LOC)
6577 n2--;
6578 if (n1 < n2)
6579 std::swap (mos[n1], mos[n2]);
6580 }
6581
6582 if (CALL_P (insn))
6583 {
6584 micro_operation mo;
6585
6586 mo.type = MO_CALL;
6587 mo.insn = insn;
6588 mo.u.loc = call_arguments;
6589 call_arguments = NULL_RTX;
6590
6591 if (dump_file && (dump_flags & TDF_DETAILS))
6592 log_op_type (PATTERN (insn), bb, insn, mo.type, dump_file);
6593 VTI (bb)->mos.safe_push (mo);
6594 }
6595
6596 n1 = VTI (bb)->mos.length ();
6597 /* This will record NEXT_INSN (insn), such that we can
6598 insert notes before it without worrying about any
6599 notes that MO_USEs might emit after the insn. */
6600 cui.store_p = true;
6601 note_stores (PATTERN (insn), add_stores, &cui);
6602 n2 = VTI (bb)->mos.length () - 1;
6603 mos = VTI (bb)->mos.address ();
6604
6605 /* Order the MO_VAL_USEs first (note_stores does nothing
6606 on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
6607 insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */
6608 while (n1 < n2)
6609 {
6610 while (n1 < n2 && mos[n1].type == MO_VAL_USE)
6611 n1++;
6612 while (n1 < n2 && mos[n2].type != MO_VAL_USE)
6613 n2--;
6614 if (n1 < n2)
6615 std::swap (mos[n1], mos[n2]);
6616 }
6617
6618 n2 = VTI (bb)->mos.length () - 1;
6619 while (n1 < n2)
6620 {
6621 while (n1 < n2 && mos[n1].type == MO_CLOBBER)
6622 n1++;
6623 while (n1 < n2 && mos[n2].type != MO_CLOBBER)
6624 n2--;
6625 if (n1 < n2)
6626 std::swap (mos[n1], mos[n2]);
6627 }
6628 }
6629
6630 static enum var_init_status
6631 find_src_status (dataflow_set *in, rtx src)
6632 {
6633 tree decl = NULL_TREE;
6634 enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
6635
6636 if (! flag_var_tracking_uninit)
6637 status = VAR_INIT_STATUS_INITIALIZED;
6638
6639 if (src && REG_P (src))
6640 decl = var_debug_decl (REG_EXPR (src));
6641 else if (src && MEM_P (src))
6642 decl = var_debug_decl (MEM_EXPR (src));
6643
6644 if (src && decl)
6645 status = get_init_value (in, src, dv_from_decl (decl));
6646
6647 return status;
6648 }
6649
6650 /* SRC is the source of an assignment. Use SET to try to find what
6651 was ultimately assigned to SRC. Return that value if known,
6652 otherwise return SRC itself. */
6653
6654 static rtx
6655 find_src_set_src (dataflow_set *set, rtx src)
6656 {
6657 tree decl = NULL_TREE; /* The variable being copied around. */
6658 rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */
6659 variable var;
6660 location_chain nextp;
6661 int i;
6662 bool found;
6663
6664 if (src && REG_P (src))
6665 decl = var_debug_decl (REG_EXPR (src));
6666 else if (src && MEM_P (src))
6667 decl = var_debug_decl (MEM_EXPR (src));
6668
6669 if (src && decl)
6670 {
6671 decl_or_value dv = dv_from_decl (decl);
6672
6673 var = shared_hash_find (set->vars, dv);
6674 if (var)
6675 {
6676 found = false;
6677 for (i = 0; i < var->n_var_parts && !found; i++)
6678 for (nextp = var->var_part[i].loc_chain; nextp && !found;
6679 nextp = nextp->next)
6680 if (rtx_equal_p (nextp->loc, src))
6681 {
6682 set_src = nextp->set_src;
6683 found = true;
6684 }
6685
6686 }
6687 }
6688
6689 return set_src;
6690 }
6691
6692 /* Compute the changes of variable locations in the basic block BB. */
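/* Schematically (an illustrative summary of the code below):

     OUT (BB) := IN (BB);
     for each micro operation MO of BB, in order:
       OUT (BB) := the effect of MO applied to OUT (BB);
     return whether OUT (BB) changed.  */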
6693
6694 static bool
6695 compute_bb_dataflow (basic_block bb)
6696 {
6697 unsigned int i;
6698 micro_operation *mo;
6699 bool changed;
6700 dataflow_set old_out;
6701 dataflow_set *in = &VTI (bb)->in;
6702 dataflow_set *out = &VTI (bb)->out;
6703
6704 dataflow_set_init (&old_out);
6705 dataflow_set_copy (&old_out, out);
6706 dataflow_set_copy (out, in);
6707
6708 if (MAY_HAVE_DEBUG_INSNS)
6709 local_get_addr_cache = new hash_map<rtx, rtx>;
6710
6711 FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
6712 {
6713 rtx_insn *insn = mo->insn;
6714
6715 switch (mo->type)
6716 {
6717 case MO_CALL:
6718 dataflow_set_clear_at_call (out);
6719 break;
6720
6721 case MO_USE:
6722 {
6723 rtx loc = mo->u.loc;
6724
6725 if (REG_P (loc))
6726 var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6727 else if (MEM_P (loc))
6728 var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6729 }
6730 break;
6731
6732 case MO_VAL_LOC:
6733 {
6734 rtx loc = mo->u.loc;
6735 rtx val, vloc;
6736 tree var;
6737
6738 if (GET_CODE (loc) == CONCAT)
6739 {
6740 val = XEXP (loc, 0);
6741 vloc = XEXP (loc, 1);
6742 }
6743 else
6744 {
6745 val = NULL_RTX;
6746 vloc = loc;
6747 }
6748
6749 var = PAT_VAR_LOCATION_DECL (vloc);
6750
6751 clobber_variable_part (out, NULL_RTX,
6752 dv_from_decl (var), 0, NULL_RTX);
6753 if (val)
6754 {
6755 if (VAL_NEEDS_RESOLUTION (loc))
6756 val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
6757 set_variable_part (out, val, dv_from_decl (var), 0,
6758 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6759 INSERT);
6760 }
6761 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
6762 set_variable_part (out, PAT_VAR_LOCATION_LOC (vloc),
6763 dv_from_decl (var), 0,
6764 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6765 INSERT);
6766 }
6767 break;
6768
6769 case MO_VAL_USE:
6770 {
6771 rtx loc = mo->u.loc;
6772 rtx val, vloc, uloc;
6773
6774 vloc = uloc = XEXP (loc, 1);
6775 val = XEXP (loc, 0);
6776
6777 if (GET_CODE (val) == CONCAT)
6778 {
6779 uloc = XEXP (val, 1);
6780 val = XEXP (val, 0);
6781 }
6782
6783 if (VAL_NEEDS_RESOLUTION (loc))
6784 val_resolve (out, val, vloc, insn);
6785 else
6786 val_store (out, val, uloc, insn, false);
6787
6788 if (VAL_HOLDS_TRACK_EXPR (loc))
6789 {
6790 if (GET_CODE (uloc) == REG)
6791 var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6792 NULL);
6793 else if (GET_CODE (uloc) == MEM)
6794 var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6795 NULL);
6796 }
6797 }
6798 break;
6799
6800 case MO_VAL_SET:
6801 {
6802 rtx loc = mo->u.loc;
6803 rtx val, vloc, uloc;
6804 rtx dstv, srcv;
6805
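	    /* Peel apart the CONCAT forms documented in add_stores
	       above: VAL is the VALUE, ULOC the original destination
	       (or its SET), and DSTV/SRCV their value-based
	       counterparts when present.  */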
6806 vloc = loc;
6807 uloc = XEXP (vloc, 1);
6808 val = XEXP (vloc, 0);
6809 vloc = uloc;
6810
6811 if (GET_CODE (uloc) == SET)
6812 {
6813 dstv = SET_DEST (uloc);
6814 srcv = SET_SRC (uloc);
6815 }
6816 else
6817 {
6818 dstv = uloc;
6819 srcv = NULL;
6820 }
6821
6822 if (GET_CODE (val) == CONCAT)
6823 {
6824 dstv = vloc = XEXP (val, 1);
6825 val = XEXP (val, 0);
6826 }
6827
6828 if (GET_CODE (vloc) == SET)
6829 {
6830 srcv = SET_SRC (vloc);
6831
6832 gcc_assert (val != srcv);
6833 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
6834
6835 dstv = vloc = SET_DEST (vloc);
6836
6837 if (VAL_NEEDS_RESOLUTION (loc))
6838 val_resolve (out, val, srcv, insn);
6839 }
6840 else if (VAL_NEEDS_RESOLUTION (loc))
6841 {
6842 gcc_assert (GET_CODE (uloc) == SET
6843 && GET_CODE (SET_SRC (uloc)) == REG);
6844 val_resolve (out, val, SET_SRC (uloc), insn);
6845 }
6846
6847 if (VAL_HOLDS_TRACK_EXPR (loc))
6848 {
6849 if (VAL_EXPR_IS_CLOBBERED (loc))
6850 {
6851 if (REG_P (uloc))
6852 var_reg_delete (out, uloc, true);
6853 else if (MEM_P (uloc))
6854 {
6855 gcc_assert (MEM_P (dstv));
6856 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
6857 var_mem_delete (out, dstv, true);
6858 }
6859 }
6860 else
6861 {
6862 bool copied_p = VAL_EXPR_IS_COPIED (loc);
6863 rtx src = NULL, dst = uloc;
6864 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
6865
6866 if (GET_CODE (uloc) == SET)
6867 {
6868 src = SET_SRC (uloc);
6869 dst = SET_DEST (uloc);
6870 }
6871
6872 if (copied_p)
6873 {
6874 if (flag_var_tracking_uninit)
6875 {
6876 status = find_src_status (in, src);
6877
6878 if (status == VAR_INIT_STATUS_UNKNOWN)
6879 status = find_src_status (out, src);
6880 }
6881
6882 src = find_src_set_src (in, src);
6883 }
6884
6885 if (REG_P (dst))
6886 var_reg_delete_and_set (out, dst, !copied_p,
6887 status, srcv);
6888 else if (MEM_P (dst))
6889 {
6890 gcc_assert (MEM_P (dstv));
6891 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
6892 var_mem_delete_and_set (out, dstv, !copied_p,
6893 status, srcv);
6894 }
6895 }
6896 }
6897 else if (REG_P (uloc))
6898 var_regno_delete (out, REGNO (uloc));
6899 else if (MEM_P (uloc))
6900 {
6901 gcc_checking_assert (GET_CODE (vloc) == MEM);
6902 gcc_checking_assert (dstv == vloc);
6903 if (dstv != vloc)
6904 clobber_overlapping_mems (out, vloc);
6905 }
6906
6907 val_store (out, val, dstv, insn, true);
6908 }
6909 break;
6910
6911 case MO_SET:
6912 {
6913 rtx loc = mo->u.loc;
6914 rtx set_src = NULL;
6915
6916 if (GET_CODE (loc) == SET)
6917 {
6918 set_src = SET_SRC (loc);
6919 loc = SET_DEST (loc);
6920 }
6921
6922 if (REG_P (loc))
6923 var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6924 set_src);
6925 else if (MEM_P (loc))
6926 var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6927 set_src);
6928 }
6929 break;
6930
6931 case MO_COPY:
6932 {
6933 rtx loc = mo->u.loc;
6934 enum var_init_status src_status;
6935 rtx set_src = NULL;
6936
6937 if (GET_CODE (loc) == SET)
6938 {
6939 set_src = SET_SRC (loc);
6940 loc = SET_DEST (loc);
6941 }
6942
6943 if (! flag_var_tracking_uninit)
6944 src_status = VAR_INIT_STATUS_INITIALIZED;
6945 else
6946 {
6947 src_status = find_src_status (in, set_src);
6948
6949 if (src_status == VAR_INIT_STATUS_UNKNOWN)
6950 src_status = find_src_status (out, set_src);
6951 }
6952
6953 set_src = find_src_set_src (in, set_src);
6954
6955 if (REG_P (loc))
6956 var_reg_delete_and_set (out, loc, false, src_status, set_src);
6957 else if (MEM_P (loc))
6958 var_mem_delete_and_set (out, loc, false, src_status, set_src);
6959 }
6960 break;
6961
6962 case MO_USE_NO_VAR:
6963 {
6964 rtx loc = mo->u.loc;
6965
6966 if (REG_P (loc))
6967 var_reg_delete (out, loc, false);
6968 else if (MEM_P (loc))
6969 var_mem_delete (out, loc, false);
6970 }
6971 break;
6972
6973 case MO_CLOBBER:
6974 {
6975 rtx loc = mo->u.loc;
6976
6977 if (REG_P (loc))
6978 var_reg_delete (out, loc, true);
6979 else if (MEM_P (loc))
6980 var_mem_delete (out, loc, true);
6981 }
6982 break;
6983
6984 case MO_ADJUST:
6985 out->stack_adjust += mo->u.adjust;
6986 break;
6987 }
6988 }
6989
6990 if (MAY_HAVE_DEBUG_INSNS)
6991 {
6992 delete local_get_addr_cache;
6993 local_get_addr_cache = NULL;
6994
6995 dataflow_set_equiv_regs (out);
6996 shared_hash_htab (out->vars)
6997 ->traverse <dataflow_set *, canonicalize_values_mark> (out);
6998 shared_hash_htab (out->vars)
6999 ->traverse <dataflow_set *, canonicalize_values_star> (out);
7000 #if ENABLE_CHECKING
7001 shared_hash_htab (out->vars)
7002 ->traverse <dataflow_set *, canonicalize_loc_order_check> (out);
7003 #endif
7004 }
7005 changed = dataflow_set_different (&old_out, out);
7006 dataflow_set_destroy (&old_out);
7007 return changed;
7008 }
7009
7010 /* Find the locations of variables in the whole function. */
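/* An illustrative sketch of the two-queue iteration scheme used
   below:

     PENDING := all blocks, keyed by reverse completion order;
     while (PENDING is not empty)
       {
	 swap (WORKLIST, PENDING); clear VISITED;
	 while (WORKLIST is not empty)
	   {
	     BB := the block with the smallest key; mark it VISITED;
	     recompute IN (BB) and OUT (BB);
	     if OUT (BB) changed, queue each successor on WORKLIST
	     when not yet VISITED this round, else on PENDING;
	   }
       }  */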
7011
7012 static bool
7013 vt_find_locations (void)
7014 {
7015 bb_heap_t *worklist = new bb_heap_t (LONG_MIN);
7016 bb_heap_t *pending = new bb_heap_t (LONG_MIN);
7017 sbitmap visited, in_worklist, in_pending;
7018 basic_block bb;
7019 edge e;
7020 int *bb_order;
7021 int *rc_order;
7022 int i;
7023 int htabsz = 0;
7024 int htabmax = PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE);
7025 bool success = true;
7026
7027 timevar_push (TV_VAR_TRACKING_DATAFLOW);
7028 /* Compute reverse completion order of depth first search of the CFG
7029 so that the data-flow runs faster. */
7030 rc_order = XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
7031 bb_order = XNEWVEC (int, last_basic_block_for_fn (cfun));
7032 pre_and_rev_post_order_compute (NULL, rc_order, false);
7033 for (i = 0; i < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; i++)
7034 bb_order[rc_order[i]] = i;
7035 free (rc_order);
7036
7037 visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
7038 in_worklist = sbitmap_alloc (last_basic_block_for_fn (cfun));
7039 in_pending = sbitmap_alloc (last_basic_block_for_fn (cfun));
7040 bitmap_clear (in_worklist);
7041
7042 FOR_EACH_BB_FN (bb, cfun)
7043 pending->insert (bb_order[bb->index], bb);
7044 bitmap_ones (in_pending);
7045
7046 while (success && !pending->empty ())
7047 {
7048 std::swap (worklist, pending);
7049 std::swap (in_worklist, in_pending);
7050
7051 bitmap_clear (visited);
7052
7053 while (!worklist->empty ())
7054 {
7055 bb = worklist->extract_min ();
7056 bitmap_clear_bit (in_worklist, bb->index);
7057 gcc_assert (!bitmap_bit_p (visited, bb->index));
7058 if (!bitmap_bit_p (visited, bb->index))
7059 {
7060 bool changed;
7061 edge_iterator ei;
7062 int oldinsz, oldoutsz;
7063
7064 bitmap_set_bit (visited, bb->index);
7065
7066 if (VTI (bb)->in.vars)
7067 {
7068 htabsz
7069 -= shared_hash_htab (VTI (bb)->in.vars)->size ()
7070 + shared_hash_htab (VTI (bb)->out.vars)->size ();
7071 oldinsz = shared_hash_htab (VTI (bb)->in.vars)->elements ();
7072 oldoutsz
7073 = shared_hash_htab (VTI (bb)->out.vars)->elements ();
7074 }
7075 else
7076 oldinsz = oldoutsz = 0;
7077
7078 if (MAY_HAVE_DEBUG_INSNS)
7079 {
7080 dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
7081 bool first = true, adjust = false;
7082
7083 /* Calculate the IN set as the intersection of
7084 predecessor OUT sets. */
7085
7086 dataflow_set_clear (in);
7087 dst_can_be_shared = true;
7088
7089 FOR_EACH_EDGE (e, ei, bb->preds)
7090 if (!VTI (e->src)->flooded)
7091 gcc_assert (bb_order[bb->index]
7092 <= bb_order[e->src->index]);
7093 else if (first)
7094 {
7095 dataflow_set_copy (in, &VTI (e->src)->out);
7096 first_out = &VTI (e->src)->out;
7097 first = false;
7098 }
7099 else
7100 {
7101 dataflow_set_merge (in, &VTI (e->src)->out);
7102 adjust = true;
7103 }
7104
7105 if (adjust)
7106 {
7107 dataflow_post_merge_adjust (in, &VTI (bb)->permp);
7108 #if ENABLE_CHECKING
7109 /* Merge and merge_adjust should keep entries in
7110 canonical order. */
7111 shared_hash_htab (in->vars)
7112 ->traverse <dataflow_set *,
7113 canonicalize_loc_order_check> (in);
7114 #endif
7115 if (dst_can_be_shared)
7116 {
7117 shared_hash_destroy (in->vars);
7118 in->vars = shared_hash_copy (first_out->vars);
7119 }
7120 }
7121
7122 VTI (bb)->flooded = true;
7123 }
7124 else
7125 {
7126 		  /* Calculate the IN set as the union of predecessor OUT sets.  */
7127 dataflow_set_clear (&VTI (bb)->in);
7128 FOR_EACH_EDGE (e, ei, bb->preds)
7129 dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
7130 }
7131
7132 changed = compute_bb_dataflow (bb);
7133 htabsz += shared_hash_htab (VTI (bb)->in.vars)->size ()
7134 + shared_hash_htab (VTI (bb)->out.vars)->size ();
7135
7136 if (htabmax && htabsz > htabmax)
7137 {
7138 if (MAY_HAVE_DEBUG_INSNS)
7139 inform (DECL_SOURCE_LOCATION (cfun->decl),
7140 "variable tracking size limit exceeded with "
7141 "-fvar-tracking-assignments, retrying without");
7142 else
7143 inform (DECL_SOURCE_LOCATION (cfun->decl),
7144 "variable tracking size limit exceeded");
7145 success = false;
7146 break;
7147 }
7148
7149 if (changed)
7150 {
7151 FOR_EACH_EDGE (e, ei, bb->succs)
7152 {
7153 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7154 continue;
7155
7156 if (bitmap_bit_p (visited, e->dest->index))
7157 {
7158 if (!bitmap_bit_p (in_pending, e->dest->index))
7159 {
7160 /* Send E->DEST to next round. */
7161 bitmap_set_bit (in_pending, e->dest->index);
7162 pending->insert (bb_order[e->dest->index],
7163 e->dest);
7164 }
7165 }
7166 else if (!bitmap_bit_p (in_worklist, e->dest->index))
7167 {
7168 /* Add E->DEST to current round. */
7169 bitmap_set_bit (in_worklist, e->dest->index);
7170 worklist->insert (bb_order[e->dest->index],
7171 e->dest);
7172 }
7173 }
7174 }
7175
7176 if (dump_file)
7177 fprintf (dump_file,
7178 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
7179 bb->index,
7180 (int)shared_hash_htab (VTI (bb)->in.vars)->size (),
7181 oldinsz,
7182 (int)shared_hash_htab (VTI (bb)->out.vars)->size (),
7183 oldoutsz,
7184 (int)worklist->nodes (), (int)pending->nodes (),
7185 htabsz);
7186
7187 if (dump_file && (dump_flags & TDF_DETAILS))
7188 {
7189 fprintf (dump_file, "BB %i IN:\n", bb->index);
7190 dump_dataflow_set (&VTI (bb)->in);
7191 fprintf (dump_file, "BB %i OUT:\n", bb->index);
7192 dump_dataflow_set (&VTI (bb)->out);
7193 }
7194 }
7195 }
7196 }
7197
7198 if (success && MAY_HAVE_DEBUG_INSNS)
7199 FOR_EACH_BB_FN (bb, cfun)
7200 gcc_assert (VTI (bb)->flooded);
7201
7202 free (bb_order);
7203 delete worklist;
7204 delete pending;
7205 sbitmap_free (visited);
7206 sbitmap_free (in_worklist);
7207 sbitmap_free (in_pending);
7208
7209 timevar_pop (TV_VAR_TRACKING_DATAFLOW);
7210 return success;
7211 }
7212
7213 /* Print the contents of attribute list LIST to the dump file.  */
7214
7215 static void
7216 dump_attrs_list (attrs list)
7217 {
7218 for (; list; list = list->next)
7219 {
7220 if (dv_is_decl_p (list->dv))
7221 print_mem_expr (dump_file, dv_as_decl (list->dv));
7222 else
7223 print_rtl_single (dump_file, dv_as_value (list->dv));
7224 fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
7225 }
7226 fprintf (dump_file, "\n");
7227 }
7228
7229 /* Print the information about variable *SLOT to dump file. */
7230
7231 int
7232 dump_var_tracking_slot (variable_def **slot, void *data ATTRIBUTE_UNUSED)
7233 {
7234 variable var = *slot;
7235
7236 dump_var (var);
7237
7238 /* Continue traversing the hash table. */
7239 return 1;
7240 }
7241
7242 /* Print the information about variable VAR to dump file. */
7243
7244 static void
7245 dump_var (variable var)
7246 {
7247 int i;
7248 location_chain node;
7249
7250 if (dv_is_decl_p (var->dv))
7251 {
7252 const_tree decl = dv_as_decl (var->dv);
7253
7254 if (DECL_NAME (decl))
7255 {
7256 fprintf (dump_file, " name: %s",
7257 IDENTIFIER_POINTER (DECL_NAME (decl)));
7258 if (dump_flags & TDF_UID)
7259 fprintf (dump_file, "D.%u", DECL_UID (decl));
7260 }
7261 else if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
7262 fprintf (dump_file, " name: D#%u", DEBUG_TEMP_UID (decl));
7263 else
7264 fprintf (dump_file, " name: D.%u", DECL_UID (decl));
7265 fprintf (dump_file, "\n");
7266 }
7267 else
7268 {
7269 fputc (' ', dump_file);
7270 print_rtl_single (dump_file, dv_as_value (var->dv));
7271 }
7272
7273 for (i = 0; i < var->n_var_parts; i++)
7274 {
7275 fprintf (dump_file, " offset %ld\n",
7276 (long)(var->onepart ? 0 : VAR_PART_OFFSET (var, i)));
7277 for (node = var->var_part[i].loc_chain; node; node = node->next)
7278 {
7279 fprintf (dump_file, " ");
7280 if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
7281 fprintf (dump_file, "[uninit]");
7282 print_rtl_single (dump_file, node->loc);
7283 }
7284 }
7285 }
7286
7287 /* Print the information about variables from hash table VARS to dump file. */
7288
7289 static void
7290 dump_vars (variable_table_type *vars)
7291 {
7292 if (vars->elements () > 0)
7293 {
7294 fprintf (dump_file, "Variables:\n");
7295 vars->traverse <void *, dump_var_tracking_slot> (NULL);
7296 }
7297 }
7298
7299 /* Print the dataflow set SET to dump file. */
7300
7301 static void
7302 dump_dataflow_set (dataflow_set *set)
7303 {
7304 int i;
7305
7306 fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
7307 set->stack_adjust);
7308 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7309 {
7310 if (set->regs[i])
7311 {
7312 fprintf (dump_file, "Reg %d:", i);
7313 dump_attrs_list (set->regs[i]);
7314 }
7315 }
7316 dump_vars (shared_hash_htab (set->vars));
7317 fprintf (dump_file, "\n");
7318 }
7319
7320 /* Print the IN and OUT sets for each basic block to dump file. */
7321
7322 static void
7323 dump_dataflow_sets (void)
7324 {
7325 basic_block bb;
7326
7327 FOR_EACH_BB_FN (bb, cfun)
7328 {
7329 fprintf (dump_file, "\nBasic block %d:\n", bb->index);
7330 fprintf (dump_file, "IN:\n");
7331 dump_dataflow_set (&VTI (bb)->in);
7332 fprintf (dump_file, "OUT:\n");
7333 dump_dataflow_set (&VTI (bb)->out);
7334 }
7335 }
7336
7337 /* Return the variable for DV in dropped_values, inserting one if
7338 requested with INSERT. */
7339
7340 static inline variable
7341 variable_from_dropped (decl_or_value dv, enum insert_option insert)
7342 {
7343 variable_def **slot;
7344 variable empty_var;
7345 onepart_enum_t onepart;
7346
7347 slot = dropped_values->find_slot_with_hash (dv, dv_htab_hash (dv), insert);
7348
7349 if (!slot)
7350 return NULL;
7351
7352 if (*slot)
7353 return *slot;
7354
7355 gcc_checking_assert (insert == INSERT);
7356
7357 onepart = dv_onepart_p (dv);
7358
7359 gcc_checking_assert (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR);
7360
7361 empty_var = onepart_pool (onepart).allocate ();
7362 empty_var->dv = dv;
7363 empty_var->refcount = 1;
7364 empty_var->n_var_parts = 0;
7365 empty_var->onepart = onepart;
7366 empty_var->in_changed_variables = false;
7367 empty_var->var_part[0].loc_chain = NULL;
7368 empty_var->var_part[0].cur_loc = NULL;
7369 VAR_LOC_1PAUX (empty_var) = NULL;
7370 set_dv_changed (dv, true);
7371
7372 *slot = empty_var;
7373
7374 return empty_var;
7375 }
7376
7377 /* Recover the one-part aux from dropped_values. */
7378
7379 static struct onepart_aux *
7380 recover_dropped_1paux (variable var)
7381 {
7382 variable dvar;
7383
7384 gcc_checking_assert (var->onepart);
7385
7386 if (VAR_LOC_1PAUX (var))
7387 return VAR_LOC_1PAUX (var);
7388
7389 if (var->onepart == ONEPART_VDECL)
7390 return NULL;
7391
7392 dvar = variable_from_dropped (var->dv, NO_INSERT);
7393
7394 if (!dvar)
7395 return NULL;
7396
7397 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (dvar);
7398 VAR_LOC_1PAUX (dvar) = NULL;
7399
7400 return VAR_LOC_1PAUX (var);
7401 }
7402
7403 /* Add variable VAR to the hash table of changed variables and,
7404    if it has no locations, delete it from SET's hash table.  */
7405
7406 static void
7407 variable_was_changed (variable var, dataflow_set *set)
7408 {
7409 hashval_t hash = dv_htab_hash (var->dv);
7410
7411 if (emit_notes)
7412 {
7413 variable_def **slot;
7414
7415 /* Remember this decl or VALUE has been added to changed_variables. */
7416 set_dv_changed (var->dv, true);
7417
7418 slot = changed_variables->find_slot_with_hash (var->dv, hash, INSERT);
7419
7420 if (*slot)
7421 {
7422 variable old_var = *slot;
7423 gcc_assert (old_var->in_changed_variables);
7424 old_var->in_changed_variables = false;
7425 if (var != old_var && var->onepart)
7426 {
7427 /* Restore the auxiliary info from an empty variable
7428 previously created for changed_variables, so it is
7429 not lost. */
7430 gcc_checking_assert (!VAR_LOC_1PAUX (var));
7431 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (old_var);
7432 VAR_LOC_1PAUX (old_var) = NULL;
7433 }
7434 variable_htab_free (*slot);
7435 }
7436
7437 if (set && var->n_var_parts == 0)
7438 {
7439 onepart_enum_t onepart = var->onepart;
7440 variable empty_var = NULL;
7441 variable_def **dslot = NULL;
7442
7443 if (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR)
7444 {
7445 dslot = dropped_values->find_slot_with_hash (var->dv,
7446 dv_htab_hash (var->dv),
7447 INSERT);
7448 empty_var = *dslot;
7449
7450 if (empty_var)
7451 {
7452 gcc_checking_assert (!empty_var->in_changed_variables);
7453 if (!VAR_LOC_1PAUX (var))
7454 {
7455 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (empty_var);
7456 VAR_LOC_1PAUX (empty_var) = NULL;
7457 }
7458 else
7459 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
7460 }
7461 }
7462
7463 if (!empty_var)
7464 {
7465 empty_var = onepart_pool (onepart).allocate ();
7466 empty_var->dv = var->dv;
7467 empty_var->refcount = 1;
7468 empty_var->n_var_parts = 0;
7469 empty_var->onepart = onepart;
7470 if (dslot)
7471 {
7472 empty_var->refcount++;
7473 *dslot = empty_var;
7474 }
7475 }
7476 else
7477 empty_var->refcount++;
7478 empty_var->in_changed_variables = true;
7479 *slot = empty_var;
7480 if (onepart)
7481 {
7482 empty_var->var_part[0].loc_chain = NULL;
7483 empty_var->var_part[0].cur_loc = NULL;
7484 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (var);
7485 VAR_LOC_1PAUX (var) = NULL;
7486 }
7487 goto drop_var;
7488 }
7489 else
7490 {
7491 if (var->onepart && !VAR_LOC_1PAUX (var))
7492 recover_dropped_1paux (var);
7493 var->refcount++;
7494 var->in_changed_variables = true;
7495 *slot = var;
7496 }
7497 }
7498 else
7499 {
7500 gcc_assert (set);
7501 if (var->n_var_parts == 0)
7502 {
7503 variable_def **slot;
7504
7505 drop_var:
7506 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
7507 if (slot)
7508 {
7509 if (shared_hash_shared (set->vars))
7510 slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
7511 NO_INSERT);
7512 shared_hash_htab (set->vars)->clear_slot (slot);
7513 }
7514 }
7515 }
7516 }
7517
7518 /* Look for the index in VAR->var_part corresponding to OFFSET.
7519 Return -1 if not found. If INSERTION_POINT is non-NULL, the
7520 referenced int will be set to the index that the part has or should
7521 have, if it should be inserted. */
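/* For example (hypothetical offsets, for a multi-part variable): if
   VAR has parts at offsets {0, 4, 8}, looking up offset 4 returns
   index 1, while looking up offset 6 returns -1 and sets
   *INSERTION_POINT to 2.  */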
7522
7523 static inline int
7524 find_variable_location_part (variable var, HOST_WIDE_INT offset,
7525 int *insertion_point)
7526 {
7527 int pos, low, high;
7528
7529 if (var->onepart)
7530 {
7531 if (offset != 0)
7532 return -1;
7533
7534 if (insertion_point)
7535 *insertion_point = 0;
7536
7537 return var->n_var_parts - 1;
7538 }
7539
7540 /* Find the location part. */
7541 low = 0;
7542 high = var->n_var_parts;
7543 while (low != high)
7544 {
7545 pos = (low + high) / 2;
7546 if (VAR_PART_OFFSET (var, pos) < offset)
7547 low = pos + 1;
7548 else
7549 high = pos;
7550 }
7551 pos = low;
7552
7553 if (insertion_point)
7554 *insertion_point = pos;
7555
7556 if (pos < var->n_var_parts && VAR_PART_OFFSET (var, pos) == offset)
7557 return pos;
7558
7559 return -1;
7560 }
7561
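/* Like set_variable_part below, but operate directly on SLOT, the
   SET->vars slot for DV, and return the (possibly moved) slot.  */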
7562 static variable_def **
7563 set_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
7564 decl_or_value dv, HOST_WIDE_INT offset,
7565 enum var_init_status initialized, rtx set_src)
7566 {
7567 int pos;
7568 location_chain node, next;
7569 location_chain *nextp;
7570 variable var;
7571 onepart_enum_t onepart;
7572
7573 var = *slot;
7574
7575 if (var)
7576 onepart = var->onepart;
7577 else
7578 onepart = dv_onepart_p (dv);
7579
7580 gcc_checking_assert (offset == 0 || !onepart);
7581 gcc_checking_assert (loc != dv_as_opaque (dv));
7582
7583 if (! flag_var_tracking_uninit)
7584 initialized = VAR_INIT_STATUS_INITIALIZED;
7585
7586 if (!var)
7587 {
7588 /* Create new variable information. */
7589 var = onepart_pool (onepart).allocate ();
7590 var->dv = dv;
7591 var->refcount = 1;
7592 var->n_var_parts = 1;
7593 var->onepart = onepart;
7594 var->in_changed_variables = false;
7595 if (var->onepart)
7596 VAR_LOC_1PAUX (var) = NULL;
7597 else
7598 VAR_PART_OFFSET (var, 0) = offset;
7599 var->var_part[0].loc_chain = NULL;
7600 var->var_part[0].cur_loc = NULL;
7601 *slot = var;
7602 pos = 0;
7603 nextp = &var->var_part[0].loc_chain;
7604 }
7605 else if (onepart)
7606 {
7607 int r = -1, c = 0;
7608
7609 gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));
7610
7611 pos = 0;
7612
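	  /* The branches below keep a one-part variable's location
	     chain sorted: REGs first (by REGNO), then MEMs (by
	     address), then VALUEs (in canonical order), and anything
	     else ordered by loc_cmp.  R records the comparison result
	     at the insertion point; C counts the nodes before it, so
	     the point can be re-found after unsharing.  */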
7613 if (GET_CODE (loc) == VALUE)
7614 {
7615 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7616 nextp = &node->next)
7617 if (GET_CODE (node->loc) == VALUE)
7618 {
7619 if (node->loc == loc)
7620 {
7621 r = 0;
7622 break;
7623 }
7624 if (canon_value_cmp (node->loc, loc))
7625 c++;
7626 else
7627 {
7628 r = 1;
7629 break;
7630 }
7631 }
7632 else if (REG_P (node->loc) || MEM_P (node->loc))
7633 c++;
7634 else
7635 {
7636 r = 1;
7637 break;
7638 }
7639 }
7640 else if (REG_P (loc))
7641 {
7642 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7643 nextp = &node->next)
7644 if (REG_P (node->loc))
7645 {
7646 if (REGNO (node->loc) < REGNO (loc))
7647 c++;
7648 else
7649 {
7650 if (REGNO (node->loc) == REGNO (loc))
7651 r = 0;
7652 else
7653 r = 1;
7654 break;
7655 }
7656 }
7657 else
7658 {
7659 r = 1;
7660 break;
7661 }
7662 }
7663 else if (MEM_P (loc))
7664 {
7665 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7666 nextp = &node->next)
7667 if (REG_P (node->loc))
7668 c++;
7669 else if (MEM_P (node->loc))
7670 {
7671 if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0)
7672 break;
7673 else
7674 c++;
7675 }
7676 else
7677 {
7678 r = 1;
7679 break;
7680 }
7681 }
7682 else
7683 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7684 nextp = &node->next)
7685 if ((r = loc_cmp (node->loc, loc)) >= 0)
7686 break;
7687 else
7688 c++;
7689
7690 if (r == 0)
7691 return slot;
7692
7693 if (shared_var_p (var, set->vars))
7694 {
7695 slot = unshare_variable (set, slot, var, initialized);
7696 var = *slot;
7697 for (nextp = &var->var_part[0].loc_chain; c;
7698 nextp = &(*nextp)->next)
7699 c--;
7700 gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
7701 }
7702 }
7703 else
7704 {
7705 int inspos = 0;
7706
7707 gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));
7708
7709 pos = find_variable_location_part (var, offset, &inspos);
7710
7711 if (pos >= 0)
7712 {
7713 node = var->var_part[pos].loc_chain;
7714
7715 if (node
7716 && ((REG_P (node->loc) && REG_P (loc)
7717 && REGNO (node->loc) == REGNO (loc))
7718 || rtx_equal_p (node->loc, loc)))
7719 {
7720 	      /* LOC is at the beginning of the chain, so we have nothing
7721 		 to do.  */
7722 if (node->init < initialized)
7723 node->init = initialized;
7724 if (set_src != NULL)
7725 node->set_src = set_src;
7726
7727 return slot;
7728 }
7729 else
7730 {
7731 /* We have to make a copy of a shared variable. */
7732 if (shared_var_p (var, set->vars))
7733 {
7734 slot = unshare_variable (set, slot, var, initialized);
7735 var = *slot;
7736 }
7737 }
7738 }
7739 else
7740 {
7741 	  /* The location part was not found, so a new one will be created.  */
7742
7743 /* We have to make a copy of the shared variable. */
7744 if (shared_var_p (var, set->vars))
7745 {
7746 slot = unshare_variable (set, slot, var, initialized);
7747 var = *slot;
7748 }
7749
7750 	  /* We track only variables whose size is <= MAX_VAR_PARTS bytes;
7751 	     thus there are at most MAX_VAR_PARTS different offsets.  */
7752 gcc_assert (var->n_var_parts < MAX_VAR_PARTS
7753 && (!var->n_var_parts || !onepart));
7754
7755 	  /* We have to move the elements of the array starting at index
7756 	     INSPOS up by one position.  */
7757 for (pos = var->n_var_parts; pos > inspos; pos--)
7758 var->var_part[pos] = var->var_part[pos - 1];
7759
7760 var->n_var_parts++;
7761 gcc_checking_assert (!onepart);
7762 VAR_PART_OFFSET (var, pos) = offset;
7763 var->var_part[pos].loc_chain = NULL;
7764 var->var_part[pos].cur_loc = NULL;
7765 }
7766
7767 /* Delete the location from the list. */
7768 nextp = &var->var_part[pos].loc_chain;
7769 for (node = var->var_part[pos].loc_chain; node; node = next)
7770 {
7771 next = node->next;
7772 if ((REG_P (node->loc) && REG_P (loc)
7773 && REGNO (node->loc) == REGNO (loc))
7774 || rtx_equal_p (node->loc, loc))
7775 {
7776 /* Save these values, to assign to the new node, before
7777 deleting this one. */
7778 if (node->init > initialized)
7779 initialized = node->init;
7780 if (node->set_src != NULL && set_src == NULL)
7781 set_src = node->set_src;
7782 if (var->var_part[pos].cur_loc == node->loc)
7783 var->var_part[pos].cur_loc = NULL;
7784 delete node;
7785 *nextp = next;
7786 break;
7787 }
7788 else
7789 nextp = &node->next;
7790 }
7791
7792 nextp = &var->var_part[pos].loc_chain;
7793 }
7794
7795 /* Add the location to the beginning. */
7796 node = new location_chain_def;
7797 node->loc = loc;
7798 node->init = initialized;
7799 node->set_src = set_src;
7800 node->next = *nextp;
7801 *nextp = node;
7802
7803   /* If no location was emitted for this part yet, note the change.  */
7804 if (var->var_part[pos].cur_loc == NULL)
7805 variable_was_changed (var, set);
7806
7807 return slot;
7808 }
7809
7810 /* Set the part of a variable's location in the dataflow set SET.  The
7811    variable part is specified by the variable's declaration in DV and
7812    offset OFFSET, and the part's location by LOC.  IOPT should be
7813    NO_INSERT if the variable is known to be in SET already and the
7814    variable hash table must not be resized, and INSERT otherwise.  */
7815
7816 static void
7817 set_variable_part (dataflow_set *set, rtx loc,
7818 decl_or_value dv, HOST_WIDE_INT offset,
7819 enum var_init_status initialized, rtx set_src,
7820 enum insert_option iopt)
7821 {
7822 variable_def **slot;
7823
7824 if (iopt == NO_INSERT)
7825 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7826 else
7827 {
7828 slot = shared_hash_find_slot (set->vars, dv);
7829 if (!slot)
7830 slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
7831 }
7832 set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
7833 }
7834
7835 /* Remove all recorded register locations for the given variable part
7836    from dataflow set SET, except for those that are identical to LOC.
7837    The variable part is specified by its SET->vars slot SLOT and
7838    offset OFFSET.  */
7839
7840 static variable_def **
7841 clobber_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
7842 HOST_WIDE_INT offset, rtx set_src)
7843 {
7844 variable var = *slot;
7845 int pos = find_variable_location_part (var, offset, NULL);
7846
7847 if (pos >= 0)
7848 {
7849 location_chain node, next;
7850
7851 /* Remove the register locations from the dataflow set. */
7852 next = var->var_part[pos].loc_chain;
7853 for (node = next; node; node = next)
7854 {
7855 next = node->next;
7856 if (node->loc != loc
7857 && (!flag_var_tracking_uninit
7858 || !set_src
7859 || MEM_P (set_src)
7860 || !rtx_equal_p (set_src, node->set_src)))
7861 {
7862 if (REG_P (node->loc))
7863 {
7864 attrs anode, anext;
7865 attrs *anextp;
7866
7867 /* Remove the variable part from the register's
7868 list, but preserve any other variable parts
7869 that might be regarded as live in that same
7870 register. */
7871 anextp = &set->regs[REGNO (node->loc)];
7872 for (anode = *anextp; anode; anode = anext)
7873 {
7874 anext = anode->next;
7875 if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
7876 && anode->offset == offset)
7877 {
7878 delete anode;
7879 *anextp = anext;
7880 }
7881 else
7882 anextp = &anode->next;
7883 }
7884 }
7885
7886 slot = delete_slot_part (set, node->loc, slot, offset);
7887 }
7888 }
7889 }
7890
7891 return slot;
7892 }
7893
7894 /* Remove all recorded register locations for the given variable part
7895    from dataflow set SET, except for those that are identical to LOC.
7896    The variable part is specified by the variable's declaration or
7897    value DV and offset OFFSET.  */
7898
7899 static void
7900 clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7901 HOST_WIDE_INT offset, rtx set_src)
7902 {
7903 variable_def **slot;
7904
7905 if (!dv_as_opaque (dv)
7906 || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
7907 return;
7908
7909 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7910 if (!slot)
7911 return;
7912
7913 clobber_slot_part (set, loc, slot, offset, set_src);
7914 }
7915
7916 /* Delete the part of a variable's location from dataflow set SET.
7917    The variable part is specified by its SET->vars slot SLOT and
7918    offset OFFSET, and the part's location by LOC.  */
7919
7920 static variable_def **
7921 delete_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
7922 HOST_WIDE_INT offset)
7923 {
7924 variable var = *slot;
7925 int pos = find_variable_location_part (var, offset, NULL);
7926
7927 if (pos >= 0)
7928 {
7929 location_chain node, next;
7930 location_chain *nextp;
7931 bool changed;
7932 rtx cur_loc;
7933
7934 if (shared_var_p (var, set->vars))
7935 {
7936 	  /* If the part's location chain contains LOC, we have to
7937 	     make a copy of the variable.  */
7938 for (node = var->var_part[pos].loc_chain; node;
7939 node = node->next)
7940 {
7941 if ((REG_P (node->loc) && REG_P (loc)
7942 && REGNO (node->loc) == REGNO (loc))
7943 || rtx_equal_p (node->loc, loc))
7944 {
7945 slot = unshare_variable (set, slot, var,
7946 VAR_INIT_STATUS_UNKNOWN);
7947 var = *slot;
7948 break;
7949 }
7950 }
7951 }
7952
7953 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
7954 cur_loc = VAR_LOC_FROM (var);
7955 else
7956 cur_loc = var->var_part[pos].cur_loc;
7957
7958 /* Delete the location part. */
7959 changed = false;
7960 nextp = &var->var_part[pos].loc_chain;
7961 for (node = *nextp; node; node = next)
7962 {
7963 next = node->next;
7964 if ((REG_P (node->loc) && REG_P (loc)
7965 && REGNO (node->loc) == REGNO (loc))
7966 || rtx_equal_p (node->loc, loc))
7967 {
7968 	      /* If we have deleted the location which was last emitted,
7969 		 we have to emit a new location, so add the variable to
7970 		 the set of changed variables.  */
7971 if (cur_loc == node->loc)
7972 {
7973 changed = true;
7974 var->var_part[pos].cur_loc = NULL;
7975 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
7976 VAR_LOC_FROM (var) = NULL;
7977 }
7978 delete node;
7979 *nextp = next;
7980 break;
7981 }
7982 else
7983 nextp = &node->next;
7984 }
7985
7986 if (var->var_part[pos].loc_chain == NULL)
7987 {
7988 changed = true;
7989 var->n_var_parts--;
7990 while (pos < var->n_var_parts)
7991 {
7992 var->var_part[pos] = var->var_part[pos + 1];
7993 pos++;
7994 }
7995 }
7996 if (changed)
7997 variable_was_changed (var, set);
7998 }
7999
8000 return slot;
8001 }
8002
8003 /* Delete the part of a variable's location from dataflow set SET.
8004    The variable part is specified by the variable's declaration or
8005    value DV and offset OFFSET, and the part's location by LOC.  */
8006
8007 static void
8008 delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
8009 HOST_WIDE_INT offset)
8010 {
8011 variable_def **slot = shared_hash_find_slot_noinsert (set->vars, dv);
8012 if (!slot)
8013 return;
8014
8015 delete_slot_part (set, loc, slot, offset);
8016 }
8017
8018
8019 /* Structure for passing some other parameters to function
8020 vt_expand_loc_callback. */
8021 struct expand_loc_callback_data
8022 {
8023 /* The variables and values active at this point. */
8024 variable_table_type *vars;
8025
8026 /* Stack of values and debug_exprs under expansion, and their
8027 children. */
8028 auto_vec<rtx, 4> expanding;
8029
8030 /* Stack of values and debug_exprs whose expansion hit recursion
8031 cycles. They will have VALUE_RECURSED_INTO marked when added to
8032 this list. This flag will be cleared if any of its dependencies
8033 resolves to a valid location. So, if the flag remains set at the
8034 end of the search, we know no valid location for this one can
8035 possibly exist. */
8036 auto_vec<rtx, 4> pending;
8037
8038 /* The maximum depth among the sub-expressions under expansion.
8039 Zero indicates no expansion so far. */
8040 expand_depth depth;
8041 };
8042
8043 /* Allocate the one-part auxiliary data structure for VAR, with enough
8044 room for COUNT dependencies. */
8045
8046 static void
8047 loc_exp_dep_alloc (variable var, int count)
8048 {
8049 size_t allocsize;
8050
8051 gcc_checking_assert (var->onepart);
8052
8053 /* We can be called with COUNT == 0 to allocate the data structure
8054 without any dependencies, e.g. for the backlinks only. However,
8055 if we are specifying a COUNT, then the dependency list must have
8056 been emptied before. It would be possible to adjust pointers or
8057 force it empty here, but this is better done at an earlier point
8058 in the algorithm, so we instead leave an assertion to catch
8059 errors. */
8060 gcc_checking_assert (!count
8061 || VAR_LOC_DEP_VEC (var) == NULL
8062 || VAR_LOC_DEP_VEC (var)->is_empty ());
8063
8064 if (VAR_LOC_1PAUX (var) && VAR_LOC_DEP_VEC (var)->space (count))
8065 return;
8066
8067 allocsize = offsetof (struct onepart_aux, deps)
8068 + vec<loc_exp_dep, va_heap, vl_embed>::embedded_size (count);
8069
8070 if (VAR_LOC_1PAUX (var))
8071 {
8072 VAR_LOC_1PAUX (var) = XRESIZEVAR (struct onepart_aux,
8073 VAR_LOC_1PAUX (var), allocsize);
8074 /* If the reallocation moves the onepaux structure, the
8075 back-pointer to BACKLINKS in the first list member will still
8076 point to its old location. Adjust it. */
8077 if (VAR_LOC_DEP_LST (var))
8078 VAR_LOC_DEP_LST (var)->pprev = VAR_LOC_DEP_LSTP (var);
8079 }
8080 else
8081 {
8082 VAR_LOC_1PAUX (var) = XNEWVAR (struct onepart_aux, allocsize);
8083 *VAR_LOC_DEP_LSTP (var) = NULL;
8084 VAR_LOC_FROM (var) = NULL;
8085 VAR_LOC_DEPTH (var).complexity = 0;
8086 VAR_LOC_DEPTH (var).entryvals = 0;
8087 }
8088 VAR_LOC_DEP_VEC (var)->embedded_init (count);
8089 }
8090
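/* Illustrative aside, a minimal sketch of the single-block layout used
   by loc_exp_dep_alloc above, shown with a plain C flexible array
   instead of the embedded vec; HDR and COUNT are hypothetical names
   used only for illustration:  */
#if 0
struct hdr { size_t n; int elems[]; };

struct hdr *h
  = (struct hdr *) xmalloc (offsetof (struct hdr, elems)
			    + count * sizeof (int));
/* Header and element storage share one allocation, so a single
   xrealloc can grow both, exactly as XRESIZEVAR does above.  */
#endif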
8091 /* Remove all entries from the vector of active dependencies of VAR,
8092 removing them from the back-links lists too. */
8093
8094 static void
8095 loc_exp_dep_clear (variable var)
8096 {
8097 while (VAR_LOC_DEP_VEC (var) && !VAR_LOC_DEP_VEC (var)->is_empty ())
8098 {
8099 loc_exp_dep *led = &VAR_LOC_DEP_VEC (var)->last ();
8100 if (led->next)
8101 led->next->pprev = led->pprev;
8102 if (led->pprev)
8103 *led->pprev = led->next;
8104 VAR_LOC_DEP_VEC (var)->pop ();
8105 }
8106 }
8107
8108 /* Insert an active dependency from VAR on X to the vector of
8109 dependencies, and add the corresponding back-link to X's list of
8110 back-links in VARS. */
8111
8112 static void
8113 loc_exp_insert_dep (variable var, rtx x, variable_table_type *vars)
8114 {
8115 decl_or_value dv;
8116 variable xvar;
8117 loc_exp_dep *led;
8118
8119 dv = dv_from_rtx (x);
8120
8121 /* ??? Build a vector of variables parallel to EXPANDING, to avoid
8122 an additional lookup? */
8123 xvar = vars->find_with_hash (dv, dv_htab_hash (dv));
8124
8125 if (!xvar)
8126 {
8127 xvar = variable_from_dropped (dv, NO_INSERT);
8128 gcc_checking_assert (xvar);
8129 }
8130
8131 /* No point in adding the same backlink more than once. This may
8132 arise if, say, the same value appears in two complex expressions in
8133 the same loc_list, or even more than once in a single
8134 expression. */
8135 if (VAR_LOC_DEP_LST (xvar) && VAR_LOC_DEP_LST (xvar)->dv == var->dv)
8136 return;
8137
8138 if (var->onepart == NOT_ONEPART)
8139 led = new loc_exp_dep;
8140 else
8141 {
8142 loc_exp_dep empty;
8143 memset (&empty, 0, sizeof (empty));
8144 VAR_LOC_DEP_VEC (var)->quick_push (empty);
8145 led = &VAR_LOC_DEP_VEC (var)->last ();
8146 }
8147 led->dv = var->dv;
8148 led->value = x;
8149
8150 loc_exp_dep_alloc (xvar, 0);
8151 led->pprev = VAR_LOC_DEP_LSTP (xvar);
8152 led->next = *led->pprev;
8153 if (led->next)
8154 led->next->pprev = &led->next;
8155 *led->pprev = led;
8156 }
8157
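/* Illustrative aside, not part of the pass: the back-link lists
   maintained above use the "pointer to previous pointer" idiom, where
   each node stores the address of the pointer that points at it, so
   unlinking needs no special case for the list head.  A minimal
   stand-alone sketch, with a hypothetical NODE type:  */
#if 0
struct node { struct node *next, **pprev; };

static void
unlink_node (struct node *n)
{
  if (n->next)
    n->next->pprev = n->pprev;	/* Successor's back-pointer skips N.  */
  if (n->pprev)
    *n->pprev = n->next;	/* Whatever pointed at N now skips it.  */
  n->next = NULL;
  n->pprev = NULL;
}
#endif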
8158 /* Create active dependencies of VAR on COUNT values starting at
8159 VALUE, and corresponding back-links to the entries in VARS. Return
8160 true if we found any pending-recursion results. */
8161
8162 static bool
8163 loc_exp_dep_set (variable var, rtx result, rtx *value, int count,
8164 variable_table_type *vars)
8165 {
8166 bool pending_recursion = false;
8167
8168 gcc_checking_assert (VAR_LOC_DEP_VEC (var) == NULL
8169 || VAR_LOC_DEP_VEC (var)->is_empty ());
8170
8171 /* Set up all dependencies from last_child (as set up at the end of
8172 the expansion loop in our caller) to the end. */
8173 loc_exp_dep_alloc (var, count);
8174
8175 while (count--)
8176 {
8177 rtx x = *value++;
8178
8179 if (!pending_recursion)
8180 pending_recursion = !result && VALUE_RECURSED_INTO (x);
8181
8182 loc_exp_insert_dep (var, x, vars);
8183 }
8184
8185 return pending_recursion;
8186 }
8187
8188 /* Notify the back-links of IVAR that are pending recursion that we
8189 have found a non-NIL value for it, so they are cleared for another
8190 attempt to compute a current location. */
8191
8192 static void
8193 notify_dependents_of_resolved_value (variable ivar, variable_table_type *vars)
8194 {
8195 loc_exp_dep *led, *next;
8196
8197 for (led = VAR_LOC_DEP_LST (ivar); led; led = next)
8198 {
8199 decl_or_value dv = led->dv;
8200 variable var;
8201
8202 next = led->next;
8203
8204 if (dv_is_value_p (dv))
8205 {
8206 rtx value = dv_as_value (dv);
8207
8208 /* If we have already resolved it, leave it alone. */
8209 if (!VALUE_RECURSED_INTO (value))
8210 continue;
8211
8212 /* Check that VALUE_RECURSED_INTO, true from the test above,
8213 implies NO_LOC_P. */
8214 gcc_checking_assert (NO_LOC_P (value));
8215
8216 /* We won't notify variables that are being expanded,
8217 because their dependency list is cleared before
8218 recursing. */
8219 NO_LOC_P (value) = false;
8220 VALUE_RECURSED_INTO (value) = false;
8221
8222 gcc_checking_assert (dv_changed_p (dv));
8223 }
8224 else
8225 {
8226 gcc_checking_assert (dv_onepart_p (dv) != NOT_ONEPART);
8227 if (!dv_changed_p (dv))
8228 continue;
8229 }
8230
8231 var = vars->find_with_hash (dv, dv_htab_hash (dv));
8232
8233 if (!var)
8234 var = variable_from_dropped (dv, NO_INSERT);
8235
8236 if (var)
8237 notify_dependents_of_resolved_value (var, vars);
8238
8239 if (next)
8240 next->pprev = led->pprev;
8241 if (led->pprev)
8242 *led->pprev = next;
8243 led->next = NULL;
8244 led->pprev = NULL;
8245 }
8246 }
8247
8248 static rtx vt_expand_loc_callback (rtx x, bitmap regs,
8249 int max_depth, void *data);
8250
8251 /* Return the combined depth, when one sub-expression evaluated to
8252 BEST_DEPTH and the previous known depth was SAVED_DEPTH. */
8253
8254 static inline expand_depth
8255 update_depth (expand_depth saved_depth, expand_depth best_depth)
8256 {
8257 /* If we didn't find anything, stick with what we had. */
8258 if (!best_depth.complexity)
8259 return saved_depth;
8260
8261 /* If we hadn't found anything, use the depth of the current
8262 expression. Do NOT add one extra level; we want to compute the
8263 maximum depth among sub-expressions. We'll increment it later,
8264 if appropriate. */
8265 if (!saved_depth.complexity)
8266 return best_depth;
8267
8268 /* Combine the entryval count so that regardless of which one we
8269 return, the entryval count is accurate. */
8270 best_depth.entryvals = saved_depth.entryvals
8271 = best_depth.entryvals + saved_depth.entryvals;
8272
8273 if (saved_depth.complexity < best_depth.complexity)
8274 return best_depth;
8275 else
8276 return saved_depth;
8277 }
8278
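/* For example (illustrative numbers): combining SAVED_DEPTH
   { complexity = 2, entryvals = 1 } with BEST_DEPTH
   { complexity = 3, entryvals = 0 } returns { 3, 1 }: the larger
   complexity wins, while the entryval counts are first summed into
   both operands so the returned count stays accurate either way.  */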
8279 /* Expand VAR to a location RTX, updating its cur_loc. Use REGS and
8280 DATA for cselib expand callback. If PENDRECP is given, indicate in
8281 it whether any sub-expression couldn't be fully evaluated because
8282 it is pending recursion resolution. */
8283
8284 static inline rtx
8285 vt_expand_var_loc_chain (variable var, bitmap regs, void *data, bool *pendrecp)
8286 {
8287 struct expand_loc_callback_data *elcd
8288 = (struct expand_loc_callback_data *) data;
8289 location_chain loc, next;
8290 rtx result = NULL;
8291 int first_child, result_first_child, last_child;
8292 bool pending_recursion;
8293 rtx loc_from = NULL;
8294 struct elt_loc_list *cloc = NULL;
8295 expand_depth depth = { 0, 0 }, saved_depth = elcd->depth;
8296 int wanted_entryvals, found_entryvals = 0;
8297
8298 /* Clear all backlinks pointing at this, so that we're not notified
8299 while we're active. */
8300 loc_exp_dep_clear (var);
8301
8302 retry:
8303 if (var->onepart == ONEPART_VALUE)
8304 {
8305 cselib_val *val = CSELIB_VAL_PTR (dv_as_value (var->dv));
8306
8307 gcc_checking_assert (cselib_preserved_value_p (val));
8308
8309 cloc = val->locs;
8310 }
8311
8312 first_child = result_first_child = last_child
8313 = elcd->expanding.length ();
8314
8315 wanted_entryvals = found_entryvals;
8316
8317 /* Attempt to expand each available location in turn. */
8318 for (next = loc = var->n_var_parts ? var->var_part[0].loc_chain : NULL;
8319 loc || cloc; loc = next)
8320 {
8321 result_first_child = last_child;
8322
8323 if (!loc)
8324 {
8325 loc_from = cloc->loc;
8326 next = loc;
8327 cloc = cloc->next;
8328 if (unsuitable_loc (loc_from))
8329 continue;
8330 }
8331 else
8332 {
8333 loc_from = loc->loc;
8334 next = loc->next;
8335 }
8336
8337 gcc_checking_assert (!unsuitable_loc (loc_from));
8338
8339 elcd->depth.complexity = elcd->depth.entryvals = 0;
8340 result = cselib_expand_value_rtx_cb (loc_from, regs, EXPR_DEPTH,
8341 vt_expand_loc_callback, data);
8342 last_child = elcd->expanding.length ();
8343
8344 if (result)
8345 {
8346 depth = elcd->depth;
8347
8348 gcc_checking_assert (depth.complexity
8349 || result_first_child == last_child);
8350
8351 if (last_child - result_first_child != 1)
8352 {
8353 if (!depth.complexity && GET_CODE (result) == ENTRY_VALUE)
8354 depth.entryvals++;
8355 depth.complexity++;
8356 }
8357
8358 if (depth.complexity <= EXPR_USE_DEPTH)
8359 {
8360 if (depth.entryvals <= wanted_entryvals)
8361 break;
8362 else if (!found_entryvals || depth.entryvals < found_entryvals)
8363 found_entryvals = depth.entryvals;
8364 }
8365
8366 result = NULL;
8367 }
8368
8369 /* Set it up in case we leave the loop. */
8370 depth.complexity = depth.entryvals = 0;
8371 loc_from = NULL;
8372 result_first_child = first_child;
8373 }
8374
8375 if (!loc_from && wanted_entryvals < found_entryvals)
8376 {
8377 /* We found entries with ENTRY_VALUEs and skipped them. Since
8378 we could not find any expansions without ENTRY_VALUEs, but we
8379 found at least one with them, go back and get an entry with
8380 the minimum ENTRY_VALUE count that we found. We could
8381 avoid looping, but since each sub-loc is already resolved,
8382 the re-expansion should be trivial. ??? Should we record all
8383 attempted locs as dependencies, so that we retry the
8384 expansion should any of them change, in the hope it can give
8385 us a new entry without an ENTRY_VALUE? */
8386 elcd->expanding.truncate (first_child);
8387 goto retry;
8388 }
8389
8390 /* Register all encountered dependencies as active. */
8391 pending_recursion = loc_exp_dep_set
8392 (var, result, elcd->expanding.address () + result_first_child,
8393 last_child - result_first_child, elcd->vars);
8394
8395 elcd->expanding.truncate (first_child);
8396
8397 /* Record where the expansion came from. */
8398 gcc_checking_assert (!result || !pending_recursion);
8399 VAR_LOC_FROM (var) = loc_from;
8400 VAR_LOC_DEPTH (var) = depth;
8401
8402 gcc_checking_assert (!depth.complexity == !result);
8403
8404 elcd->depth = update_depth (saved_depth, depth);
8405
8406 /* Indicate whether any of the dependencies are pending recursion
8407 resolution. */
8408 if (pendrecp)
8409 *pendrecp = pending_recursion;
8410
8411 if (!pendrecp || !pending_recursion)
8412 var->var_part[0].cur_loc = result;
8413
8414 return result;
8415 }
8416
8417 /* Callback for cselib_expand_value that looks for expressions
8418 holding the value in the var-tracking hash tables. Return X for
8419 standard processing; anything else is to be used as-is. */
8420
8421 static rtx
8422 vt_expand_loc_callback (rtx x, bitmap regs,
8423 int max_depth ATTRIBUTE_UNUSED,
8424 void *data)
8425 {
8426 struct expand_loc_callback_data *elcd
8427 = (struct expand_loc_callback_data *) data;
8428 decl_or_value dv;
8429 variable var;
8430 rtx result, subreg;
8431 bool pending_recursion = false;
8432 bool from_empty = false;
8433
8434 switch (GET_CODE (x))
8435 {
8436 case SUBREG:
8437 subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
8438 EXPR_DEPTH,
8439 vt_expand_loc_callback, data);
8440
8441 if (!subreg)
8442 return NULL;
8443
8444 result = simplify_gen_subreg (GET_MODE (x), subreg,
8445 GET_MODE (SUBREG_REG (x)),
8446 SUBREG_BYTE (x));
8447
8448 /* Invalid SUBREGs are ok in debug info. ??? We could try
8449 alternate expansions for the VALUE as well. */
8450 if (!result)
8451 result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));
8452
8453 return result;
8454
8455 case DEBUG_EXPR:
8456 case VALUE:
8457 dv = dv_from_rtx (x);
8458 break;
8459
8460 default:
8461 return x;
8462 }
8463
8464 elcd->expanding.safe_push (x);
8465
8466 /* Check that VALUE_RECURSED_INTO implies NO_LOC_P. */
8467 gcc_checking_assert (!VALUE_RECURSED_INTO (x) || NO_LOC_P (x));
8468
8469 if (NO_LOC_P (x))
8470 {
8471 gcc_checking_assert (VALUE_RECURSED_INTO (x) || !dv_changed_p (dv));
8472 return NULL;
8473 }
8474
8475 var = elcd->vars->find_with_hash (dv, dv_htab_hash (dv));
8476
8477 if (!var)
8478 {
8479 from_empty = true;
8480 var = variable_from_dropped (dv, INSERT);
8481 }
8482
8483 gcc_checking_assert (var);
8484
8485 if (!dv_changed_p (dv))
8486 {
8487 gcc_checking_assert (!NO_LOC_P (x));
8488 gcc_checking_assert (var->var_part[0].cur_loc);
8489 gcc_checking_assert (VAR_LOC_1PAUX (var));
8490 gcc_checking_assert (VAR_LOC_1PAUX (var)->depth.complexity);
8491
8492 elcd->depth = update_depth (elcd->depth, VAR_LOC_1PAUX (var)->depth);
8493
8494 return var->var_part[0].cur_loc;
8495 }
8496
8497 VALUE_RECURSED_INTO (x) = true;
8498 /* This is tentative, but it makes some tests simpler. */
8499 NO_LOC_P (x) = true;
8500
8501 gcc_checking_assert (var->n_var_parts == 1 || from_empty);
8502
8503 result = vt_expand_var_loc_chain (var, regs, data, &pending_recursion);
8504
8505 if (pending_recursion)
8506 {
8507 gcc_checking_assert (!result);
8508 elcd->pending.safe_push (x);
8509 }
8510 else
8511 {
8512 NO_LOC_P (x) = !result;
8513 VALUE_RECURSED_INTO (x) = false;
8514 set_dv_changed (dv, false);
8515
8516 if (result)
8517 notify_dependents_of_resolved_value (var, elcd->vars);
8518 }
8519
8520 return result;
8521 }
8522
8523 /* While expanding variables, we may encounter recursion cycles
8524 because of mutual (possibly indirect) dependencies between two
8525 particular variables (or values), say A and B. If we're trying to
8526 expand A when we get to B, which in turn attempts to expand A, if
8527 we can't find any other expansion for B, we'll add B to this
8528 pending-recursion stack, and tentatively return NULL for its
8529 location. This tentative value will be used for any other
8530 occurrences of B, unless A gets some other location, in which case
8531 it will notify B that it is worth another try at computing a
8532 location for it, and it will use the location computed for A then.
8533 At the end of the expansion, the tentative NULL locations become
8534 final for all members of PENDING that didn't get a notification.
8535 This function performs this finalization of NULL locations. */
8536
8537 static void
8538 resolve_expansions_pending_recursion (vec<rtx, va_heap> *pending)
8539 {
8540 while (!pending->is_empty ())
8541 {
8542 rtx x = pending->pop ();
8543 decl_or_value dv;
8544
8545 if (!VALUE_RECURSED_INTO (x))
8546 continue;
8547
8548 gcc_checking_assert (NO_LOC_P (x));
8549 VALUE_RECURSED_INTO (x) = false;
8550 dv = dv_from_rtx (x);
8551 gcc_checking_assert (dv_changed_p (dv));
8552 set_dv_changed (dv, false);
8553 }
8554 }
8555
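/* A concrete, hypothetical instance of such a cycle: VALUE A is known
   only as (plus B (const_int 4)) while VALUE B is known only as
   (plus A (const_int -4)).  Expanding A recurses into B, which
   recurses back into A; B is then pushed on the pending stack with a
   tentative NULL location.  If A later resolves through some other
   equivalence, B is notified and re-expanded; otherwise the NULL
   becomes final in the loop above.  */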
8556 /* Initialize expand_loc_callback_data D with variable hash table V.
8557 It must be a macro because of alloca (vec stack). */
8558 #define INIT_ELCD(d, v) \
8559 do \
8560 { \
8561 (d).vars = (v); \
8562 (d).depth.complexity = (d).depth.entryvals = 0; \
8563 } \
8564 while (0)
8565 /* Finalize expand_loc_callback_data D, resolved to location L. */
8566 #define FINI_ELCD(d, l) \
8567 do \
8568 { \
8569 resolve_expansions_pending_recursion (&(d).pending); \
8570 (d).pending.release (); \
8571 (d).expanding.release (); \
8572 \
8573 if ((l) && MEM_P (l)) \
8574 (l) = targetm.delegitimize_address (l); \
8575 } \
8576 while (0)
8577
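/* Typical pairing, as in vt_expand_loc and vt_expand_1pvar below:
   declare a local struct expand_loc_callback_data, INIT_ELCD it,
   perform the expansion with vt_expand_loc_callback, then FINI_ELCD
   to finalize pending-recursion entries and release the stacks.  */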
8578 /* Expand VALUEs and DEBUG_EXPRs in LOC to a location, using the
8579 equivalences in VARS, updating their CUR_LOCs in the process. */
8580
8581 static rtx
8582 vt_expand_loc (rtx loc, variable_table_type *vars)
8583 {
8584 struct expand_loc_callback_data data;
8585 rtx result;
8586
8587 if (!MAY_HAVE_DEBUG_INSNS)
8588 return loc;
8589
8590 INIT_ELCD (data, vars);
8591
8592 result = cselib_expand_value_rtx_cb (loc, scratch_regs, EXPR_DEPTH,
8593 vt_expand_loc_callback, &data);
8594
8595 FINI_ELCD (data, result);
8596
8597 return result;
8598 }
8599
8600 /* Expand the one-part VARiable to a location, using the equivalences
8601 in VARS, updating their CUR_LOCs in the process. */
8602
8603 static rtx
8604 vt_expand_1pvar (variable var, variable_table_type *vars)
8605 {
8606 struct expand_loc_callback_data data;
8607 rtx loc;
8608
8609 gcc_checking_assert (var->onepart && var->n_var_parts == 1);
8610
8611 if (!dv_changed_p (var->dv))
8612 return var->var_part[0].cur_loc;
8613
8614 INIT_ELCD (data, vars);
8615
8616 loc = vt_expand_var_loc_chain (var, scratch_regs, &data, NULL);
8617
8618 gcc_checking_assert (data.expanding.is_empty ());
8619
8620 FINI_ELCD (data, loc);
8621
8622 return loc;
8623 }
8624
8625 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
8626 additional parameters: WHERE specifies whether the note shall be emitted
8627 before or after instruction INSN. */
8628
8629 int
8630 emit_note_insn_var_location (variable_def **varp, emit_note_data *data)
8631 {
8632 variable var = *varp;
8633 rtx_insn *insn = data->insn;
8634 enum emit_note_where where = data->where;
8635 variable_table_type *vars = data->vars;
8636 rtx_note *note;
8637 rtx note_vl;
8638 int i, j, n_var_parts;
8639 bool complete;
8640 enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
8641 HOST_WIDE_INT last_limit;
8642 tree type_size_unit;
8643 HOST_WIDE_INT offsets[MAX_VAR_PARTS];
8644 rtx loc[MAX_VAR_PARTS];
8645 tree decl;
8646 location_chain lc;
8647
8648 gcc_checking_assert (var->onepart == NOT_ONEPART
8649 || var->onepart == ONEPART_VDECL);
8650
8651 decl = dv_as_decl (var->dv);
8652
8653 complete = true;
8654 last_limit = 0;
8655 n_var_parts = 0;
8656 if (!var->onepart)
8657 for (i = 0; i < var->n_var_parts; i++)
8658 if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain)
8659 var->var_part[i].cur_loc = var->var_part[i].loc_chain->loc;
8660 for (i = 0; i < var->n_var_parts; i++)
8661 {
8662 machine_mode mode, wider_mode;
8663 rtx loc2;
8664 HOST_WIDE_INT offset;
8665
8666 if (i == 0 && var->onepart)
8667 {
8668 gcc_checking_assert (var->n_var_parts == 1);
8669 offset = 0;
8670 initialized = VAR_INIT_STATUS_INITIALIZED;
8671 loc2 = vt_expand_1pvar (var, vars);
8672 }
8673 else
8674 {
8675 if (last_limit < VAR_PART_OFFSET (var, i))
8676 {
8677 complete = false;
8678 break;
8679 }
8680 else if (last_limit > VAR_PART_OFFSET (var, i))
8681 continue;
8682 offset = VAR_PART_OFFSET (var, i);
8683 loc2 = var->var_part[i].cur_loc;
8684 if (loc2 && GET_CODE (loc2) == MEM
8685 && GET_CODE (XEXP (loc2, 0)) == VALUE)
8686 {
8687 rtx depval = XEXP (loc2, 0);
8688
8689 loc2 = vt_expand_loc (loc2, vars);
8690
8691 if (loc2)
8692 loc_exp_insert_dep (var, depval, vars);
8693 }
8694 if (!loc2)
8695 {
8696 complete = false;
8697 continue;
8698 }
8699 gcc_checking_assert (GET_CODE (loc2) != VALUE);
8700 for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
8701 if (var->var_part[i].cur_loc == lc->loc)
8702 {
8703 initialized = lc->init;
8704 break;
8705 }
8706 gcc_assert (lc);
8707 }
8708
8709 offsets[n_var_parts] = offset;
8710 if (!loc2)
8711 {
8712 complete = false;
8713 continue;
8714 }
8715 loc[n_var_parts] = loc2;
8716 mode = GET_MODE (var->var_part[i].cur_loc);
8717 if (mode == VOIDmode && var->onepart)
8718 mode = DECL_MODE (decl);
8719 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
8720
8721 /* Attempt to merge adjacent registers or memory. */
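/* For instance (an illustrative little-endian case): a DImode
   variable with SImode parts in hard registers 0 and 1 at offsets
   0 and 4 can be re-described as a single (reg:DI 0), and adjacent
   stack slots (mem:SI (plus sp 8)) and (mem:SI (plus sp 12)) as one
   (mem:DI (plus sp 8)), halving the location list.  */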
8722 wider_mode = GET_MODE_WIDER_MODE (mode);
8723 for (j = i + 1; j < var->n_var_parts; j++)
8724 if (last_limit <= VAR_PART_OFFSET (var, j))
8725 break;
8726 if (j < var->n_var_parts
8727 && wider_mode != VOIDmode
8728 && var->var_part[j].cur_loc
8729 && mode == GET_MODE (var->var_part[j].cur_loc)
8730 && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
8731 && last_limit == (var->onepart ? 0 : VAR_PART_OFFSET (var, j))
8732 && (loc2 = vt_expand_loc (var->var_part[j].cur_loc, vars))
8733 && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2))
8734 {
8735 rtx new_loc = NULL;
8736
8737 if (REG_P (loc[n_var_parts])
8738 && hard_regno_nregs[REGNO (loc[n_var_parts])][mode] * 2
8739 == hard_regno_nregs[REGNO (loc[n_var_parts])][wider_mode]
8740 && end_hard_regno (mode, REGNO (loc[n_var_parts]))
8741 == REGNO (loc2))
8742 {
8743 if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
8744 new_loc = simplify_subreg (wider_mode, loc[n_var_parts],
8745 mode, 0);
8746 else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
8747 new_loc = simplify_subreg (wider_mode, loc2, mode, 0);
8748 if (new_loc)
8749 {
8750 if (!REG_P (new_loc)
8751 || REGNO (new_loc) != REGNO (loc[n_var_parts]))
8752 new_loc = NULL;
8753 else
8754 REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
8755 }
8756 }
8757 else if (MEM_P (loc[n_var_parts])
8758 && GET_CODE (XEXP (loc2, 0)) == PLUS
8759 && REG_P (XEXP (XEXP (loc2, 0), 0))
8760 && CONST_INT_P (XEXP (XEXP (loc2, 0), 1)))
8761 {
8762 if ((REG_P (XEXP (loc[n_var_parts], 0))
8763 && rtx_equal_p (XEXP (loc[n_var_parts], 0),
8764 XEXP (XEXP (loc2, 0), 0))
8765 && INTVAL (XEXP (XEXP (loc2, 0), 1))
8766 == GET_MODE_SIZE (mode))
8767 || (GET_CODE (XEXP (loc[n_var_parts], 0)) == PLUS
8768 && CONST_INT_P (XEXP (XEXP (loc[n_var_parts], 0), 1))
8769 && rtx_equal_p (XEXP (XEXP (loc[n_var_parts], 0), 0),
8770 XEXP (XEXP (loc2, 0), 0))
8771 && INTVAL (XEXP (XEXP (loc[n_var_parts], 0), 1))
8772 + GET_MODE_SIZE (mode)
8773 == INTVAL (XEXP (XEXP (loc2, 0), 1))))
8774 new_loc = adjust_address_nv (loc[n_var_parts],
8775 wider_mode, 0);
8776 }
8777
8778 if (new_loc)
8779 {
8780 loc[n_var_parts] = new_loc;
8781 mode = wider_mode;
8782 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
8783 i = j;
8784 }
8785 }
8786 ++n_var_parts;
8787 }
8788 type_size_unit = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8789 if ((unsigned HOST_WIDE_INT) last_limit < TREE_INT_CST_LOW (type_size_unit))
8790 complete = false;
8791
8792 if (! flag_var_tracking_uninit)
8793 initialized = VAR_INIT_STATUS_INITIALIZED;
8794
8795 note_vl = NULL_RTX;
8796 if (!complete)
8797 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, NULL_RTX, initialized);
8798 else if (n_var_parts == 1)
8799 {
8800 rtx expr_list;
8801
8802 if (offsets[0] || GET_CODE (loc[0]) == PARALLEL)
8803 expr_list = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
8804 else
8805 expr_list = loc[0];
8806
8807 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list, initialized);
8808 }
8809 else if (n_var_parts)
8810 {
8811 rtx parallel;
8812
8813 for (i = 0; i < n_var_parts; i++)
8814 loc[i]
8815 = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));
8816
8817 parallel = gen_rtx_PARALLEL (VOIDmode,
8818 gen_rtvec_v (n_var_parts, loc));
8819 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl,
8820 parallel, initialized);
8821 }
8822
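/* E.g. (illustrative): a two-part variable whose parts could not be
   merged, (reg:SI 0) at offset 0 and (mem:SI (plus sp 16)) at offset
   4, yields NOTE_VAR_LOCATION contents of the form
   (var_location decl (parallel [(expr_list (reg:SI 0) (const_int 0))
				 (expr_list (mem:SI ...) (const_int 4))])).  */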
8823 if (where != EMIT_NOTE_BEFORE_INSN)
8824 {
8825 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8826 if (where == EMIT_NOTE_AFTER_CALL_INSN)
8827 NOTE_DURING_CALL_P (note) = true;
8828 }
8829 else
8830 {
8831 /* Make sure that the call-related notes come first. */
8832 while (NEXT_INSN (insn)
8833 && NOTE_P (insn)
8834 && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8835 && NOTE_DURING_CALL_P (insn))
8836 || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
8837 insn = NEXT_INSN (insn);
8838 if (NOTE_P (insn)
8839 && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8840 && NOTE_DURING_CALL_P (insn))
8841 || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
8842 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8843 else
8844 note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
8845 }
8846 NOTE_VAR_LOCATION (note) = note_vl;
8847
8848 set_dv_changed (var->dv, false);
8849 gcc_assert (var->in_changed_variables);
8850 var->in_changed_variables = false;
8851 changed_variables->clear_slot (varp);
8852
8853 /* Continue traversing the hash table. */
8854 return 1;
8855 }
8856
8857 /* While traversing changed_variables, push onto CHANGED_VALUES_STACK (a
8858 stack of RTX values) entries that aren't user variables. */
8859
8860 int
8861 var_track_values_to_stack (variable_def **slot,
8862 vec<rtx, va_heap> *changed_values_stack)
8863 {
8864 variable var = *slot;
8865
8866 if (var->onepart == ONEPART_VALUE)
8867 changed_values_stack->safe_push (dv_as_value (var->dv));
8868 else if (var->onepart == ONEPART_DEXPR)
8869 changed_values_stack->safe_push (DECL_RTL_KNOWN_SET (dv_as_decl (var->dv)));
8870
8871 return 1;
8872 }
8873
8874 /* Remove from changed_variables the entry whose DV corresponds to
8875 value or debug_expr VAL. */
8876 static void
8877 remove_value_from_changed_variables (rtx val)
8878 {
8879 decl_or_value dv = dv_from_rtx (val);
8880 variable_def **slot;
8881 variable var;
8882
8883 slot = changed_variables->find_slot_with_hash (dv, dv_htab_hash (dv),
8884 NO_INSERT);
8885 var = *slot;
8886 var->in_changed_variables = false;
8887 changed_variables->clear_slot (slot);
8888 }
8889
8890 /* If VAL (a value or debug_expr) has backlinks to variables actively
8891 dependent on it in HTAB or in CHANGED_VARIABLES, mark them as
8892 changed, adding to CHANGED_VALUES_STACK any dependencies that may
8893 have dependencies of their own to notify. */
8894
8895 static void
8896 notify_dependents_of_changed_value (rtx val, variable_table_type *htab,
8897 vec<rtx, va_heap> *changed_values_stack)
8898 {
8899 variable_def **slot;
8900 variable var;
8901 loc_exp_dep *led;
8902 decl_or_value dv = dv_from_rtx (val);
8903
8904 slot = changed_variables->find_slot_with_hash (dv, dv_htab_hash (dv),
8905 NO_INSERT);
8906 if (!slot)
8907 slot = htab->find_slot_with_hash (dv, dv_htab_hash (dv), NO_INSERT);
8908 if (!slot)
8909 slot = dropped_values->find_slot_with_hash (dv, dv_htab_hash (dv),
8910 NO_INSERT);
8911 var = *slot;
8912
8913 while ((led = VAR_LOC_DEP_LST (var)))
8914 {
8915 decl_or_value ldv = led->dv;
8916 variable ivar;
8917
8918 /* Deactivate and remove the backlink, as it was "used up". It
8919 makes no sense to attempt to notify the same entity again:
8920 either it will be recomputed and re-register an active
8921 dependency, or it will still have the changed mark. */
8922 if (led->next)
8923 led->next->pprev = led->pprev;
8924 if (led->pprev)
8925 *led->pprev = led->next;
8926 led->next = NULL;
8927 led->pprev = NULL;
8928
8929 if (dv_changed_p (ldv))
8930 continue;
8931
8932 switch (dv_onepart_p (ldv))
8933 {
8934 case ONEPART_VALUE:
8935 case ONEPART_DEXPR:
8936 set_dv_changed (ldv, true);
8937 changed_values_stack->safe_push (dv_as_rtx (ldv));
8938 break;
8939
8940 case ONEPART_VDECL:
8941 ivar = htab->find_with_hash (ldv, dv_htab_hash (ldv));
8942 gcc_checking_assert (!VAR_LOC_DEP_LST (ivar));
8943 variable_was_changed (ivar, NULL);
8944 break;
8945
8946 case NOT_ONEPART:
8947 delete led;
8948 ivar = htab->find_with_hash (ldv, dv_htab_hash (ldv));
8949 if (ivar)
8950 {
8951 int i = ivar->n_var_parts;
8952 while (i--)
8953 {
8954 rtx loc = ivar->var_part[i].cur_loc;
8955
8956 if (loc && GET_CODE (loc) == MEM
8957 && XEXP (loc, 0) == val)
8958 {
8959 variable_was_changed (ivar, NULL);
8960 break;
8961 }
8962 }
8963 }
8964 break;
8965
8966 default:
8967 gcc_unreachable ();
8968 }
8969 }
8970 }
8971
8972 /* Take out of changed_variables any entries that don't refer to user
8973 variables. Back-propagate change notifications from values and
8974 debug_exprs to their active dependencies in HTAB or in
8975 CHANGED_VARIABLES. */
8976
8977 static void
8978 process_changed_values (variable_table_type *htab)
8979 {
8980 int i, n;
8981 rtx val;
8982 auto_vec<rtx, 20> changed_values_stack;
8983
8984 /* Move values from changed_variables to changed_values_stack. */
8985 changed_variables
8986 ->traverse <vec<rtx, va_heap>*, var_track_values_to_stack>
8987 (&changed_values_stack);
8988
8989 /* Back-propagate change notifications in values while popping
8990 them from the stack. */
8991 for (n = i = changed_values_stack.length ();
8992 i > 0; i = changed_values_stack.length ())
8993 {
8994 val = changed_values_stack.pop ();
8995 notify_dependents_of_changed_value (val, htab, &changed_values_stack);
8996
8997 /* This condition will hold when visiting each of the entries
8998 originally in changed_variables. We can't remove them
8999 earlier because this could drop the backlinks before we got a
9000 chance to use them. */
9001 if (i == n)
9002 {
9003 remove_value_from_changed_variables (val);
9004 n--;
9005 }
9006 }
9007 }
9008
9009 /* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
9010 CHANGED_VARIABLES and delete this chain. WHERE specifies whether
9011 the notes shall be emitted before or after instruction INSN. */
9012
9013 static void
9014 emit_notes_for_changes (rtx_insn *insn, enum emit_note_where where,
9015 shared_hash vars)
9016 {
9017 emit_note_data data;
9018 variable_table_type *htab = shared_hash_htab (vars);
9019
9020 if (!changed_variables->elements ())
9021 return;
9022
9023 if (MAY_HAVE_DEBUG_INSNS)
9024 process_changed_values (htab);
9025
9026 data.insn = insn;
9027 data.where = where;
9028 data.vars = htab;
9029
9030 changed_variables
9031 ->traverse <emit_note_data*, emit_note_insn_var_location> (&data);
9032 }
9033
9034 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
9035 same variable in hash table NEW_VARS or is not there at all. */
9036
9037 int
9038 emit_notes_for_differences_1 (variable_def **slot, variable_table_type *new_vars)
9039 {
9040 variable old_var, new_var;
9041
9042 old_var = *slot;
9043 new_var = new_vars->find_with_hash (old_var->dv, dv_htab_hash (old_var->dv));
9044
9045 if (!new_var)
9046 {
9047 /* Variable has disappeared. */
9048 variable empty_var = NULL;
9049
9050 if (old_var->onepart == ONEPART_VALUE
9051 || old_var->onepart == ONEPART_DEXPR)
9052 {
9053 empty_var = variable_from_dropped (old_var->dv, NO_INSERT);
9054 if (empty_var)
9055 {
9056 gcc_checking_assert (!empty_var->in_changed_variables);
9057 if (!VAR_LOC_1PAUX (old_var))
9058 {
9059 VAR_LOC_1PAUX (old_var) = VAR_LOC_1PAUX (empty_var);
9060 VAR_LOC_1PAUX (empty_var) = NULL;
9061 }
9062 else
9063 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
9064 }
9065 }
9066
9067 if (!empty_var)
9068 {
9069 empty_var = onepart_pool (old_var->onepart).allocate ();
9070 empty_var->dv = old_var->dv;
9071 empty_var->refcount = 0;
9072 empty_var->n_var_parts = 0;
9073 empty_var->onepart = old_var->onepart;
9074 empty_var->in_changed_variables = false;
9075 }
9076
9077 if (empty_var->onepart)
9078 {
9079 /* Propagate the auxiliary data to (ultimately)
9080 changed_variables. */
9081 empty_var->var_part[0].loc_chain = NULL;
9082 empty_var->var_part[0].cur_loc = NULL;
9083 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (old_var);
9084 VAR_LOC_1PAUX (old_var) = NULL;
9085 }
9086 variable_was_changed (empty_var, NULL);
9087 /* Continue traversing the hash table. */
9088 return 1;
9089 }
9090 /* Update cur_loc and one-part auxiliary data, before new_var goes
9091 through variable_was_changed. */
9092 if (old_var != new_var && new_var->onepart)
9093 {
9094 gcc_checking_assert (VAR_LOC_1PAUX (new_var) == NULL);
9095 VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (old_var);
9096 VAR_LOC_1PAUX (old_var) = NULL;
9097 new_var->var_part[0].cur_loc = old_var->var_part[0].cur_loc;
9098 }
9099 if (variable_different_p (old_var, new_var))
9100 variable_was_changed (new_var, NULL);
9101
9102 /* Continue traversing the hash table. */
9103 return 1;
9104 }
9105
9106 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
9107 table OLD_VARS. */
9108
9109 int
9110 emit_notes_for_differences_2 (variable_def **slot, variable_table_type *old_vars)
9111 {
9112 variable old_var, new_var;
9113
9114 new_var = *slot;
9115 old_var = old_vars->find_with_hash (new_var->dv, dv_htab_hash (new_var->dv));
9116 if (!old_var)
9117 {
9118 int i;
9119 for (i = 0; i < new_var->n_var_parts; i++)
9120 new_var->var_part[i].cur_loc = NULL;
9121 variable_was_changed (new_var, NULL);
9122 }
9123
9124 /* Continue traversing the hash table. */
9125 return 1;
9126 }
9127
9128 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
9129 NEW_SET. */
9130
9131 static void
9132 emit_notes_for_differences (rtx_insn *insn, dataflow_set *old_set,
9133 dataflow_set *new_set)
9134 {
9135 shared_hash_htab (old_set->vars)
9136 ->traverse <variable_table_type *, emit_notes_for_differences_1>
9137 (shared_hash_htab (new_set->vars));
9138 shared_hash_htab (new_set->vars)
9139 ->traverse <variable_table_type *, emit_notes_for_differences_2>
9140 (shared_hash_htab (old_set->vars));
9141 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
9142 }
9143
9144 /* Return the next insn after INSN that is not a NOTE_INSN_VAR_LOCATION. */
9145
9146 static rtx_insn *
9147 next_non_note_insn_var_location (rtx_insn *insn)
9148 {
9149 while (insn)
9150 {
9151 insn = NEXT_INSN (insn);
9152 if (insn == 0
9153 || !NOTE_P (insn)
9154 || NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION)
9155 break;
9156 }
9157
9158 return insn;
9159 }
9160
9161 /* Emit the notes for changes of location parts in the basic block BB. */
9162
9163 static void
9164 emit_notes_in_bb (basic_block bb, dataflow_set *set)
9165 {
9166 unsigned int i;
9167 micro_operation *mo;
9168
9169 dataflow_set_clear (set);
9170 dataflow_set_copy (set, &VTI (bb)->in);
9171
9172 FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
9173 {
9174 rtx_insn *insn = mo->insn;
9175 rtx_insn *next_insn = next_non_note_insn_var_location (insn);
9176
9177 switch (mo->type)
9178 {
9179 case MO_CALL:
9180 dataflow_set_clear_at_call (set);
9181 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
9182 {
9183 rtx arguments = mo->u.loc, *p = &arguments;
9184 rtx_note *note;
9185 while (*p)
9186 {
9187 XEXP (XEXP (*p, 0), 1)
9188 = vt_expand_loc (XEXP (XEXP (*p, 0), 1),
9189 shared_hash_htab (set->vars));
9190 /* If expansion is successful, keep it in the list. */
9191 if (XEXP (XEXP (*p, 0), 1))
9192 p = &XEXP (*p, 1);
9193 /* Otherwise, if the following item is the data_value for it,
9194 drop it too. */
9195 else if (XEXP (*p, 1)
9196 && REG_P (XEXP (XEXP (*p, 0), 0))
9197 && MEM_P (XEXP (XEXP (XEXP (*p, 1), 0), 0))
9198 && REG_P (XEXP (XEXP (XEXP (XEXP (*p, 1), 0), 0),
9199 0))
9200 && REGNO (XEXP (XEXP (*p, 0), 0))
9201 == REGNO (XEXP (XEXP (XEXP (XEXP (*p, 1), 0),
9202 0), 0)))
9203 *p = XEXP (XEXP (*p, 1), 1);
9204 /* Just drop this item. */
9205 else
9206 *p = XEXP (*p, 1);
9207 }
9208 note = emit_note_after (NOTE_INSN_CALL_ARG_LOCATION, insn);
9209 NOTE_VAR_LOCATION (note) = arguments;
9210 }
9211 break;
9212
9213 case MO_USE:
9214 {
9215 rtx loc = mo->u.loc;
9216
9217 if (REG_P (loc))
9218 var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
9219 else
9220 var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
9221
9222 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
9223 }
9224 break;
9225
9226 case MO_VAL_LOC:
9227 {
9228 rtx loc = mo->u.loc;
9229 rtx val, vloc;
9230 tree var;
9231
9232 if (GET_CODE (loc) == CONCAT)
9233 {
9234 val = XEXP (loc, 0);
9235 vloc = XEXP (loc, 1);
9236 }
9237 else
9238 {
9239 val = NULL_RTX;
9240 vloc = loc;
9241 }
9242
9243 var = PAT_VAR_LOCATION_DECL (vloc);
9244
9245 clobber_variable_part (set, NULL_RTX,
9246 dv_from_decl (var), 0, NULL_RTX);
9247 if (val)
9248 {
9249 if (VAL_NEEDS_RESOLUTION (loc))
9250 val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
9251 set_variable_part (set, val, dv_from_decl (var), 0,
9252 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
9253 INSERT);
9254 }
9255 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
9256 set_variable_part (set, PAT_VAR_LOCATION_LOC (vloc),
9257 dv_from_decl (var), 0,
9258 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
9259 INSERT);
9260
9261 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
9262 }
9263 break;
9264
9265 case MO_VAL_USE:
9266 {
9267 rtx loc = mo->u.loc;
9268 rtx val, vloc, uloc;
9269
9270 vloc = uloc = XEXP (loc, 1);
9271 val = XEXP (loc, 0);
9272
9273 if (GET_CODE (val) == CONCAT)
9274 {
9275 uloc = XEXP (val, 1);
9276 val = XEXP (val, 0);
9277 }
9278
9279 if (VAL_NEEDS_RESOLUTION (loc))
9280 val_resolve (set, val, vloc, insn);
9281 else
9282 val_store (set, val, uloc, insn, false);
9283
9284 if (VAL_HOLDS_TRACK_EXPR (loc))
9285 {
9286 if (GET_CODE (uloc) == REG)
9287 var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
9288 NULL);
9289 else if (GET_CODE (uloc) == MEM)
9290 var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
9291 NULL);
9292 }
9293
9294 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
9295 }
9296 break;
9297
9298 case MO_VAL_SET:
9299 {
9300 rtx loc = mo->u.loc;
9301 rtx val, vloc, uloc;
9302 rtx dstv, srcv;
9303
9304 vloc = loc;
9305 uloc = XEXP (vloc, 1);
9306 val = XEXP (vloc, 0);
9307 vloc = uloc;
9308
9309 if (GET_CODE (uloc) == SET)
9310 {
9311 dstv = SET_DEST (uloc);
9312 srcv = SET_SRC (uloc);
9313 }
9314 else
9315 {
9316 dstv = uloc;
9317 srcv = NULL;
9318 }
9319
9320 if (GET_CODE (val) == CONCAT)
9321 {
9322 dstv = vloc = XEXP (val, 1);
9323 val = XEXP (val, 0);
9324 }
9325
9326 if (GET_CODE (vloc) == SET)
9327 {
9328 srcv = SET_SRC (vloc);
9329
9330 gcc_assert (val != srcv);
9331 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
9332
9333 dstv = vloc = SET_DEST (vloc);
9334
9335 if (VAL_NEEDS_RESOLUTION (loc))
9336 val_resolve (set, val, srcv, insn);
9337 }
9338 else if (VAL_NEEDS_RESOLUTION (loc))
9339 {
9340 gcc_assert (GET_CODE (uloc) == SET
9341 && GET_CODE (SET_SRC (uloc)) == REG);
9342 val_resolve (set, val, SET_SRC (uloc), insn);
9343 }
9344
9345 if (VAL_HOLDS_TRACK_EXPR (loc))
9346 {
9347 if (VAL_EXPR_IS_CLOBBERED (loc))
9348 {
9349 if (REG_P (uloc))
9350 var_reg_delete (set, uloc, true);
9351 else if (MEM_P (uloc))
9352 {
9353 gcc_assert (MEM_P (dstv));
9354 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
9355 var_mem_delete (set, dstv, true);
9356 }
9357 }
9358 else
9359 {
9360 bool copied_p = VAL_EXPR_IS_COPIED (loc);
9361 rtx src = NULL, dst = uloc;
9362 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
9363
9364 if (GET_CODE (uloc) == SET)
9365 {
9366 src = SET_SRC (uloc);
9367 dst = SET_DEST (uloc);
9368 }
9369
9370 if (copied_p)
9371 {
9372 status = find_src_status (set, src);
9373
9374 src = find_src_set_src (set, src);
9375 }
9376
9377 if (REG_P (dst))
9378 var_reg_delete_and_set (set, dst, !copied_p,
9379 status, srcv);
9380 else if (MEM_P (dst))
9381 {
9382 gcc_assert (MEM_P (dstv));
9383 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
9384 var_mem_delete_and_set (set, dstv, !copied_p,
9385 status, srcv);
9386 }
9387 }
9388 }
9389 else if (REG_P (uloc))
9390 var_regno_delete (set, REGNO (uloc));
9391 else if (MEM_P (uloc))
9392 {
9393 gcc_checking_assert (GET_CODE (vloc) == MEM);
9394 gcc_checking_assert (vloc == dstv);
9395 if (vloc != dstv)
9396 clobber_overlapping_mems (set, vloc);
9397 }
9398
9399 val_store (set, val, dstv, insn, true);
9400
9401 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9402 set->vars);
9403 }
9404 break;
9405
9406 case MO_SET:
9407 {
9408 rtx loc = mo->u.loc;
9409 rtx set_src = NULL;
9410
9411 if (GET_CODE (loc) == SET)
9412 {
9413 set_src = SET_SRC (loc);
9414 loc = SET_DEST (loc);
9415 }
9416
9417 if (REG_P (loc))
9418 var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
9419 set_src);
9420 else
9421 var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
9422 set_src);
9423
9424 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9425 set->vars);
9426 }
9427 break;
9428
9429 case MO_COPY:
9430 {
9431 rtx loc = mo->u.loc;
9432 enum var_init_status src_status;
9433 rtx set_src = NULL;
9434
9435 if (GET_CODE (loc) == SET)
9436 {
9437 set_src = SET_SRC (loc);
9438 loc = SET_DEST (loc);
9439 }
9440
9441 src_status = find_src_status (set, set_src);
9442 set_src = find_src_set_src (set, set_src);
9443
9444 if (REG_P (loc))
9445 var_reg_delete_and_set (set, loc, false, src_status, set_src);
9446 else
9447 var_mem_delete_and_set (set, loc, false, src_status, set_src);
9448
9449 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9450 set->vars);
9451 }
9452 break;
9453
9454 case MO_USE_NO_VAR:
9455 {
9456 rtx loc = mo->u.loc;
9457
9458 if (REG_P (loc))
9459 var_reg_delete (set, loc, false);
9460 else
9461 var_mem_delete (set, loc, false);
9462
9463 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
9464 }
9465 break;
9466
9467 case MO_CLOBBER:
9468 {
9469 rtx loc = mo->u.loc;
9470
9471 if (REG_P (loc))
9472 var_reg_delete (set, loc, true);
9473 else
9474 var_mem_delete (set, loc, true);
9475
9476 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9477 set->vars);
9478 }
9479 break;
9480
9481 case MO_ADJUST:
9482 set->stack_adjust += mo->u.adjust;
9483 break;
9484 }
9485 }
9486 }
9487
9488 /* Emit notes for the whole function. */
9489
9490 static void
9491 vt_emit_notes (void)
9492 {
9493 basic_block bb;
9494 dataflow_set cur;
9495
9496 gcc_assert (!changed_variables->elements ());
9497
9498 /* Free memory occupied by the out hash tables, as they aren't used
9499 anymore. */
9500 FOR_EACH_BB_FN (bb, cfun)
9501 dataflow_set_clear (&VTI (bb)->out);
9502
9503 /* Enable emitting notes by functions (mainly by set_variable_part and
9504 delete_variable_part). */
9505 emit_notes = true;
9506
9507 if (MAY_HAVE_DEBUG_INSNS)
9508 {
9509 dropped_values = new variable_table_type (cselib_get_next_uid () * 2);
9510 }
9511
9512 dataflow_set_init (&cur);
9513
9514 FOR_EACH_BB_FN (bb, cfun)
9515 {
9516 /* Emit the notes for changes of variable locations between two
9517 consecutive basic blocks. */
9518 emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);
9519
9520 if (MAY_HAVE_DEBUG_INSNS)
9521 local_get_addr_cache = new hash_map<rtx, rtx>;
9522
9523 /* Emit the notes for the changes in the basic block itself. */
9524 emit_notes_in_bb (bb, &cur);
9525
9526 if (MAY_HAVE_DEBUG_INSNS)
9527 delete local_get_addr_cache;
9528 local_get_addr_cache = NULL;
9529
9530 /* Free memory occupied by the in hash table; we won't need it
9531 again. */
9532 dataflow_set_clear (&VTI (bb)->in);
9533 }
9534 #ifdef ENABLE_CHECKING
9535 shared_hash_htab (cur.vars)
9536 ->traverse <variable_table_type *, emit_notes_for_differences_1>
9537 (shared_hash_htab (empty_shared_hash));
9538 #endif
9539 dataflow_set_destroy (&cur);
9540
9541 if (MAY_HAVE_DEBUG_INSNS)
9542 delete dropped_values;
9543 dropped_values = NULL;
9544
9545 emit_notes = false;
9546 }
9547
9548 /* If there is a declaration and offset associated with register/memory RTL,
9549 assign the declaration to *DECLP and the offset to *OFFSETP, and return true. */
9550
9551 static bool
9552 vt_get_decl_and_offset (rtx rtl, tree *declp, HOST_WIDE_INT *offsetp)
9553 {
9554 if (REG_P (rtl))
9555 {
9556 if (REG_ATTRS (rtl))
9557 {
9558 *declp = REG_EXPR (rtl);
9559 *offsetp = REG_OFFSET (rtl);
9560 return true;
9561 }
9562 }
9563 else if (GET_CODE (rtl) == PARALLEL)
9564 {
9565 tree decl = NULL_TREE;
9566 HOST_WIDE_INT offset = MAX_VAR_PARTS;
9567 int len = XVECLEN (rtl, 0), i;
9568
9569 for (i = 0; i < len; i++)
9570 {
9571 rtx reg = XEXP (XVECEXP (rtl, 0, i), 0);
9572 if (!REG_P (reg) || !REG_ATTRS (reg))
9573 break;
9574 if (!decl)
9575 decl = REG_EXPR (reg);
9576 if (REG_EXPR (reg) != decl)
9577 break;
9578 if (REG_OFFSET (reg) < offset)
9579 offset = REG_OFFSET (reg);
9580 }
9581
9582 if (i == len)
9583 {
9584 *declp = decl;
9585 *offsetp = offset;
9586 return true;
9587 }
9588 }
9589 else if (MEM_P (rtl))
9590 {
9591 if (MEM_ATTRS (rtl))
9592 {
9593 *declp = MEM_EXPR (rtl);
9594 *offsetp = INT_MEM_OFFSET (rtl);
9595 return true;
9596 }
9597 }
9598 return false;
9599 }
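/* E.g. (illustrative): for a PARALLEL of two SImode registers whose
   REG_EXPRs name the same decl with REG_OFFSETs 4 and 0, this returns
   that decl and offset 0, the minimum offset among the members.  */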
9600
9601 /* Record the value for the ENTRY_VALUE of RTL as a global equivalence
9602 of VAL. */
9603
9604 static void
9605 record_entry_value (cselib_val *val, rtx rtl)
9606 {
9607 rtx ev = gen_rtx_ENTRY_VALUE (GET_MODE (rtl));
9608
9609 ENTRY_VALUE_EXP (ev) = rtl;
9610
9611 cselib_add_permanent_equiv (val, ev, get_insns ());
9612 }
9613
9614 /* Insert function parameter PARM in IN and OUT sets of ENTRY_BLOCK. */
9615
9616 static void
9617 vt_add_function_parameter (tree parm)
9618 {
9619 rtx decl_rtl = DECL_RTL_IF_SET (parm);
9620 rtx incoming = DECL_INCOMING_RTL (parm);
9621 tree decl;
9622 machine_mode mode;
9623 HOST_WIDE_INT offset;
9624 dataflow_set *out;
9625 decl_or_value dv;
9626
9627 if (TREE_CODE (parm) != PARM_DECL)
9628 return;
9629
9630 if (!decl_rtl || !incoming)
9631 return;
9632
9633 if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
9634 return;
9635
9636 /* If there is a DRAP register or a pseudo in internal_arg_pointer,
9637 rewrite the incoming location of parameters passed on the stack
9638 into MEMs based on the argument pointer, so that incoming doesn't
9639 depend on a pseudo. */
9640 if (MEM_P (incoming)
9641 && (XEXP (incoming, 0) == crtl->args.internal_arg_pointer
9642 || (GET_CODE (XEXP (incoming, 0)) == PLUS
9643 && XEXP (XEXP (incoming, 0), 0)
9644 == crtl->args.internal_arg_pointer
9645 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
9646 {
9647 HOST_WIDE_INT off = -FIRST_PARM_OFFSET (current_function_decl);
9648 if (GET_CODE (XEXP (incoming, 0)) == PLUS)
9649 off += INTVAL (XEXP (XEXP (incoming, 0), 1));
9650 incoming
9651 = replace_equiv_address_nv (incoming,
9652 plus_constant (Pmode,
9653 arg_pointer_rtx, off));
9654 }
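/* E.g. (illustrative, assuming a FIRST_PARM_OFFSET of 0): an incoming
   (mem:SI (plus (reg pseudo-AP) (const_int 8))) becomes
   (mem:SI (plus (reg argp) (const_int 8))), so the recorded location
   no longer depends on a pseudo that this pass cannot track.  */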
9655
9656 #ifdef HAVE_window_save
9657 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
9658 If the target machine has an explicit window save instruction, the
9659 actual entry value is the corresponding OUTGOING_REGNO instead. */
9660 if (HAVE_window_save && !crtl->uses_only_leaf_regs)
9661 {
9662 if (REG_P (incoming)
9663 && HARD_REGISTER_P (incoming)
9664 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
9665 {
9666 parm_reg_t p;
9667 p.incoming = incoming;
9668 incoming
9669 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
9670 OUTGOING_REGNO (REGNO (incoming)), 0);
9671 p.outgoing = incoming;
9672 vec_safe_push (windowed_parm_regs, p);
9673 }
9674 else if (GET_CODE (incoming) == PARALLEL)
9675 {
9676 rtx outgoing
9677 = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (XVECLEN (incoming, 0)));
9678 int i;
9679
9680 for (i = 0; i < XVECLEN (incoming, 0); i++)
9681 {
9682 rtx reg = XEXP (XVECEXP (incoming, 0, i), 0);
9683 parm_reg_t p;
9684 p.incoming = reg;
9685 reg = gen_rtx_REG_offset (reg, GET_MODE (reg),
9686 OUTGOING_REGNO (REGNO (reg)), 0);
9687 p.outgoing = reg;
9688 XVECEXP (outgoing, 0, i)
9689 = gen_rtx_EXPR_LIST (VOIDmode, reg,
9690 XEXP (XVECEXP (incoming, 0, i), 1));
9691 vec_safe_push (windowed_parm_regs, p);
9692 }
9693
9694 incoming = outgoing;
9695 }
9696 else if (MEM_P (incoming)
9697 && REG_P (XEXP (incoming, 0))
9698 && HARD_REGISTER_P (XEXP (incoming, 0)))
9699 {
9700 rtx reg = XEXP (incoming, 0);
9701 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
9702 {
9703 parm_reg_t p;
9704 p.incoming = reg;
9705 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
9706 p.outgoing = reg;
9707 vec_safe_push (windowed_parm_regs, p);
9708 incoming = replace_equiv_address_nv (incoming, reg);
9709 }
9710 }
9711 }
9712 #endif
9713
9714 if (!vt_get_decl_and_offset (incoming, &decl, &offset))
9715 {
9716 if (MEM_P (incoming))
9717 {
9718 /* This means the argument is passed by invisible reference. */
9719 offset = 0;
9720 decl = parm;
9721 }
9722 else
9723 {
9724 if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
9725 return;
9726 offset += byte_lowpart_offset (GET_MODE (incoming),
9727 GET_MODE (decl_rtl));
9728 }
9729 }
9730
9731 if (!decl)
9732 return;
9733
9734 if (parm != decl)
9735 {
9736 /* If that DECL_RTL wasn't a pseudo that got spilled to
9737 memory, bail out. Otherwise, the spill slot sharing code
9738 will force the memory to reference spill_slot_decl (%sfp),
9739 so we don't match above. That's ok, the pseudo must have
9740 referenced the entire parameter, so just reset OFFSET. */
9741 if (decl != get_spill_slot_decl (false))
9742 return;
9743 offset = 0;
9744 }
9745
9746 if (!track_loc_p (incoming, parm, offset, false, &mode, &offset))
9747 return;
9748
9749 out = &VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out;
9750
9751 dv = dv_from_decl (parm);
9752
9753 if (target_for_debug_bind (parm)
9754 /* We can't deal with these right now, because this kind of
9755 variable is single-part. ??? We could handle parallels
9756 that describe multiple locations for the same single
9757 value, but ATM we don't. */
9758 && GET_CODE (incoming) != PARALLEL)
9759 {
9760 cselib_val *val;
9761 rtx lowpart;
9762
9763 /* ??? We shouldn't ever hit this, but it may happen because
9764 arguments passed by invisible reference aren't dealt with
9765 above: incoming-rtl will have Pmode rather than the
9766 expected mode for the type. */
9767 if (offset)
9768 return;
9769
9770 lowpart = var_lowpart (mode, incoming);
9771 if (!lowpart)
9772 return;
9773
9774 val = cselib_lookup_from_insn (lowpart, mode, true,
9775 VOIDmode, get_insns ());
9776
9777 /* ??? Float-typed values in memory are not handled by
9778 cselib. */
9779 if (val)
9780 {
9781 preserve_value (val);
9782 set_variable_part (out, val->val_rtx, dv, offset,
9783 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9784 dv = dv_from_value (val->val_rtx);
9785 }
9786
9787 if (MEM_P (incoming))
9788 {
9789 val = cselib_lookup_from_insn (XEXP (incoming, 0), mode, true,
9790 VOIDmode, get_insns ());
9791 if (val)
9792 {
9793 preserve_value (val);
9794 incoming = replace_equiv_address_nv (incoming, val->val_rtx);
9795 }
9796 }
9797 }
9798
9799 if (REG_P (incoming))
9800 {
9801 incoming = var_lowpart (mode, incoming);
9802 gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
9803 attrs_list_insert (&out->regs[REGNO (incoming)], dv, offset,
9804 incoming);
9805 set_variable_part (out, incoming, dv, offset,
9806 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9807 if (dv_is_value_p (dv))
9808 {
9809 record_entry_value (CSELIB_VAL_PTR (dv_as_value (dv)), incoming);
9810 if (TREE_CODE (TREE_TYPE (parm)) == REFERENCE_TYPE
9811 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))
9812 {
9813 machine_mode indmode
9814 = TYPE_MODE (TREE_TYPE (TREE_TYPE (parm)));
9815 rtx mem = gen_rtx_MEM (indmode, incoming);
9816 cselib_val *val = cselib_lookup_from_insn (mem, indmode, true,
9817 VOIDmode,
9818 get_insns ());
9819 if (val)
9820 {
9821 preserve_value (val);
9822 record_entry_value (val, mem);
9823 set_variable_part (out, mem, dv_from_value (val->val_rtx), 0,
9824 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9825 }
9826 }
9827 }
9828 }
9829 else if (GET_CODE (incoming) == PARALLEL && !dv_onepart_p (dv))
9830 {
9831 int i;
9832
9833 for (i = 0; i < XVECLEN (incoming, 0); i++)
9834 {
9835 rtx reg = XEXP (XVECEXP (incoming, 0, i), 0);
9836 offset = REG_OFFSET (reg);
9837 gcc_assert (REGNO (reg) < FIRST_PSEUDO_REGISTER);
9838 attrs_list_insert (&out->regs[REGNO (reg)], dv, offset, reg);
9839 set_variable_part (out, reg, dv, offset,
9840 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9841 }
9842 }
9843 else if (MEM_P (incoming))
9844 {
9845 incoming = var_lowpart (mode, incoming);
9846 set_variable_part (out, incoming, dv, offset,
9847 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9848 }
9849 }
9850
9851 /* Insert function parameters to IN and OUT sets of ENTRY_BLOCK. */
9852
9853 static void
9854 vt_add_function_parameters (void)
9855 {
9856 tree parm;
9857
9858 for (parm = DECL_ARGUMENTS (current_function_decl);
9859 parm; parm = DECL_CHAIN (parm))
9860 if (!POINTER_BOUNDS_P (parm))
9861 vt_add_function_parameter (parm);
9862
9863 if (DECL_HAS_VALUE_EXPR_P (DECL_RESULT (current_function_decl)))
9864 {
9865 tree vexpr = DECL_VALUE_EXPR (DECL_RESULT (current_function_decl));
9866
9867 if (TREE_CODE (vexpr) == INDIRECT_REF)
9868 vexpr = TREE_OPERAND (vexpr, 0);
9869
9870 if (TREE_CODE (vexpr) == PARM_DECL
9871 && DECL_ARTIFICIAL (vexpr)
9872 && !DECL_IGNORED_P (vexpr)
9873 && DECL_NAMELESS (vexpr))
9874 vt_add_function_parameter (vexpr);
9875 }
9876 }
9877
9878 /* Initialize cfa_base_rtx, create a preserved VALUE for it and
9879 ensure it isn't flushed during cselib_reset_table.
9880 Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
9881 has been eliminated. */
9882
9883 static void
9884 vt_init_cfa_base (void)
9885 {
9886 cselib_val *val;
9887
9888 #ifdef FRAME_POINTER_CFA_OFFSET
9889 cfa_base_rtx = frame_pointer_rtx;
9890 cfa_base_offset = -FRAME_POINTER_CFA_OFFSET (current_function_decl);
9891 #else
9892 cfa_base_rtx = arg_pointer_rtx;
9893 cfa_base_offset = -ARG_POINTER_CFA_OFFSET (current_function_decl);
9894 #endif
9895 if (cfa_base_rtx == hard_frame_pointer_rtx
9896 || !fixed_regs[REGNO (cfa_base_rtx)])
9897 {
9898 cfa_base_rtx = NULL_RTX;
9899 return;
9900 }
9901 if (!MAY_HAVE_DEBUG_INSNS)
9902 return;
9903
9904 /* Tell alias analysis that cfa_base_rtx should share
9905 find_base_term value with stack pointer or hard frame pointer. */
9906 if (!frame_pointer_needed)
9907 vt_equate_reg_base_value (cfa_base_rtx, stack_pointer_rtx);
9908 else if (!crtl->stack_realign_tried)
9909 vt_equate_reg_base_value (cfa_base_rtx, hard_frame_pointer_rtx);
9910
9911 val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1,
9912 VOIDmode, get_insns ());
9913 preserve_value (val);
9914 cselib_preserve_cfa_base_value (val, REGNO (cfa_base_rtx));
9915 }
9916
9917 /* Allocate and initialize the data structures for variable tracking
9918 and parse the RTL to get the micro operations. */
9919
9920 static bool
9921 vt_initialize (void)
9922 {
9923 basic_block bb;
9924 HOST_WIDE_INT fp_cfa_offset = -1;
9925
9926 alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));
9927
9928 empty_shared_hash = new shared_hash_def;
9929 empty_shared_hash->refcount = 1;
9930 empty_shared_hash->htab = new variable_table_type (1);
9931 changed_variables = new variable_table_type (10);
9932
9933 /* Init the IN and OUT sets. */
9934 FOR_ALL_BB_FN (bb, cfun)
9935 {
9936 VTI (bb)->visited = false;
9937 VTI (bb)->flooded = false;
9938 dataflow_set_init (&VTI (bb)->in);
9939 dataflow_set_init (&VTI (bb)->out);
9940 VTI (bb)->permp = NULL;
9941 }
9942
9943 if (MAY_HAVE_DEBUG_INSNS)
9944 {
9945 cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
9946 scratch_regs = BITMAP_ALLOC (NULL);
9947 preserved_values.create (256);
9948 global_get_addr_cache = new hash_map<rtx, rtx>;
9949 }
9950 else
9951 {
9952 scratch_regs = NULL;
9953 global_get_addr_cache = NULL;
9954 }
9955
9956 if (MAY_HAVE_DEBUG_INSNS)
9957 {
9958 rtx reg, expr;
9959 int ofst;
9960 cselib_val *val;
9961
9962 #ifdef FRAME_POINTER_CFA_OFFSET
9963 reg = frame_pointer_rtx;
9964 ofst = FRAME_POINTER_CFA_OFFSET (current_function_decl);
9965 #else
9966 reg = arg_pointer_rtx;
9967 ofst = ARG_POINTER_CFA_OFFSET (current_function_decl);
9968 #endif
9969
9970 ofst -= INCOMING_FRAME_SP_OFFSET;
9971
9972 val = cselib_lookup_from_insn (reg, GET_MODE (reg), 1,
9973 VOIDmode, get_insns ());
9974 preserve_value (val);
9975 if (reg != hard_frame_pointer_rtx && fixed_regs[REGNO (reg)])
9976 cselib_preserve_cfa_base_value (val, REGNO (reg));
9977 expr = plus_constant (GET_MODE (stack_pointer_rtx),
9978 stack_pointer_rtx, -ofst);
9979 cselib_add_permanent_equiv (val, expr, get_insns ());
9980
9981 if (ofst)
9982 {
9983 val = cselib_lookup_from_insn (stack_pointer_rtx,
9984 GET_MODE (stack_pointer_rtx), 1,
9985 VOIDmode, get_insns ());
9986 preserve_value (val);
9987 expr = plus_constant (GET_MODE (reg), reg, ofst);
9988 cselib_add_permanent_equiv (val, expr, get_insns ());
9989 }
9990 }
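/* Worked example with made-up target values: if ARG_POINTER_CFA_OFFSET
   were 16 and INCOMING_FRAME_SP_OFFSET 8, then ofst == 8 after the
   subtraction above, and cselib would record the permanent
   equivalences

     VALUE (argp) == sp - 8     (at function entry)
     VALUE (sp)   == argp + 8

   so stack addresses keep canonicalizing against the incoming
   argument pointer even after the stack pointer moves.  */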
9991
9992 /* In order to factor out the adjustments made to the stack pointer or to
9993 the hard frame pointer and thus be able to use DW_OP_fbreg operations
9994 instead of individual location lists, we're going to rewrite MEMs based
9995 on them into MEMs based on the CFA by de-eliminating stack_pointer_rtx
9996 or hard_frame_pointer_rtx to the virtual CFA pointer frame_pointer_rtx
9997 resp. arg_pointer_rtx. We can do this either when there is no frame
9998 pointer in the function and stack adjustments are consistent for all
9999 basic blocks, or when there is a frame pointer and no stack realignment.
10000 But we first have to check that frame_pointer_rtx resp. arg_pointer_rtx
10001 has been eliminated. */
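/* Illustration with made-up offsets: in a frame-pointer-less function
   where the CFA sits 16 bytes above the current stack pointer, a slot

     (mem (plus (reg sp) (const_int 4)))

   would be rewritten, after de-elimination (and ignoring the target's
   constant CFA bias), into something like

     (mem (plus (reg argp) (const_int -12)))

   which dwarf2out can then describe with a single DW_OP_fbreg.  */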
10002 if (!frame_pointer_needed)
10003 {
10004 rtx reg, elim;
10005
10006 if (!vt_stack_adjustments ())
10007 return false;
10008
10009 #ifdef FRAME_POINTER_CFA_OFFSET
10010 reg = frame_pointer_rtx;
10011 #else
10012 reg = arg_pointer_rtx;
10013 #endif
10014 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
10015 if (elim != reg)
10016 {
10017 if (GET_CODE (elim) == PLUS)
10018 elim = XEXP (elim, 0);
10019 if (elim == stack_pointer_rtx)
10020 vt_init_cfa_base ();
10021 }
10022 }
10023 else if (!crtl->stack_realign_tried)
10024 {
10025 rtx reg, elim;
10026
10027 #ifdef FRAME_POINTER_CFA_OFFSET
10028 reg = frame_pointer_rtx;
10029 fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
10030 #else
10031 reg = arg_pointer_rtx;
10032 fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
10033 #endif
10034 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
10035 if (elim != reg)
10036 {
10037 if (GET_CODE (elim) == PLUS)
10038 {
10039 fp_cfa_offset -= INTVAL (XEXP (elim, 1));
10040 elim = XEXP (elim, 0);
10041 }
10042 if (elim != hard_frame_pointer_rtx)
10043 fp_cfa_offset = -1;
10044 }
10045 else
10046 fp_cfa_offset = -1;
10047 }
10048
10049 /* If the stack is realigned and a DRAP register is used, we're going to
10050 rewrite MEMs based on it representing incoming locations of parameters
10051 passed on the stack into MEMs based on the argument pointer. Although
10052 we aren't going to rewrite other MEMs, we still need to initialize the
10053 virtual CFA pointer in order to ensure that the argument pointer will
10054 be seen as a constant throughout the function.
10055
10056 ??? This doesn't work if FRAME_POINTER_CFA_OFFSET is defined. */
10057 else if (stack_realign_drap)
10058 {
10059 rtx reg, elim;
10060
10061 #ifdef FRAME_POINTER_CFA_OFFSET
10062 reg = frame_pointer_rtx;
10063 #else
10064 reg = arg_pointer_rtx;
10065 #endif
10066 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
10067 if (elim != reg)
10068 {
10069 if (GET_CODE (elim) == PLUS)
10070 elim = XEXP (elim, 0);
10071 if (elim == hard_frame_pointer_rtx)
10072 vt_init_cfa_base ();
10073 }
10074 }
10075
10076 hard_frame_pointer_adjustment = -1;
10077
10078 vt_add_function_parameters ();
10079
10080 FOR_EACH_BB_FN (bb, cfun)
10081 {
10082 rtx_insn *insn;
10083 HOST_WIDE_INT pre, post = 0;
10084 basic_block first_bb, last_bb;
10085
10086 if (MAY_HAVE_DEBUG_INSNS)
10087 {
10088 cselib_record_sets_hook = add_with_sets;
10089 if (dump_file && (dump_flags & TDF_DETAILS))
10090 fprintf (dump_file, "first value: %i\n",
10091 cselib_get_next_uid ());
10092 }
10093
10094 first_bb = bb;
10095 for (;;)
10096 {
10097 edge e;
10098 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
10099 || ! single_pred_p (bb->next_bb))
10100 break;
10101 e = find_edge (bb, bb->next_bb);
10102 if (! e || (e->flags & EDGE_FALLTHRU) == 0)
10103 break;
10104 bb = bb->next_bb;
10105 }
10106 last_bb = bb;
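/* At this point [first_bb, last_bb] is a maximal chain of blocks
   joined by single-predecessor fallthru edges; processing the chain
   as one extended block below lets cselib values survive across the
   internal edges.  */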
10107
10108 /* Add the micro-operations to the vector. */
10109 FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
10110 {
10111 HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
10112 VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;
10113 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
10114 insn = NEXT_INSN (insn))
10115 {
10116 if (INSN_P (insn))
10117 {
10118 if (!frame_pointer_needed)
10119 {
10120 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
10121 if (pre)
10122 {
10123 micro_operation mo;
10124 mo.type = MO_ADJUST;
10125 mo.u.adjust = pre;
10126 mo.insn = insn;
10127 if (dump_file && (dump_flags & TDF_DETAILS))
10128 log_op_type (PATTERN (insn), bb, insn,
10129 MO_ADJUST, dump_file);
10130 VTI (bb)->mos.safe_push (mo);
10131 VTI (bb)->out.stack_adjust += pre;
10132 }
10133 }
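/* Editorial sketch: for a push such as
   (set (mem (pre_dec (reg sp))) (reg X)), the sp decrement is
   presumably reported in PRE and recorded as an MO_ADJUST
   micro-operation above, while POST collects post-modifications
   that take effect after this insn (handled further below).  */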
10134
10135 cselib_hook_called = false;
10136 adjust_insn (bb, insn);
10137 if (MAY_HAVE_DEBUG_INSNS)
10138 {
10139 if (CALL_P (insn))
10140 prepare_call_arguments (bb, insn);
10141 cselib_process_insn (insn);
10142 if (dump_file && (dump_flags & TDF_DETAILS))
10143 {
10144 print_rtl_single (dump_file, insn);
10145 dump_cselib_table (dump_file);
10146 }
10147 }
10148 if (!cselib_hook_called)
10149 add_with_sets (insn, 0, 0);
10150 cancel_changes (0);
10151
10152 if (!frame_pointer_needed && post)
10153 {
10154 micro_operation mo;
10155 mo.type = MO_ADJUST;
10156 mo.u.adjust = post;
10157 mo.insn = insn;
10158 if (dump_file && (dump_flags & TDF_DETAILS))
10159 log_op_type (PATTERN (insn), bb, insn,
10160 MO_ADJUST, dump_file);
10161 VTI (bb)->mos.safe_push (mo);
10162 VTI (bb)->out.stack_adjust += post;
10163 }
10164
10165 if (fp_cfa_offset != -1
10166 && hard_frame_pointer_adjustment == -1
10167 && fp_setter_insn (insn))
10168 {
10169 vt_init_cfa_base ();
10170 hard_frame_pointer_adjustment = fp_cfa_offset;
10171 /* Disassociate sp from fp now. */
10172 if (MAY_HAVE_DEBUG_INSNS)
10173 {
10174 cselib_val *v;
10175 cselib_invalidate_rtx (stack_pointer_rtx);
10176 v = cselib_lookup (stack_pointer_rtx, Pmode, 1,
10177 VOIDmode);
10178 if (v && !cselib_preserved_value_p (v))
10179 {
10180 cselib_set_value_sp_based (v);
10181 preserve_value (v);
10182 }
10183 }
10184 }
10185 }
10186 }
10187 gcc_assert (offset == VTI (bb)->out.stack_adjust);
10188 }
10189
10190 bb = last_bb;
10191
10192 if (MAY_HAVE_DEBUG_INSNS)
10193 {
10194 cselib_preserve_only_values ();
10195 cselib_reset_table (cselib_get_next_uid ());
10196 cselib_record_sets_hook = NULL;
10197 }
10198 }
10199
10200 hard_frame_pointer_adjustment = -1;
10201 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->flooded = true;
10202 cfa_base_rtx = NULL_RTX;
10203 return true;
10204 }
10205
10206 /* This is *not* reset after each function. It gives each
10207 NOTE_INSN_DELETED_DEBUG_LABEL in the entire compilation
10208 a unique label number. */
10209
10210 static int debug_label_num = 1;
10211
10212 /* Get rid of all debug insns from the insn stream. */
10213
10214 static void
10215 delete_debug_insns (void)
10216 {
10217 basic_block bb;
10218 rtx_insn *insn, *next;
10219
10220 if (!MAY_HAVE_DEBUG_INSNS)
10221 return;
10222
10223 FOR_EACH_BB_FN (bb, cfun)
10224 {
10225 FOR_BB_INSNS_SAFE (bb, insn, next)
10226 if (DEBUG_INSN_P (insn))
10227 {
10228 tree decl = INSN_VAR_LOCATION_DECL (insn);
10229 if (TREE_CODE (decl) == LABEL_DECL
10230 && DECL_NAME (decl)
10231 && !DECL_RTL_SET_P (decl))
10232 {
10233 PUT_CODE (insn, NOTE);
10234 NOTE_KIND (insn) = NOTE_INSN_DELETED_DEBUG_LABEL;
10235 NOTE_DELETED_LABEL_NAME (insn)
10236 = IDENTIFIER_POINTER (DECL_NAME (decl));
10237 SET_DECL_RTL (decl, insn);
10238 CODE_LABEL_NUMBER (insn) = debug_label_num++;
10239 }
10240 else
10241 delete_insn (insn);
10242 }
10243 }
10244 }
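/* Editorial summary of the transformation above: a DEBUG_INSN binding
   a named label that never received RTL is recycled in place into a
   NOTE_INSN_DELETED_DEBUG_LABEL carrying the label's name and a fresh
   number from debug_label_num, and DECL_RTL is pointed at the note so
   later references to the label still resolve.  Every other debug
   insn is simply deleted.  */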
10245
10246 /* Run a fast, BB-local only version of var tracking, to take care of
10247 information that we don't do global analysis on, so that not all of
10248 that information is lost. If SKIPPED holds, we're skipping the global
10249 pass entirely, so we should also try to use whatever information it
10250 would have handled. */
10251
10252 static void
10253 vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
10254 {
10255 /* ??? Just skip it all for now. */
10256 delete_debug_insns ();
10257 }
10258
10259 /* Free the data structures needed for variable tracking. */
10260
10261 static void
10262 vt_finalize (void)
10263 {
10264 basic_block bb;
10265
10266 FOR_EACH_BB_FN (bb, cfun)
10267 {
10268 VTI (bb)->mos.release ();
10269 }
10270
10271 FOR_ALL_BB_FN (bb, cfun)
10272 {
10273 dataflow_set_destroy (&VTI (bb)->in);
10274 dataflow_set_destroy (&VTI (bb)->out);
10275 if (VTI (bb)->permp)
10276 {
10277 dataflow_set_destroy (VTI (bb)->permp);
10278 XDELETE (VTI (bb)->permp);
10279 }
10280 }
10281 free_aux_for_blocks ();
10282 delete empty_shared_hash->htab;
10283 empty_shared_hash->htab = NULL;
10284 delete changed_variables;
10285 changed_variables = NULL;
10286 attrs_def::pool.release ();
10287 var_pool.release ();
10288 location_chain_def::pool.release ();
10289 shared_hash_def::pool.release ();
10290
10291 if (MAY_HAVE_DEBUG_INSNS)
10292 {
10293 if (global_get_addr_cache)
10294 delete global_get_addr_cache;
10295 global_get_addr_cache = NULL;
10296 loc_exp_dep::pool.release ();
10297 valvar_pool.release ();
10298 preserved_values.release ();
10299 cselib_finish ();
10300 BITMAP_FREE (scratch_regs);
10301 scratch_regs = NULL;
10302 }
10303
10304 #ifdef HAVE_window_save
10305 vec_free (windowed_parm_regs);
10306 #endif
10307
10308 if (vui_vec)
10309 XDELETEVEC (vui_vec);
10310 vui_vec = NULL;
10311 vui_allocated = 0;
10312 }
10313
10314 /* The entry point to the variable tracking pass. */
10315
10316 static inline unsigned int
10317 variable_tracking_main_1 (void)
10318 {
10319 bool success;
10320
10321 if (flag_var_tracking_assignments < 0
10322 /* Var-tracking right now assumes the IR doesn't contain
10323 any pseudos at this point. */
10324 || targetm.no_register_allocation)
10325 {
10326 delete_debug_insns ();
10327 return 0;
10328 }
10329
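/* Presumably a compile-time safeguard: on very large, dense CFGs the
   global dataflow analysis below would be too expensive, so only the
   BB-local version is run.  */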
10330 if (n_basic_blocks_for_fn (cfun) > 500
10331 && n_edges_for_fn (cfun) / n_basic_blocks_for_fn (cfun) >= 20)
10332 {
10333 vt_debug_insns_local (true);
10334 return 0;
10335 }
10336
10337 mark_dfs_back_edges ();
10338 if (!vt_initialize ())
10339 {
10340 vt_finalize ();
10341 vt_debug_insns_local (true);
10342 return 0;
10343 }
10344
10345 success = vt_find_locations ();
10346
10347 if (!success && flag_var_tracking_assignments > 0)
10348 {
10349 vt_finalize ();
10350
10351 delete_debug_insns ();
10352
10353 /* This is later restored by our caller. */
10354 flag_var_tracking_assignments = 0;
10355
10356 success = vt_initialize ();
10357 gcc_assert (success);
10358
10359 success = vt_find_locations ();
10360 }
10361
10362 if (!success)
10363 {
10364 vt_finalize ();
10365 vt_debug_insns_local (false);
10366 return 0;
10367 }
10368
10369 if (dump_file && (dump_flags & TDF_DETAILS))
10370 {
10371 dump_dataflow_sets ();
10372 dump_reg_info (dump_file);
10373 dump_flow_info (dump_file, dump_flags);
10374 }
10375
10376 timevar_push (TV_VAR_TRACKING_EMIT);
10377 vt_emit_notes ();
10378 timevar_pop (TV_VAR_TRACKING_EMIT);
10379
10380 vt_finalize ();
10381 vt_debug_insns_local (false);
10382 return 0;
10383 }
10384
10385 unsigned int
10386 variable_tracking_main (void)
10387 {
10388 unsigned int ret;
10389 int save = flag_var_tracking_assignments;
10390
10391 ret = variable_tracking_main_1 ();
10392
10393 flag_var_tracking_assignments = save;
10394
10395 return ret;
10396 }
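/* Note: this wrapper exists because variable_tracking_main_1 may clear
   flag_var_tracking_assignments when the first attempt at
   vt_find_locations fails; saving and restoring the flag keeps that
   fallback from leaking into the compilation of later functions.  */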
10397 \f
10398 namespace {
10399
10400 const pass_data pass_data_variable_tracking =
10401 {
10402 RTL_PASS, /* type */
10403 "vartrack", /* name */
10404 OPTGROUP_NONE, /* optinfo_flags */
10405 TV_VAR_TRACKING, /* tv_id */
10406 0, /* properties_required */
10407 0, /* properties_provided */
10408 0, /* properties_destroyed */
10409 0, /* todo_flags_start */
10410 0, /* todo_flags_finish */
10411 };
10412
10413 class pass_variable_tracking : public rtl_opt_pass
10414 {
10415 public:
10416 pass_variable_tracking (gcc::context *ctxt)
10417 : rtl_opt_pass (pass_data_variable_tracking, ctxt)
10418 {}
10419
10420 /* opt_pass methods: */
10421 virtual bool gate (function *)
10422 {
10423 return (flag_var_tracking && !targetm.delay_vartrack);
10424 }
10425
10426 virtual unsigned int execute (function *)
10427 {
10428 return variable_tracking_main ();
10429 }
10430
10431 }; // class pass_variable_tracking
10432
10433 } // anon namespace
10434
10435 rtl_opt_pass *
10436 make_pass_variable_tracking (gcc::context *ctxt)
10437 {
10438 return new pass_variable_tracking (ctxt);
10439 }
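/* Editorial note: the gate above declines to run the pass when
   targetm.delay_vartrack is set; on such targets the back end is
   expected to invoke variable_tracking_main itself at a later point,
   which is presumably why that function has external linkage.  */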