1 /* Variable tracking routines for the GNU compiler.
2 Copyright (C) 2002-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains the variable tracking pass. It computes where
21 variables are located (which registers or where in memory) at each position
22 in instruction stream and emits notes describing the locations.
23 Debug information (DWARF2 location lists) is finally generated from
25 With this debug information, it is possible to show variables
26 even when debugging optimized code.
28 How does the variable tracking pass work?
30 First, it scans RTL code for uses, stores and clobbers (register/memory
31 references in instructions), for call insns and for stack adjustments
32 separately for each basic block and saves them to an array of micro
34 The micro operations of one instruction are ordered so that
35 pre-modifying stack adjustment < use < use with no var < call insn <
36 < clobber < set < post-modifying stack adjustment
38 Then, a forward dataflow analysis is performed to find out how locations
39 of variables change through code and to propagate the variable locations
40 along control flow graph.
41 The IN set for basic block BB is computed as a union of OUT sets of BB's
42 predecessors, the OUT set for BB is copied from the IN set for BB and
43 is changed according to micro operations in BB.
45 The IN and OUT sets for basic blocks consist of a current stack adjustment
46 (used for adjusting offset of variables addressed using stack pointer),
47 the table of structures describing the locations of parts of a variable
48 and, for each physical register, a linked list describing its contents.
49 The linked list is a list of variable parts stored in the register,
50 i.e. it is a list of triplets (reg, decl, offset) where decl is
51 REG_EXPR (reg) and offset is REG_OFFSET (reg). The linked list is used for
52 effective deleting appropriate variable parts when we set or clobber the
55 There may be more than one variable part in a register. The linked lists
56 should be pretty short so it is a good data structure here.
57 For example in the following code, register allocator may assign same
58 register to variables A and B, and both of them are stored in the same
71 Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
72 are emitted to appropriate positions in RTL code. Each such note describes
73 the location of one variable at the point in instruction stream where the
74 note is. There is no need to emit a note for each variable before each
75 instruction, we only emit these notes where the location of variable changes
76 (this means that we also emit notes for changes between the OUT set of the
77 previous block and the IN set of the current block).
79 The notes consist of two parts:
80 1. the declaration (from REG_EXPR or MEM_EXPR)
81 2. the location of a variable - it is either a simple register/memory
82 reference (for simple variables, for example int),
83 or a parallel of register/memory references (for a large variables
84 which consist of several parts, for example long long).
90 #include "coretypes.h"
101 #include "stor-layout.h"
102 #include "hash-map.h"
103 #include "hash-table.h"
105 #include "hard-reg-set.h"
106 #include "function.h"
107 #include "dominance.h"
111 #include "basic-block.h"
114 #include "insn-config.h"
117 #include "alloc-pool.h"
120 #include "statistics.h"
125 #include "emit-rtl.h"
128 #include "tree-pass.h"
130 #include "tree-dfa.h"
131 #include "tree-ssa.h"
135 #include "diagnostic.h"
136 #include "tree-pretty-print.h"
138 #include "rtl-iter.h"
139 #include "fibonacci_heap.h"
141 typedef fibonacci_heap
<long, basic_block_def
> bb_heap_t
;
142 typedef fibonacci_node
<long, basic_block_def
> bb_heap_node_t
;
144 /* var-tracking.c assumes that tree code with the same value as VALUE rtx code
145 has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
146 Currently the value is the same as IDENTIFIER_NODE, which has such
147 a property. If this compile time assertion ever fails, make sure that
148 the new tree code that equals (int) VALUE has the same property. */
149 extern char check_value_val
[(int) VALUE
== (int) IDENTIFIER_NODE
? 1 : -1];
/* Type of micro operation.  */
enum micro_operation_type
{
  MO_USE,	/* Use location (REG or MEM).  */
  MO_USE_NO_VAR,/* Use location which is not associated with a variable
		   or the variable is not trackable.  */
  MO_VAL_USE,	/* Use location which is associated with a value.  */
  MO_VAL_LOC,	/* Use location which appears in a debug insn.  */
  MO_VAL_SET,	/* Set location associated with a value.  */
  MO_SET,	/* Set location.  */
  MO_COPY,	/* Copy the same portion of a variable from one
		   location to another.  */
  MO_CLOBBER,	/* Clobber location.  */
  MO_CALL,	/* Call insn.  */
  MO_ADJUST	/* Adjust stack pointer.  */
};
169 static const char * const ATTRIBUTE_UNUSED
170 micro_operation_type_name
[] = {
/* Where shall the note be emitted?  BEFORE or AFTER the instruction.
   Notes emitted as AFTER_CALL are to take effect during the call,
   rather than after the call.  */
enum emit_note_where
{
  EMIT_NOTE_BEFORE_INSN,
  EMIT_NOTE_AFTER_INSN,
  EMIT_NOTE_AFTER_CALL_INSN
};
193 /* Structure holding information about micro operation. */
194 typedef struct micro_operation_def
196 /* Type of micro operation. */
197 enum micro_operation_type type
;
199 /* The instruction which the micro operation is in, for MO_USE,
200 MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
201 instruction or note in the original flow (before any var-tracking
202 notes are inserted, to simplify emission of notes), for MO_SET
207 /* Location. For MO_SET and MO_COPY, this is the SET that
208 performs the assignment, if known, otherwise it is the target
209 of the assignment. For MO_VAL_USE and MO_VAL_SET, it is a
210 CONCAT of the VALUE and the LOC associated with it. For
211 MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
212 associated with it. */
215 /* Stack adjustment. */
216 HOST_WIDE_INT adjust
;
/* A declaration of a variable, or an RTL value being handled like a
   declaration.  */
typedef void *decl_or_value;
225 /* Return true if a decl_or_value DV is a DECL or NULL. */
227 dv_is_decl_p (decl_or_value dv
)
229 return !dv
|| (int) TREE_CODE ((tree
) dv
) != (int) VALUE
;
232 /* Return true if a decl_or_value is a VALUE rtl. */
234 dv_is_value_p (decl_or_value dv
)
236 return dv
&& !dv_is_decl_p (dv
);
239 /* Return the decl in the decl_or_value. */
241 dv_as_decl (decl_or_value dv
)
243 gcc_checking_assert (dv_is_decl_p (dv
));
247 /* Return the value in the decl_or_value. */
249 dv_as_value (decl_or_value dv
)
251 gcc_checking_assert (dv_is_value_p (dv
));
255 /* Return the opaque pointer in the decl_or_value. */
257 dv_as_opaque (decl_or_value dv
)
263 /* Description of location of a part of a variable. The content of a physical
264 register is described by a chain of these structures.
265 The chains are pretty short (usually 1 or 2 elements) and thus
266 chain is the best data structure. */
267 typedef struct attrs_def
269 /* Pointer to next member of the list. */
270 struct attrs_def
*next
;
272 /* The rtx of register. */
275 /* The declaration corresponding to LOC. */
278 /* Offset from start of DECL. */
279 HOST_WIDE_INT offset
;
281 /* Pool allocation new operator. */
282 inline void *operator new (size_t)
284 return pool
.allocate ();
287 /* Delete operator utilizing pool allocation. */
288 inline void operator delete (void *ptr
)
290 pool
.remove ((attrs_def
*) ptr
);
293 /* Memory allocation pool. */
294 static pool_allocator
<attrs_def
> pool
;
297 /* Structure for chaining the locations. */
298 typedef struct location_chain_def
300 /* Next element in the chain. */
301 struct location_chain_def
*next
;
303 /* The location (REG, MEM or VALUE). */
306 /* The "value" stored in this location. */
310 enum var_init_status init
;
312 /* Pool allocation new operator. */
313 inline void *operator new (size_t)
315 return pool
.allocate ();
318 /* Delete operator utilizing pool allocation. */
319 inline void operator delete (void *ptr
)
321 pool
.remove ((location_chain_def
*) ptr
);
324 /* Memory allocation pool. */
325 static pool_allocator
<location_chain_def
> pool
;
328 /* A vector of loc_exp_dep holds the active dependencies of a one-part
329 DV on VALUEs, i.e., the VALUEs expanded so as to form the current
330 location of DV. Each entry is also part of VALUE' s linked-list of
331 backlinks back to DV. */
332 typedef struct loc_exp_dep_s
334 /* The dependent DV. */
336 /* The dependency VALUE or DECL_DEBUG. */
338 /* The next entry in VALUE's backlinks list. */
339 struct loc_exp_dep_s
*next
;
340 /* A pointer to the pointer to this entry (head or prev's next) in
341 the doubly-linked list. */
342 struct loc_exp_dep_s
**pprev
;
344 /* Pool allocation new operator. */
345 inline void *operator new (size_t)
347 return pool
.allocate ();
350 /* Delete operator utilizing pool allocation. */
351 inline void operator delete (void *ptr
)
353 pool
.remove ((loc_exp_dep_s
*) ptr
);
356 /* Memory allocation pool. */
357 static pool_allocator
<loc_exp_dep_s
> pool
;
361 /* This data structure holds information about the depth of a variable
363 typedef struct expand_depth_struct
365 /* This measures the complexity of the expanded expression. It
366 grows by one for each level of expansion that adds more than one
369 /* This counts the number of ENTRY_VALUE expressions in an
370 expansion. We want to minimize their use. */
374 /* This data structure is allocated for one-part variables at the time
375 of emitting notes. */
378 /* Doubly-linked list of dependent DVs. These are DVs whose cur_loc
379 computation used the expansion of this variable, and that ought
380 to be notified should this variable change. If the DV's cur_loc
381 expanded to NULL, all components of the loc list are regarded as
382 active, so that any changes in them give us a chance to get a
383 location. Otherwise, only components of the loc that expanded to
384 non-NULL are regarded as active dependencies. */
385 loc_exp_dep
*backlinks
;
386 /* This holds the LOC that was expanded into cur_loc. We need only
387 mark a one-part variable as changed if the FROM loc is removed,
388 or if it has no known location and a loc is added, or if it gets
389 a change notification from any of its active dependencies. */
391 /* The depth of the cur_loc expression. */
393 /* Dependencies actively used when expand FROM into cur_loc. */
394 vec
<loc_exp_dep
, va_heap
, vl_embed
> deps
;
397 /* Structure describing one part of variable. */
398 typedef struct variable_part_def
400 /* Chain of locations of the part. */
401 location_chain loc_chain
;
403 /* Location which was last emitted to location list. */
408 /* The offset in the variable, if !var->onepart. */
409 HOST_WIDE_INT offset
;
411 /* Pointer to auxiliary data, if var->onepart and emit_notes. */
412 struct onepart_aux
*onepaux
;
416 /* Maximum number of location parts. */
417 #define MAX_VAR_PARTS 16
419 /* Enumeration type used to discriminate various types of one-part
421 typedef enum onepart_enum
423 /* Not a one-part variable. */
425 /* A one-part DECL that is not a DEBUG_EXPR_DECL. */
427 /* A DEBUG_EXPR_DECL. */
433 /* Structure describing where the variable is located. */
434 typedef struct variable_def
436 /* The declaration of the variable, or an RTL value being handled
437 like a declaration. */
440 /* Reference count. */
443 /* Number of variable parts. */
446 /* What type of DV this is, according to enum onepart_enum. */
447 ENUM_BITFIELD (onepart_enum
) onepart
: CHAR_BIT
;
449 /* True if this variable_def struct is currently in the
450 changed_variables hash table. */
451 bool in_changed_variables
;
453 /* The variable parts. */
454 variable_part var_part
[1];
456 typedef const struct variable_def
*const_variable
;
458 /* Pointer to the BB's information specific to variable tracking pass. */
459 #define VTI(BB) ((variable_tracking_info) (BB)->aux)
461 /* Macro to access MEM_OFFSET as an HOST_WIDE_INT. Evaluates MEM twice. */
462 #define INT_MEM_OFFSET(mem) (MEM_OFFSET_KNOWN_P (mem) ? MEM_OFFSET (mem) : 0)
464 #if ENABLE_CHECKING && (GCC_VERSION >= 2007)
466 /* Access VAR's Ith part's offset, checking that it's not a one-part
468 #define VAR_PART_OFFSET(var, i) __extension__ \
469 (*({ variable const __v = (var); \
470 gcc_checking_assert (!__v->onepart); \
471 &__v->var_part[(i)].aux.offset; }))
473 /* Access VAR's one-part auxiliary data, checking that it is a
474 one-part variable. */
475 #define VAR_LOC_1PAUX(var) __extension__ \
476 (*({ variable const __v = (var); \
477 gcc_checking_assert (__v->onepart); \
478 &__v->var_part[0].aux.onepaux; }))
481 #define VAR_PART_OFFSET(var, i) ((var)->var_part[(i)].aux.offset)
482 #define VAR_LOC_1PAUX(var) ((var)->var_part[0].aux.onepaux)
485 /* These are accessor macros for the one-part auxiliary data. When
486 convenient for users, they're guarded by tests that the data was
488 #define VAR_LOC_DEP_LST(var) (VAR_LOC_1PAUX (var) \
489 ? VAR_LOC_1PAUX (var)->backlinks \
491 #define VAR_LOC_DEP_LSTP(var) (VAR_LOC_1PAUX (var) \
492 ? &VAR_LOC_1PAUX (var)->backlinks \
494 #define VAR_LOC_FROM(var) (VAR_LOC_1PAUX (var)->from)
495 #define VAR_LOC_DEPTH(var) (VAR_LOC_1PAUX (var)->depth)
496 #define VAR_LOC_DEP_VEC(var) (VAR_LOC_1PAUX (var) \
497 ? &VAR_LOC_1PAUX (var)->deps \
/* An integer uid identifying either a VALUE or a DECL.  */
typedef unsigned int dvuid;
504 /* Return the uid of DV. */
507 dv_uid (decl_or_value dv
)
509 if (dv_is_value_p (dv
))
510 return CSELIB_VAL_PTR (dv_as_value (dv
))->uid
;
512 return DECL_UID (dv_as_decl (dv
));
515 /* Compute the hash from the uid. */
517 static inline hashval_t
518 dv_uid2hash (dvuid uid
)
523 /* The hash function for a mask table in a shared_htab chain. */
525 static inline hashval_t
526 dv_htab_hash (decl_or_value dv
)
528 return dv_uid2hash (dv_uid (dv
));
531 static void variable_htab_free (void *);
533 /* Variable hashtable helpers. */
535 struct variable_hasher
537 typedef variable_def
*value_type
;
538 typedef void *compare_type
;
539 static inline hashval_t
hash (const variable_def
*);
540 static inline bool equal (const variable_def
*, const void *);
541 static inline void remove (variable_def
*);
544 /* The hash function for variable_htab, computes the hash value
545 from the declaration of variable X. */
548 variable_hasher::hash (const variable_def
*v
)
550 return dv_htab_hash (v
->dv
);
553 /* Compare the declaration of variable X with declaration Y. */
556 variable_hasher::equal (const variable_def
*v
, const void *y
)
558 decl_or_value dv
= CONST_CAST2 (decl_or_value
, const void *, y
);
560 return (dv_as_opaque (v
->dv
) == dv_as_opaque (dv
));
563 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
566 variable_hasher::remove (variable_def
*var
)
568 variable_htab_free (var
);
571 typedef hash_table
<variable_hasher
> variable_table_type
;
572 typedef variable_table_type::iterator variable_iterator_type
;
574 /* Structure for passing some other parameters to function
575 emit_note_insn_var_location. */
576 typedef struct emit_note_data_def
578 /* The instruction which the note will be emitted before/after. */
581 /* Where the note will be emitted (before/after insn)? */
582 enum emit_note_where where
;
584 /* The variables and values active at this point. */
585 variable_table_type
*vars
;
588 /* Structure holding a refcounted hash table. If refcount > 1,
589 it must be first unshared before modified. */
590 typedef struct shared_hash_def
592 /* Reference count. */
595 /* Actual hash table. */
596 variable_table_type
*htab
;
598 /* Pool allocation new operator. */
599 inline void *operator new (size_t)
601 return pool
.allocate ();
604 /* Delete operator utilizing pool allocation. */
605 inline void operator delete (void *ptr
)
607 pool
.remove ((shared_hash_def
*) ptr
);
610 /* Memory allocation pool. */
611 static pool_allocator
<shared_hash_def
> pool
;
614 /* Structure holding the IN or OUT set for a basic block. */
615 typedef struct dataflow_set_def
617 /* Adjustment of stack offset. */
618 HOST_WIDE_INT stack_adjust
;
620 /* Attributes for registers (lists of attrs). */
621 attrs regs
[FIRST_PSEUDO_REGISTER
];
623 /* Variable locations. */
626 /* Vars that is being traversed. */
627 shared_hash traversed_vars
;
630 /* The structure (one for each basic block) containing the information
631 needed for variable tracking. */
632 typedef struct variable_tracking_info_def
634 /* The vector of micro operations. */
635 vec
<micro_operation
> mos
;
637 /* The IN and OUT set for dataflow analysis. */
641 /* The permanent-in dataflow set for this block. This is used to
642 hold values for which we had to compute entry values. ??? This
643 should probably be dynamically allocated, to avoid using more
644 memory in non-debug builds. */
647 /* Has the block been visited in DFS? */
650 /* Has the block been flooded in VTA? */
653 } *variable_tracking_info
;
655 /* Alloc pool for struct attrs_def. */
656 pool_allocator
<attrs_def
> attrs_def::pool ("attrs_def pool", 1024);
658 /* Alloc pool for struct variable_def with MAX_VAR_PARTS entries. */
660 static pool_allocator
<variable_def
> var_pool
661 ("variable_def pool", 64,
662 (MAX_VAR_PARTS
- 1) * sizeof (((variable
)NULL
)->var_part
[0]));
664 /* Alloc pool for struct variable_def with a single var_part entry. */
665 static pool_allocator
<variable_def
> valvar_pool
666 ("small variable_def pool", 256);
668 /* Alloc pool for struct location_chain_def. */
669 pool_allocator
<location_chain_def
> location_chain_def::pool
670 ("location_chain_def pool", 1024);
672 /* Alloc pool for struct shared_hash_def. */
673 pool_allocator
<shared_hash_def
> shared_hash_def::pool
674 ("shared_hash_def pool", 256);
676 /* Alloc pool for struct loc_exp_dep_s for NOT_ONEPART variables. */
677 pool_allocator
<loc_exp_dep
> loc_exp_dep::pool ("loc_exp_dep pool", 64);
679 /* Changed variables, notes will be emitted for them. */
680 static variable_table_type
*changed_variables
;
682 /* Shall notes be emitted? */
683 static bool emit_notes
;
685 /* Values whose dynamic location lists have gone empty, but whose
686 cselib location lists are still usable. Use this to hold the
687 current location, the backlinks, etc, during emit_notes. */
688 static variable_table_type
*dropped_values
;
690 /* Empty shared hashtable. */
691 static shared_hash empty_shared_hash
;
693 /* Scratch register bitmap used by cselib_expand_value_rtx. */
694 static bitmap scratch_regs
= NULL
;
696 #ifdef HAVE_window_save
697 typedef struct GTY(()) parm_reg
{
703 /* Vector of windowed parameter registers, if any. */
704 static vec
<parm_reg_t
, va_gc
> *windowed_parm_regs
= NULL
;
/* Variable used to tell whether cselib_process_insn called our hook.  */
static bool cselib_hook_called;
710 /* Local function prototypes. */
711 static void stack_adjust_offset_pre_post (rtx
, HOST_WIDE_INT
*,
713 static void insn_stack_adjust_offset_pre_post (rtx_insn
*, HOST_WIDE_INT
*,
715 static bool vt_stack_adjustments (void);
717 static void init_attrs_list_set (attrs
*);
718 static void attrs_list_clear (attrs
*);
719 static attrs
attrs_list_member (attrs
, decl_or_value
, HOST_WIDE_INT
);
720 static void attrs_list_insert (attrs
*, decl_or_value
, HOST_WIDE_INT
, rtx
);
721 static void attrs_list_copy (attrs
*, attrs
);
722 static void attrs_list_union (attrs
*, attrs
);
724 static variable_def
**unshare_variable (dataflow_set
*set
, variable_def
**slot
,
725 variable var
, enum var_init_status
);
726 static void vars_copy (variable_table_type
*, variable_table_type
*);
727 static tree
var_debug_decl (tree
);
728 static void var_reg_set (dataflow_set
*, rtx
, enum var_init_status
, rtx
);
729 static void var_reg_delete_and_set (dataflow_set
*, rtx
, bool,
730 enum var_init_status
, rtx
);
731 static void var_reg_delete (dataflow_set
*, rtx
, bool);
732 static void var_regno_delete (dataflow_set
*, int);
733 static void var_mem_set (dataflow_set
*, rtx
, enum var_init_status
, rtx
);
734 static void var_mem_delete_and_set (dataflow_set
*, rtx
, bool,
735 enum var_init_status
, rtx
);
736 static void var_mem_delete (dataflow_set
*, rtx
, bool);
738 static void dataflow_set_init (dataflow_set
*);
739 static void dataflow_set_clear (dataflow_set
*);
740 static void dataflow_set_copy (dataflow_set
*, dataflow_set
*);
741 static int variable_union_info_cmp_pos (const void *, const void *);
742 static void dataflow_set_union (dataflow_set
*, dataflow_set
*);
743 static location_chain
find_loc_in_1pdv (rtx
, variable
, variable_table_type
*);
744 static bool canon_value_cmp (rtx
, rtx
);
745 static int loc_cmp (rtx
, rtx
);
746 static bool variable_part_different_p (variable_part
*, variable_part
*);
747 static bool onepart_variable_different_p (variable
, variable
);
748 static bool variable_different_p (variable
, variable
);
749 static bool dataflow_set_different (dataflow_set
*, dataflow_set
*);
750 static void dataflow_set_destroy (dataflow_set
*);
752 static bool contains_symbol_ref (rtx
);
753 static bool track_expr_p (tree
, bool);
754 static bool same_variable_part_p (rtx
, tree
, HOST_WIDE_INT
);
755 static void add_uses_1 (rtx
*, void *);
756 static void add_stores (rtx
, const_rtx
, void *);
757 static bool compute_bb_dataflow (basic_block
);
758 static bool vt_find_locations (void);
760 static void dump_attrs_list (attrs
);
761 static void dump_var (variable
);
762 static void dump_vars (variable_table_type
*);
763 static void dump_dataflow_set (dataflow_set
*);
764 static void dump_dataflow_sets (void);
766 static void set_dv_changed (decl_or_value
, bool);
767 static void variable_was_changed (variable
, dataflow_set
*);
768 static variable_def
**set_slot_part (dataflow_set
*, rtx
, variable_def
**,
769 decl_or_value
, HOST_WIDE_INT
,
770 enum var_init_status
, rtx
);
771 static void set_variable_part (dataflow_set
*, rtx
,
772 decl_or_value
, HOST_WIDE_INT
,
773 enum var_init_status
, rtx
, enum insert_option
);
774 static variable_def
**clobber_slot_part (dataflow_set
*, rtx
,
775 variable_def
**, HOST_WIDE_INT
, rtx
);
776 static void clobber_variable_part (dataflow_set
*, rtx
,
777 decl_or_value
, HOST_WIDE_INT
, rtx
);
778 static variable_def
**delete_slot_part (dataflow_set
*, rtx
, variable_def
**,
780 static void delete_variable_part (dataflow_set
*, rtx
,
781 decl_or_value
, HOST_WIDE_INT
);
782 static void emit_notes_in_bb (basic_block
, dataflow_set
*);
783 static void vt_emit_notes (void);
785 static bool vt_get_decl_and_offset (rtx
, tree
*, HOST_WIDE_INT
*);
786 static void vt_add_function_parameters (void);
787 static bool vt_initialize (void);
788 static void vt_finalize (void);
790 /* Callback for stack_adjust_offset_pre_post, called via for_each_inc_dec. */
793 stack_adjust_offset_pre_post_cb (rtx
, rtx op
, rtx dest
, rtx src
, rtx srcoff
,
796 if (dest
!= stack_pointer_rtx
)
799 switch (GET_CODE (op
))
803 ((HOST_WIDE_INT
*)arg
)[0] -= INTVAL (srcoff
);
807 ((HOST_WIDE_INT
*)arg
)[1] -= INTVAL (srcoff
);
811 /* We handle only adjustments by constant amount. */
812 gcc_assert (GET_CODE (src
) == PLUS
813 && CONST_INT_P (XEXP (src
, 1))
814 && XEXP (src
, 0) == stack_pointer_rtx
);
815 ((HOST_WIDE_INT
*)arg
)[GET_CODE (op
) == POST_MODIFY
]
816 -= INTVAL (XEXP (src
, 1));
823 /* Given a SET, calculate the amount of stack adjustment it contains
824 PRE- and POST-modifying stack pointer.
825 This function is similar to stack_adjust_offset. */
828 stack_adjust_offset_pre_post (rtx pattern
, HOST_WIDE_INT
*pre
,
831 rtx src
= SET_SRC (pattern
);
832 rtx dest
= SET_DEST (pattern
);
835 if (dest
== stack_pointer_rtx
)
837 /* (set (reg sp) (plus (reg sp) (const_int))) */
838 code
= GET_CODE (src
);
839 if (! (code
== PLUS
|| code
== MINUS
)
840 || XEXP (src
, 0) != stack_pointer_rtx
841 || !CONST_INT_P (XEXP (src
, 1)))
845 *post
+= INTVAL (XEXP (src
, 1));
847 *post
-= INTVAL (XEXP (src
, 1));
850 HOST_WIDE_INT res
[2] = { 0, 0 };
851 for_each_inc_dec (pattern
, stack_adjust_offset_pre_post_cb
, res
);
856 /* Given an INSN, calculate the amount of stack adjustment it contains
857 PRE- and POST-modifying stack pointer. */
860 insn_stack_adjust_offset_pre_post (rtx_insn
*insn
, HOST_WIDE_INT
*pre
,
868 pattern
= PATTERN (insn
);
869 if (RTX_FRAME_RELATED_P (insn
))
871 rtx expr
= find_reg_note (insn
, REG_FRAME_RELATED_EXPR
, NULL_RTX
);
873 pattern
= XEXP (expr
, 0);
876 if (GET_CODE (pattern
) == SET
)
877 stack_adjust_offset_pre_post (pattern
, pre
, post
);
878 else if (GET_CODE (pattern
) == PARALLEL
879 || GET_CODE (pattern
) == SEQUENCE
)
883 /* There may be stack adjustments inside compound insns. Search
885 for ( i
= XVECLEN (pattern
, 0) - 1; i
>= 0; i
--)
886 if (GET_CODE (XVECEXP (pattern
, 0, i
)) == SET
)
887 stack_adjust_offset_pre_post (XVECEXP (pattern
, 0, i
), pre
, post
);
891 /* Compute stack adjustments for all blocks by traversing DFS tree.
892 Return true when the adjustments on all incoming edges are consistent.
893 Heavily borrowed from pre_and_rev_post_order_compute. */
896 vt_stack_adjustments (void)
898 edge_iterator
*stack
;
901 /* Initialize entry block. */
902 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun
))->visited
= true;
903 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun
))->in
.stack_adjust
904 = INCOMING_FRAME_SP_OFFSET
;
905 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun
))->out
.stack_adjust
906 = INCOMING_FRAME_SP_OFFSET
;
908 /* Allocate stack for back-tracking up CFG. */
909 stack
= XNEWVEC (edge_iterator
, n_basic_blocks_for_fn (cfun
) + 1);
912 /* Push the first edge on to the stack. */
913 stack
[sp
++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun
)->succs
);
921 /* Look at the edge on the top of the stack. */
923 src
= ei_edge (ei
)->src
;
924 dest
= ei_edge (ei
)->dest
;
926 /* Check if the edge destination has been visited yet. */
927 if (!VTI (dest
)->visited
)
930 HOST_WIDE_INT pre
, post
, offset
;
931 VTI (dest
)->visited
= true;
932 VTI (dest
)->in
.stack_adjust
= offset
= VTI (src
)->out
.stack_adjust
;
934 if (dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
935 for (insn
= BB_HEAD (dest
);
936 insn
!= NEXT_INSN (BB_END (dest
));
937 insn
= NEXT_INSN (insn
))
940 insn_stack_adjust_offset_pre_post (insn
, &pre
, &post
);
941 offset
+= pre
+ post
;
944 VTI (dest
)->out
.stack_adjust
= offset
;
946 if (EDGE_COUNT (dest
->succs
) > 0)
947 /* Since the DEST node has been visited for the first
948 time, check its successors. */
949 stack
[sp
++] = ei_start (dest
->succs
);
953 /* We can end up with different stack adjustments for the exit block
954 of a shrink-wrapped function if stack_adjust_offset_pre_post
955 doesn't understand the rtx pattern used to restore the stack
956 pointer in the epilogue. For example, on s390(x), the stack
957 pointer is often restored via a load-multiple instruction
958 and so no stack_adjust offset is recorded for it. This means
959 that the stack offset at the end of the epilogue block is the
960 the same as the offset before the epilogue, whereas other paths
961 to the exit block will have the correct stack_adjust.
963 It is safe to ignore these differences because (a) we never
964 use the stack_adjust for the exit block in this pass and
965 (b) dwarf2cfi checks whether the CFA notes in a shrink-wrapped
966 function are correct.
968 We must check whether the adjustments on other edges are
970 if (dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
)
971 && VTI (dest
)->in
.stack_adjust
!= VTI (src
)->out
.stack_adjust
)
977 if (! ei_one_before_end_p (ei
))
978 /* Go to the next edge. */
979 ei_next (&stack
[sp
- 1]);
981 /* Return to previous level if there are no more edges. */
990 /* arg_pointer_rtx resp. frame_pointer_rtx if stack_pointer_rtx or
991 hard_frame_pointer_rtx is being mapped to it and offset for it. */
992 static rtx cfa_base_rtx
;
993 static HOST_WIDE_INT cfa_base_offset
;
995 /* Compute a CFA-based value for an ADJUSTMENT made to stack_pointer_rtx
996 or hard_frame_pointer_rtx. */
999 compute_cfa_pointer (HOST_WIDE_INT adjustment
)
1001 return plus_constant (Pmode
, cfa_base_rtx
, adjustment
+ cfa_base_offset
);
1004 /* Adjustment for hard_frame_pointer_rtx to cfa base reg,
1005 or -1 if the replacement shouldn't be done. */
1006 static HOST_WIDE_INT hard_frame_pointer_adjustment
= -1;
1008 /* Data for adjust_mems callback. */
1010 struct adjust_mem_data
1013 machine_mode mem_mode
;
1014 HOST_WIDE_INT stack_adjust
;
1015 rtx_expr_list
*side_effects
;
1018 /* Helper for adjust_mems. Return true if X is suitable for
1019 transformation of wider mode arithmetics to narrower mode. */
1022 use_narrower_mode_test (rtx x
, const_rtx subreg
)
1024 subrtx_var_iterator::array_type array
;
1025 FOR_EACH_SUBRTX_VAR (iter
, array
, x
, NONCONST
)
1029 iter
.skip_subrtxes ();
1031 switch (GET_CODE (x
))
1034 if (cselib_lookup (x
, GET_MODE (SUBREG_REG (subreg
)), 0, VOIDmode
))
1036 if (!validate_subreg (GET_MODE (subreg
), GET_MODE (x
), x
,
1037 subreg_lowpart_offset (GET_MODE (subreg
),
1046 iter
.substitute (XEXP (x
, 0));
1055 /* Transform X into narrower mode MODE from wider mode WMODE. */
1058 use_narrower_mode (rtx x
, machine_mode mode
, machine_mode wmode
)
1062 return lowpart_subreg (mode
, x
, wmode
);
1063 switch (GET_CODE (x
))
1066 return lowpart_subreg (mode
, x
, wmode
);
1070 op0
= use_narrower_mode (XEXP (x
, 0), mode
, wmode
);
1071 op1
= use_narrower_mode (XEXP (x
, 1), mode
, wmode
);
1072 return simplify_gen_binary (GET_CODE (x
), mode
, op0
, op1
);
1074 op0
= use_narrower_mode (XEXP (x
, 0), mode
, wmode
);
1076 /* Ensure shift amount is not wider than mode. */
1077 if (GET_MODE (op1
) == VOIDmode
)
1078 op1
= lowpart_subreg (mode
, op1
, wmode
);
1079 else if (GET_MODE_PRECISION (mode
) < GET_MODE_PRECISION (GET_MODE (op1
)))
1080 op1
= lowpart_subreg (mode
, op1
, GET_MODE (op1
));
1081 return simplify_gen_binary (ASHIFT
, mode
, op0
, op1
);
1087 /* Helper function for adjusting used MEMs. */
1090 adjust_mems (rtx loc
, const_rtx old_rtx
, void *data
)
1092 struct adjust_mem_data
*amd
= (struct adjust_mem_data
*) data
;
1093 rtx mem
, addr
= loc
, tem
;
1094 machine_mode mem_mode_save
;
1096 switch (GET_CODE (loc
))
1099 /* Don't do any sp or fp replacements outside of MEM addresses
1101 if (amd
->mem_mode
== VOIDmode
&& amd
->store
)
1103 if (loc
== stack_pointer_rtx
1104 && !frame_pointer_needed
1106 return compute_cfa_pointer (amd
->stack_adjust
);
1107 else if (loc
== hard_frame_pointer_rtx
1108 && frame_pointer_needed
1109 && hard_frame_pointer_adjustment
!= -1
1111 return compute_cfa_pointer (hard_frame_pointer_adjustment
);
1112 gcc_checking_assert (loc
!= virtual_incoming_args_rtx
);
1118 mem
= targetm
.delegitimize_address (mem
);
1119 if (mem
!= loc
&& !MEM_P (mem
))
1120 return simplify_replace_fn_rtx (mem
, old_rtx
, adjust_mems
, data
);
1123 addr
= XEXP (mem
, 0);
1124 mem_mode_save
= amd
->mem_mode
;
1125 amd
->mem_mode
= GET_MODE (mem
);
1126 store_save
= amd
->store
;
1128 addr
= simplify_replace_fn_rtx (addr
, old_rtx
, adjust_mems
, data
);
1129 amd
->store
= store_save
;
1130 amd
->mem_mode
= mem_mode_save
;
1132 addr
= targetm
.delegitimize_address (addr
);
1133 if (addr
!= XEXP (mem
, 0))
1134 mem
= replace_equiv_address_nv (mem
, addr
);
1136 mem
= avoid_constant_pool_reference (mem
);
1140 addr
= gen_rtx_PLUS (GET_MODE (loc
), XEXP (loc
, 0),
1141 gen_int_mode (GET_CODE (loc
) == PRE_INC
1142 ? GET_MODE_SIZE (amd
->mem_mode
)
1143 : -GET_MODE_SIZE (amd
->mem_mode
),
1148 addr
= XEXP (loc
, 0);
1149 gcc_assert (amd
->mem_mode
!= VOIDmode
&& amd
->mem_mode
!= BLKmode
);
1150 addr
= simplify_replace_fn_rtx (addr
, old_rtx
, adjust_mems
, data
);
1151 tem
= gen_rtx_PLUS (GET_MODE (loc
), XEXP (loc
, 0),
1152 gen_int_mode ((GET_CODE (loc
) == PRE_INC
1153 || GET_CODE (loc
) == POST_INC
)
1154 ? GET_MODE_SIZE (amd
->mem_mode
)
1155 : -GET_MODE_SIZE (amd
->mem_mode
),
1157 store_save
= amd
->store
;
1159 tem
= simplify_replace_fn_rtx (tem
, old_rtx
, adjust_mems
, data
);
1160 amd
->store
= store_save
;
1161 amd
->side_effects
= alloc_EXPR_LIST (0,
1162 gen_rtx_SET (XEXP (loc
, 0), tem
),
1166 addr
= XEXP (loc
, 1);
1169 addr
= XEXP (loc
, 0);
1170 gcc_assert (amd
->mem_mode
!= VOIDmode
);
1171 addr
= simplify_replace_fn_rtx (addr
, old_rtx
, adjust_mems
, data
);
1172 store_save
= amd
->store
;
1174 tem
= simplify_replace_fn_rtx (XEXP (loc
, 1), old_rtx
,
1176 amd
->store
= store_save
;
1177 amd
->side_effects
= alloc_EXPR_LIST (0,
1178 gen_rtx_SET (XEXP (loc
, 0), tem
),
1182 /* First try without delegitimization of whole MEMs and
1183 avoid_constant_pool_reference, which is more likely to succeed. */
1184 store_save
= amd
->store
;
1186 addr
= simplify_replace_fn_rtx (SUBREG_REG (loc
), old_rtx
, adjust_mems
,
1188 amd
->store
= store_save
;
1189 mem
= simplify_replace_fn_rtx (addr
, old_rtx
, adjust_mems
, data
);
1190 if (mem
== SUBREG_REG (loc
))
1195 tem
= simplify_gen_subreg (GET_MODE (loc
), mem
,
1196 GET_MODE (SUBREG_REG (loc
)),
1200 tem
= simplify_gen_subreg (GET_MODE (loc
), addr
,
1201 GET_MODE (SUBREG_REG (loc
)),
1203 if (tem
== NULL_RTX
)
1204 tem
= gen_rtx_raw_SUBREG (GET_MODE (loc
), addr
, SUBREG_BYTE (loc
));
1206 if (MAY_HAVE_DEBUG_INSNS
1207 && GET_CODE (tem
) == SUBREG
1208 && (GET_CODE (SUBREG_REG (tem
)) == PLUS
1209 || GET_CODE (SUBREG_REG (tem
)) == MINUS
1210 || GET_CODE (SUBREG_REG (tem
)) == MULT
1211 || GET_CODE (SUBREG_REG (tem
)) == ASHIFT
)
1212 && (GET_MODE_CLASS (GET_MODE (tem
)) == MODE_INT
1213 || GET_MODE_CLASS (GET_MODE (tem
)) == MODE_PARTIAL_INT
)
1214 && (GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem
))) == MODE_INT
1215 || GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem
))) == MODE_PARTIAL_INT
)
1216 && GET_MODE_PRECISION (GET_MODE (tem
))
1217 < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (tem
)))
1218 && subreg_lowpart_p (tem
)
1219 && use_narrower_mode_test (SUBREG_REG (tem
), tem
))
1220 return use_narrower_mode (SUBREG_REG (tem
), GET_MODE (tem
),
1221 GET_MODE (SUBREG_REG (tem
)));
1224 /* Don't do any replacements in second and following
1225 ASM_OPERANDS of inline-asm with multiple sets.
1226 ASM_OPERANDS_INPUT_VEC, ASM_OPERANDS_INPUT_CONSTRAINT_VEC
1227 and ASM_OPERANDS_LABEL_VEC need to be equal between
1228 all the ASM_OPERANDs in the insn and adjust_insn will
1230 if (ASM_OPERANDS_OUTPUT_IDX (loc
) != 0)
1239 /* Helper function for replacement of uses. */
1242 adjust_mem_uses (rtx
*x
, void *data
)
1244 rtx new_x
= simplify_replace_fn_rtx (*x
, NULL_RTX
, adjust_mems
, data
);
1246 validate_change (NULL_RTX
, x
, new_x
, true);
1249 /* Helper function for replacement of stores. */
1252 adjust_mem_stores (rtx loc
, const_rtx expr
, void *data
)
1256 rtx new_dest
= simplify_replace_fn_rtx (SET_DEST (expr
), NULL_RTX
,
1258 if (new_dest
!= SET_DEST (expr
))
1260 rtx xexpr
= CONST_CAST_RTX (expr
);
1261 validate_change (NULL_RTX
, &SET_DEST (xexpr
), new_dest
, true);
1266 /* Simplify INSN. Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
1267 replace them with their value in the insn and add the side-effects
1268 as other sets to the insn. */
1271 adjust_insn (basic_block bb
, rtx_insn
*insn
)
1273 struct adjust_mem_data amd
;
1276 #ifdef HAVE_window_save
1277 /* If the target machine has an explicit window save instruction, the
1278 transformation OUTGOING_REGNO -> INCOMING_REGNO is done there. */
1279 if (RTX_FRAME_RELATED_P (insn
)
1280 && find_reg_note (insn
, REG_CFA_WINDOW_SAVE
, NULL_RTX
))
1282 unsigned int i
, nregs
= vec_safe_length (windowed_parm_regs
);
1283 rtx rtl
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (nregs
* 2));
1286 FOR_EACH_VEC_SAFE_ELT (windowed_parm_regs
, i
, p
)
1288 XVECEXP (rtl
, 0, i
* 2)
1289 = gen_rtx_SET (p
->incoming
, p
->outgoing
);
1290 /* Do not clobber the attached DECL, but only the REG. */
1291 XVECEXP (rtl
, 0, i
* 2 + 1)
1292 = gen_rtx_CLOBBER (GET_MODE (p
->outgoing
),
1293 gen_raw_REG (GET_MODE (p
->outgoing
),
1294 REGNO (p
->outgoing
)));
1297 validate_change (NULL_RTX
, &PATTERN (insn
), rtl
, true);
1302 amd
.mem_mode
= VOIDmode
;
1303 amd
.stack_adjust
= -VTI (bb
)->out
.stack_adjust
;
1304 amd
.side_effects
= NULL
;
1307 note_stores (PATTERN (insn
), adjust_mem_stores
, &amd
);
1310 if (GET_CODE (PATTERN (insn
)) == PARALLEL
1311 && asm_noperands (PATTERN (insn
)) > 0
1312 && GET_CODE (XVECEXP (PATTERN (insn
), 0, 0)) == SET
)
1317 /* inline-asm with multiple sets is tiny bit more complicated,
1318 because the 3 vectors in ASM_OPERANDS need to be shared between
1319 all ASM_OPERANDS in the instruction. adjust_mems will
1320 not touch ASM_OPERANDS other than the first one, asm_noperands
1321 test above needs to be called before that (otherwise it would fail)
1322 and afterwards this code fixes it up. */
1323 note_uses (&PATTERN (insn
), adjust_mem_uses
, &amd
);
1324 body
= PATTERN (insn
);
1325 set0
= XVECEXP (body
, 0, 0);
1326 gcc_checking_assert (GET_CODE (set0
) == SET
1327 && GET_CODE (SET_SRC (set0
)) == ASM_OPERANDS
1328 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set0
)) == 0);
1329 for (i
= 1; i
< XVECLEN (body
, 0); i
++)
1330 if (GET_CODE (XVECEXP (body
, 0, i
)) != SET
)
1334 set
= XVECEXP (body
, 0, i
);
1335 gcc_checking_assert (GET_CODE (SET_SRC (set
)) == ASM_OPERANDS
1336 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set
))
1338 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (set
))
1339 != ASM_OPERANDS_INPUT_VEC (SET_SRC (set0
))
1340 || ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set
))
1341 != ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0
))
1342 || ASM_OPERANDS_LABEL_VEC (SET_SRC (set
))
1343 != ASM_OPERANDS_LABEL_VEC (SET_SRC (set0
)))
1345 rtx newsrc
= shallow_copy_rtx (SET_SRC (set
));
1346 ASM_OPERANDS_INPUT_VEC (newsrc
)
1347 = ASM_OPERANDS_INPUT_VEC (SET_SRC (set0
));
1348 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (newsrc
)
1349 = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0
));
1350 ASM_OPERANDS_LABEL_VEC (newsrc
)
1351 = ASM_OPERANDS_LABEL_VEC (SET_SRC (set0
));
1352 validate_change (NULL_RTX
, &SET_SRC (set
), newsrc
, true);
1357 note_uses (&PATTERN (insn
), adjust_mem_uses
, &amd
);
1359 /* For read-only MEMs containing some constant, prefer those
1361 set
= single_set (insn
);
1362 if (set
&& MEM_P (SET_SRC (set
)) && MEM_READONLY_P (SET_SRC (set
)))
1364 rtx note
= find_reg_equal_equiv_note (insn
);
1366 if (note
&& CONSTANT_P (XEXP (note
, 0)))
1367 validate_change (NULL_RTX
, &SET_SRC (set
), XEXP (note
, 0), true);
1370 if (amd
.side_effects
)
1372 rtx
*pat
, new_pat
, s
;
1375 pat
= &PATTERN (insn
);
1376 if (GET_CODE (*pat
) == COND_EXEC
)
1377 pat
= &COND_EXEC_CODE (*pat
);
1378 if (GET_CODE (*pat
) == PARALLEL
)
1379 oldn
= XVECLEN (*pat
, 0);
1382 for (s
= amd
.side_effects
, newn
= 0; s
; newn
++)
1384 new_pat
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (oldn
+ newn
));
1385 if (GET_CODE (*pat
) == PARALLEL
)
1386 for (i
= 0; i
< oldn
; i
++)
1387 XVECEXP (new_pat
, 0, i
) = XVECEXP (*pat
, 0, i
);
1389 XVECEXP (new_pat
, 0, 0) = *pat
;
1390 for (s
= amd
.side_effects
, i
= oldn
; i
< oldn
+ newn
; i
++, s
= XEXP (s
, 1))
1391 XVECEXP (new_pat
, 0, i
) = XEXP (s
, 0);
1392 free_EXPR_LIST_list (&amd
.side_effects
);
1393 validate_change (NULL_RTX
, pat
, new_pat
, true);
1397 /* Return the DEBUG_EXPR of a DEBUG_EXPR_DECL or the VALUE in DV. */
1399 dv_as_rtx (decl_or_value dv
)
1403 if (dv_is_value_p (dv
))
1404 return dv_as_value (dv
);
1406 decl
= dv_as_decl (dv
);
1408 gcc_checking_assert (TREE_CODE (decl
) == DEBUG_EXPR_DECL
);
1409 return DECL_RTL_KNOWN_SET (decl
);
1412 /* Return nonzero if a decl_or_value must not have more than one
1413 variable part. The returned value discriminates among various
1414 kinds of one-part DVs ccording to enum onepart_enum. */
1415 static inline onepart_enum_t
1416 dv_onepart_p (decl_or_value dv
)
1420 if (!MAY_HAVE_DEBUG_INSNS
)
1423 if (dv_is_value_p (dv
))
1424 return ONEPART_VALUE
;
1426 decl
= dv_as_decl (dv
);
1428 if (TREE_CODE (decl
) == DEBUG_EXPR_DECL
)
1429 return ONEPART_DEXPR
;
1431 if (target_for_debug_bind (decl
) != NULL_TREE
)
1432 return ONEPART_VDECL
;
1437 /* Return the variable pool to be used for a dv of type ONEPART. */
1438 static inline pool_allocator
<variable_def
> &
1439 onepart_pool (onepart_enum_t onepart
)
1441 return onepart
? valvar_pool
: var_pool
;
1444 /* Build a decl_or_value out of a decl. */
1445 static inline decl_or_value
1446 dv_from_decl (tree decl
)
1450 gcc_checking_assert (dv_is_decl_p (dv
));
1454 /* Build a decl_or_value out of a value. */
1455 static inline decl_or_value
1456 dv_from_value (rtx value
)
1460 gcc_checking_assert (dv_is_value_p (dv
));
1464 /* Return a value or the decl of a debug_expr as a decl_or_value. */
1465 static inline decl_or_value
1470 switch (GET_CODE (x
))
1473 dv
= dv_from_decl (DEBUG_EXPR_TREE_DECL (x
));
1474 gcc_checking_assert (DECL_RTL_KNOWN_SET (DEBUG_EXPR_TREE_DECL (x
)) == x
);
1478 dv
= dv_from_value (x
);
1488 extern void debug_dv (decl_or_value dv
);
1491 debug_dv (decl_or_value dv
)
1493 if (dv_is_value_p (dv
))
1494 debug_rtx (dv_as_value (dv
));
1496 debug_generic_stmt (dv_as_decl (dv
));
1499 static void loc_exp_dep_clear (variable var
);
1501 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
1504 variable_htab_free (void *elem
)
1507 variable var
= (variable
) elem
;
1508 location_chain node
, next
;
1510 gcc_checking_assert (var
->refcount
> 0);
1513 if (var
->refcount
> 0)
1516 for (i
= 0; i
< var
->n_var_parts
; i
++)
1518 for (node
= var
->var_part
[i
].loc_chain
; node
; node
= next
)
1523 var
->var_part
[i
].loc_chain
= NULL
;
1525 if (var
->onepart
&& VAR_LOC_1PAUX (var
))
1527 loc_exp_dep_clear (var
);
1528 if (VAR_LOC_DEP_LST (var
))
1529 VAR_LOC_DEP_LST (var
)->pprev
= NULL
;
1530 XDELETE (VAR_LOC_1PAUX (var
));
1531 /* These may be reused across functions, so reset
1533 if (var
->onepart
== ONEPART_DEXPR
)
1534 set_dv_changed (var
->dv
, true);
1536 onepart_pool (var
->onepart
).remove (var
);
1539 /* Initialize the set (array) SET of attrs to empty lists. */
1542 init_attrs_list_set (attrs
*set
)
1546 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1550 /* Make the list *LISTP empty. */
1553 attrs_list_clear (attrs
*listp
)
1557 for (list
= *listp
; list
; list
= next
)
1565 /* Return true if the pair of DECL and OFFSET is the member of the LIST. */
1568 attrs_list_member (attrs list
, decl_or_value dv
, HOST_WIDE_INT offset
)
1570 for (; list
; list
= list
->next
)
1571 if (dv_as_opaque (list
->dv
) == dv_as_opaque (dv
) && list
->offset
== offset
)
1576 /* Insert the triplet DECL, OFFSET, LOC to the list *LISTP. */
1579 attrs_list_insert (attrs
*listp
, decl_or_value dv
,
1580 HOST_WIDE_INT offset
, rtx loc
)
1582 attrs list
= new attrs_def
;
1585 list
->offset
= offset
;
1586 list
->next
= *listp
;
1590 /* Copy all nodes from SRC and create a list *DSTP of the copies. */
1593 attrs_list_copy (attrs
*dstp
, attrs src
)
1595 attrs_list_clear (dstp
);
1596 for (; src
; src
= src
->next
)
1598 attrs n
= new attrs_def
;
1601 n
->offset
= src
->offset
;
1607 /* Add all nodes from SRC which are not in *DSTP to *DSTP. */
1610 attrs_list_union (attrs
*dstp
, attrs src
)
1612 for (; src
; src
= src
->next
)
1614 if (!attrs_list_member (*dstp
, src
->dv
, src
->offset
))
1615 attrs_list_insert (dstp
, src
->dv
, src
->offset
, src
->loc
);
1619 /* Combine nodes that are not onepart nodes from SRC and SRC2 into
1623 attrs_list_mpdv_union (attrs
*dstp
, attrs src
, attrs src2
)
1625 gcc_assert (!*dstp
);
1626 for (; src
; src
= src
->next
)
1628 if (!dv_onepart_p (src
->dv
))
1629 attrs_list_insert (dstp
, src
->dv
, src
->offset
, src
->loc
);
1631 for (src
= src2
; src
; src
= src
->next
)
1633 if (!dv_onepart_p (src
->dv
)
1634 && !attrs_list_member (*dstp
, src
->dv
, src
->offset
))
1635 attrs_list_insert (dstp
, src
->dv
, src
->offset
, src
->loc
);
1639 /* Shared hashtable support. */
1641 /* Return true if VARS is shared. */
1644 shared_hash_shared (shared_hash vars
)
1646 return vars
->refcount
> 1;
1649 /* Return the hash table for VARS. */
1651 static inline variable_table_type
*
1652 shared_hash_htab (shared_hash vars
)
1657 /* Return true if VAR is shared, or maybe because VARS is shared. */
1660 shared_var_p (variable var
, shared_hash vars
)
1662 /* Don't count an entry in the changed_variables table as a duplicate. */
1663 return ((var
->refcount
> 1 + (int) var
->in_changed_variables
)
1664 || shared_hash_shared (vars
));
1667 /* Copy variables into a new hash table. */
1670 shared_hash_unshare (shared_hash vars
)
1672 shared_hash new_vars
= new shared_hash_def
;
1673 gcc_assert (vars
->refcount
> 1);
1674 new_vars
->refcount
= 1;
1675 new_vars
->htab
= new variable_table_type (vars
->htab
->elements () + 3);
1676 vars_copy (new_vars
->htab
, vars
->htab
);
1681 /* Increment reference counter on VARS and return it. */
1683 static inline shared_hash
1684 shared_hash_copy (shared_hash vars
)
1690 /* Decrement reference counter and destroy hash table if not shared
1694 shared_hash_destroy (shared_hash vars
)
1696 gcc_checking_assert (vars
->refcount
> 0);
1697 if (--vars
->refcount
== 0)
1704 /* Unshare *PVARS if shared and return slot for DV. If INS is
1705 INSERT, insert it if not already present. */
1707 static inline variable_def
**
1708 shared_hash_find_slot_unshare_1 (shared_hash
*pvars
, decl_or_value dv
,
1709 hashval_t dvhash
, enum insert_option ins
)
1711 if (shared_hash_shared (*pvars
))
1712 *pvars
= shared_hash_unshare (*pvars
);
1713 return shared_hash_htab (*pvars
)->find_slot_with_hash (dv
, dvhash
, ins
);
1716 static inline variable_def
**
1717 shared_hash_find_slot_unshare (shared_hash
*pvars
, decl_or_value dv
,
1718 enum insert_option ins
)
1720 return shared_hash_find_slot_unshare_1 (pvars
, dv
, dv_htab_hash (dv
), ins
);
1723 /* Return slot for DV, if it is already present in the hash table.
1724 If it is not present, insert it only VARS is not shared, otherwise
1727 static inline variable_def
**
1728 shared_hash_find_slot_1 (shared_hash vars
, decl_or_value dv
, hashval_t dvhash
)
1730 return shared_hash_htab (vars
)->find_slot_with_hash (dv
, dvhash
,
1731 shared_hash_shared (vars
)
1732 ? NO_INSERT
: INSERT
);
1735 static inline variable_def
**
1736 shared_hash_find_slot (shared_hash vars
, decl_or_value dv
)
1738 return shared_hash_find_slot_1 (vars
, dv
, dv_htab_hash (dv
));
1741 /* Return slot for DV only if it is already present in the hash table. */
1743 static inline variable_def
**
1744 shared_hash_find_slot_noinsert_1 (shared_hash vars
, decl_or_value dv
,
1747 return shared_hash_htab (vars
)->find_slot_with_hash (dv
, dvhash
, NO_INSERT
);
1750 static inline variable_def
**
1751 shared_hash_find_slot_noinsert (shared_hash vars
, decl_or_value dv
)
1753 return shared_hash_find_slot_noinsert_1 (vars
, dv
, dv_htab_hash (dv
));
1756 /* Return variable for DV or NULL if not already present in the hash
1759 static inline variable
1760 shared_hash_find_1 (shared_hash vars
, decl_or_value dv
, hashval_t dvhash
)
1762 return shared_hash_htab (vars
)->find_with_hash (dv
, dvhash
);
1765 static inline variable
1766 shared_hash_find (shared_hash vars
, decl_or_value dv
)
1768 return shared_hash_find_1 (vars
, dv
, dv_htab_hash (dv
));
1771 /* Return true if TVAL is better than CVAL as a canonival value. We
1772 choose lowest-numbered VALUEs, using the RTX address as a
1773 tie-breaker. The idea is to arrange them into a star topology,
1774 such that all of them are at most one step away from the canonical
1775 value, and the canonical value has backlinks to all of them, in
1776 addition to all the actual locations. We don't enforce this
1777 topology throughout the entire dataflow analysis, though.
1781 canon_value_cmp (rtx tval
, rtx cval
)
1784 || CSELIB_VAL_PTR (tval
)->uid
< CSELIB_VAL_PTR (cval
)->uid
;
1787 static bool dst_can_be_shared
;
1789 /* Return a copy of a variable VAR and insert it to dataflow set SET. */
1791 static variable_def
**
1792 unshare_variable (dataflow_set
*set
, variable_def
**slot
, variable var
,
1793 enum var_init_status initialized
)
1798 new_var
= onepart_pool (var
->onepart
).allocate ();
1799 new_var
->dv
= var
->dv
;
1800 new_var
->refcount
= 1;
1802 new_var
->n_var_parts
= var
->n_var_parts
;
1803 new_var
->onepart
= var
->onepart
;
1804 new_var
->in_changed_variables
= false;
1806 if (! flag_var_tracking_uninit
)
1807 initialized
= VAR_INIT_STATUS_INITIALIZED
;
1809 for (i
= 0; i
< var
->n_var_parts
; i
++)
1811 location_chain node
;
1812 location_chain
*nextp
;
1814 if (i
== 0 && var
->onepart
)
1816 /* One-part auxiliary data is only used while emitting
1817 notes, so propagate it to the new variable in the active
1818 dataflow set. If we're not emitting notes, this will be
1820 gcc_checking_assert (!VAR_LOC_1PAUX (var
) || emit_notes
);
1821 VAR_LOC_1PAUX (new_var
) = VAR_LOC_1PAUX (var
);
1822 VAR_LOC_1PAUX (var
) = NULL
;
1825 VAR_PART_OFFSET (new_var
, i
) = VAR_PART_OFFSET (var
, i
);
1826 nextp
= &new_var
->var_part
[i
].loc_chain
;
1827 for (node
= var
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
1829 location_chain new_lc
;
1831 new_lc
= new location_chain_def
;
1832 new_lc
->next
= NULL
;
1833 if (node
->init
> initialized
)
1834 new_lc
->init
= node
->init
;
1836 new_lc
->init
= initialized
;
1837 if (node
->set_src
&& !(MEM_P (node
->set_src
)))
1838 new_lc
->set_src
= node
->set_src
;
1840 new_lc
->set_src
= NULL
;
1841 new_lc
->loc
= node
->loc
;
1844 nextp
= &new_lc
->next
;
1847 new_var
->var_part
[i
].cur_loc
= var
->var_part
[i
].cur_loc
;
1850 dst_can_be_shared
= false;
1851 if (shared_hash_shared (set
->vars
))
1852 slot
= shared_hash_find_slot_unshare (&set
->vars
, var
->dv
, NO_INSERT
);
1853 else if (set
->traversed_vars
&& set
->vars
!= set
->traversed_vars
)
1854 slot
= shared_hash_find_slot_noinsert (set
->vars
, var
->dv
);
1856 if (var
->in_changed_variables
)
1858 variable_def
**cslot
1859 = changed_variables
->find_slot_with_hash (var
->dv
,
1860 dv_htab_hash (var
->dv
),
1862 gcc_assert (*cslot
== (void *) var
);
1863 var
->in_changed_variables
= false;
1864 variable_htab_free (var
);
1866 new_var
->in_changed_variables
= true;
1871 /* Copy all variables from hash table SRC to hash table DST. */
1874 vars_copy (variable_table_type
*dst
, variable_table_type
*src
)
1876 variable_iterator_type hi
;
1879 FOR_EACH_HASH_TABLE_ELEMENT (*src
, var
, variable
, hi
)
1881 variable_def
**dstp
;
1883 dstp
= dst
->find_slot_with_hash (var
->dv
, dv_htab_hash (var
->dv
),
1889 /* Map a decl to its main debug decl. */
1892 var_debug_decl (tree decl
)
1894 if (decl
&& TREE_CODE (decl
) == VAR_DECL
1895 && DECL_HAS_DEBUG_EXPR_P (decl
))
1897 tree debugdecl
= DECL_DEBUG_EXPR (decl
);
1898 if (DECL_P (debugdecl
))
1905 /* Set the register LOC to contain DV, OFFSET. */
1908 var_reg_decl_set (dataflow_set
*set
, rtx loc
, enum var_init_status initialized
,
1909 decl_or_value dv
, HOST_WIDE_INT offset
, rtx set_src
,
1910 enum insert_option iopt
)
1913 bool decl_p
= dv_is_decl_p (dv
);
1916 dv
= dv_from_decl (var_debug_decl (dv_as_decl (dv
)));
1918 for (node
= set
->regs
[REGNO (loc
)]; node
; node
= node
->next
)
1919 if (dv_as_opaque (node
->dv
) == dv_as_opaque (dv
)
1920 && node
->offset
== offset
)
1923 attrs_list_insert (&set
->regs
[REGNO (loc
)], dv
, offset
, loc
);
1924 set_variable_part (set
, loc
, dv
, offset
, initialized
, set_src
, iopt
);
1927 /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
1930 var_reg_set (dataflow_set
*set
, rtx loc
, enum var_init_status initialized
,
1933 tree decl
= REG_EXPR (loc
);
1934 HOST_WIDE_INT offset
= REG_OFFSET (loc
);
1936 var_reg_decl_set (set
, loc
, initialized
,
1937 dv_from_decl (decl
), offset
, set_src
, INSERT
);
1940 static enum var_init_status
1941 get_init_value (dataflow_set
*set
, rtx loc
, decl_or_value dv
)
1945 enum var_init_status ret_val
= VAR_INIT_STATUS_UNKNOWN
;
1947 if (! flag_var_tracking_uninit
)
1948 return VAR_INIT_STATUS_INITIALIZED
;
1950 var
= shared_hash_find (set
->vars
, dv
);
1953 for (i
= 0; i
< var
->n_var_parts
&& ret_val
== VAR_INIT_STATUS_UNKNOWN
; i
++)
1955 location_chain nextp
;
1956 for (nextp
= var
->var_part
[i
].loc_chain
; nextp
; nextp
= nextp
->next
)
1957 if (rtx_equal_p (nextp
->loc
, loc
))
1959 ret_val
= nextp
->init
;
1968 /* Delete current content of register LOC in dataflow set SET and set
1969 the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
1970 MODIFY is true, any other live copies of the same variable part are
1971 also deleted from the dataflow set, otherwise the variable part is
1972 assumed to be copied from another location holding the same
1976 var_reg_delete_and_set (dataflow_set
*set
, rtx loc
, bool modify
,
1977 enum var_init_status initialized
, rtx set_src
)
1979 tree decl
= REG_EXPR (loc
);
1980 HOST_WIDE_INT offset
= REG_OFFSET (loc
);
1984 decl
= var_debug_decl (decl
);
1986 if (initialized
== VAR_INIT_STATUS_UNKNOWN
)
1987 initialized
= get_init_value (set
, loc
, dv_from_decl (decl
));
1989 nextp
= &set
->regs
[REGNO (loc
)];
1990 for (node
= *nextp
; node
; node
= next
)
1993 if (dv_as_opaque (node
->dv
) != decl
|| node
->offset
!= offset
)
1995 delete_variable_part (set
, node
->loc
, node
->dv
, node
->offset
);
2002 nextp
= &node
->next
;
2006 clobber_variable_part (set
, loc
, dv_from_decl (decl
), offset
, set_src
);
2007 var_reg_set (set
, loc
, initialized
, set_src
);
2010 /* Delete the association of register LOC in dataflow set SET with any
2011 variables that aren't onepart. If CLOBBER is true, also delete any
2012 other live copies of the same variable part, and delete the
2013 association with onepart dvs too. */
2016 var_reg_delete (dataflow_set
*set
, rtx loc
, bool clobber
)
2018 attrs
*nextp
= &set
->regs
[REGNO (loc
)];
2023 tree decl
= REG_EXPR (loc
);
2024 HOST_WIDE_INT offset
= REG_OFFSET (loc
);
2026 decl
= var_debug_decl (decl
);
2028 clobber_variable_part (set
, NULL
, dv_from_decl (decl
), offset
, NULL
);
2031 for (node
= *nextp
; node
; node
= next
)
2034 if (clobber
|| !dv_onepart_p (node
->dv
))
2036 delete_variable_part (set
, node
->loc
, node
->dv
, node
->offset
);
2041 nextp
= &node
->next
;
2045 /* Delete content of register with number REGNO in dataflow set SET. */
2048 var_regno_delete (dataflow_set
*set
, int regno
)
2050 attrs
*reg
= &set
->regs
[regno
];
2053 for (node
= *reg
; node
; node
= next
)
2056 delete_variable_part (set
, node
->loc
, node
->dv
, node
->offset
);
2062 /* Return true if I is the negated value of a power of two. */
2064 negative_power_of_two_p (HOST_WIDE_INT i
)
2066 unsigned HOST_WIDE_INT x
= -(unsigned HOST_WIDE_INT
)i
;
2067 return x
== (x
& -x
);
2070 /* Strip constant offsets and alignments off of LOC. Return the base
2074 vt_get_canonicalize_base (rtx loc
)
2076 while ((GET_CODE (loc
) == PLUS
2077 || GET_CODE (loc
) == AND
)
2078 && GET_CODE (XEXP (loc
, 1)) == CONST_INT
2079 && (GET_CODE (loc
) != AND
2080 || negative_power_of_two_p (INTVAL (XEXP (loc
, 1)))))
2081 loc
= XEXP (loc
, 0);
2086 /* This caches canonicalized addresses for VALUEs, computed using
2087 information in the global cselib table. */
2088 static hash_map
<rtx
, rtx
> *global_get_addr_cache
;
2090 /* This caches canonicalized addresses for VALUEs, computed using
2091 information from the global cache and information pertaining to a
2092 basic block being analyzed. */
2093 static hash_map
<rtx
, rtx
> *local_get_addr_cache
;
2095 static rtx
vt_canonicalize_addr (dataflow_set
*, rtx
);
2097 /* Return the canonical address for LOC, that must be a VALUE, using a
2098 cached global equivalence or computing it and storing it in the
2102 get_addr_from_global_cache (rtx
const loc
)
2106 gcc_checking_assert (GET_CODE (loc
) == VALUE
);
2109 rtx
*slot
= &global_get_addr_cache
->get_or_insert (loc
, &existed
);
2113 x
= canon_rtx (get_addr (loc
));
2115 /* Tentative, avoiding infinite recursion. */
2120 rtx nx
= vt_canonicalize_addr (NULL
, x
);
2123 /* The table may have moved during recursion, recompute
2125 *global_get_addr_cache
->get (loc
) = x
= nx
;
2132 /* Return the canonical address for LOC, that must be a VALUE, using a
2133 cached local equivalence or computing it and storing it in the
2137 get_addr_from_local_cache (dataflow_set
*set
, rtx
const loc
)
2144 gcc_checking_assert (GET_CODE (loc
) == VALUE
);
2147 rtx
*slot
= &local_get_addr_cache
->get_or_insert (loc
, &existed
);
2151 x
= get_addr_from_global_cache (loc
);
2153 /* Tentative, avoiding infinite recursion. */
2156 /* Recurse to cache local expansion of X, or if we need to search
2157 for a VALUE in the expansion. */
2160 rtx nx
= vt_canonicalize_addr (set
, x
);
2163 slot
= local_get_addr_cache
->get (loc
);
2169 dv
= dv_from_rtx (x
);
2170 var
= shared_hash_find (set
->vars
, dv
);
2174 /* Look for an improved equivalent expression. */
2175 for (l
= var
->var_part
[0].loc_chain
; l
; l
= l
->next
)
2177 rtx base
= vt_get_canonicalize_base (l
->loc
);
2178 if (GET_CODE (base
) == VALUE
2179 && canon_value_cmp (base
, loc
))
2181 rtx nx
= vt_canonicalize_addr (set
, l
->loc
);
2184 slot
= local_get_addr_cache
->get (loc
);
2194 /* Canonicalize LOC using equivalences from SET in addition to those
2195 in the cselib static table. It expects a VALUE-based expression,
2196 and it will only substitute VALUEs with other VALUEs or
2197 function-global equivalences, so that, if two addresses have base
2198 VALUEs that are locally or globally related in ways that
2199 memrefs_conflict_p cares about, they will both canonicalize to
2200 expressions that have the same base VALUE.
2202 The use of VALUEs as canonical base addresses enables the canonical
2203 RTXs to remain unchanged globally, if they resolve to a constant,
2204 or throughout a basic block otherwise, so that they can be cached
2205 and the cache needs not be invalidated when REGs, MEMs or such
2209 vt_canonicalize_addr (dataflow_set
*set
, rtx oloc
)
2211 HOST_WIDE_INT ofst
= 0;
2212 machine_mode mode
= GET_MODE (oloc
);
2219 while (GET_CODE (loc
) == PLUS
2220 && GET_CODE (XEXP (loc
, 1)) == CONST_INT
)
2222 ofst
+= INTVAL (XEXP (loc
, 1));
2223 loc
= XEXP (loc
, 0);
2226 /* Alignment operations can't normally be combined, so just
2227 canonicalize the base and we're done. We'll normally have
2228 only one stack alignment anyway. */
2229 if (GET_CODE (loc
) == AND
2230 && GET_CODE (XEXP (loc
, 1)) == CONST_INT
2231 && negative_power_of_two_p (INTVAL (XEXP (loc
, 1))))
2233 x
= vt_canonicalize_addr (set
, XEXP (loc
, 0));
2234 if (x
!= XEXP (loc
, 0))
2235 loc
= gen_rtx_AND (mode
, x
, XEXP (loc
, 1));
2239 if (GET_CODE (loc
) == VALUE
)
2242 loc
= get_addr_from_local_cache (set
, loc
);
2244 loc
= get_addr_from_global_cache (loc
);
2246 /* Consolidate plus_constants. */
2247 while (ofst
&& GET_CODE (loc
) == PLUS
2248 && GET_CODE (XEXP (loc
, 1)) == CONST_INT
)
2250 ofst
+= INTVAL (XEXP (loc
, 1));
2251 loc
= XEXP (loc
, 0);
2258 x
= canon_rtx (loc
);
2265 /* Add OFST back in. */
2268 /* Don't build new RTL if we can help it. */
2269 if (GET_CODE (oloc
) == PLUS
2270 && XEXP (oloc
, 0) == loc
2271 && INTVAL (XEXP (oloc
, 1)) == ofst
)
2274 loc
= plus_constant (mode
, loc
, ofst
);
2280 /* Return true iff there's a true dependence between MLOC and LOC.
2281 MADDR must be a canonicalized version of MLOC's address. */
2284 vt_canon_true_dep (dataflow_set
*set
, rtx mloc
, rtx maddr
, rtx loc
)
2286 if (GET_CODE (loc
) != MEM
)
2289 rtx addr
= vt_canonicalize_addr (set
, XEXP (loc
, 0));
2290 if (!canon_true_dependence (mloc
, GET_MODE (mloc
), maddr
, loc
, addr
))
2296 /* Hold parameters for the hashtab traversal function
2297 drop_overlapping_mem_locs, see below. */
2299 struct overlapping_mems
2305 /* Remove all MEMs that overlap with COMS->LOC from the location list
2306 of a hash table entry for a value. COMS->ADDR must be a
2307 canonicalized form of COMS->LOC's address, and COMS->LOC must be
2308 canonicalized itself. */
2311 drop_overlapping_mem_locs (variable_def
**slot
, overlapping_mems
*coms
)
2313 dataflow_set
*set
= coms
->set
;
2314 rtx mloc
= coms
->loc
, addr
= coms
->addr
;
2315 variable var
= *slot
;
2317 if (var
->onepart
== ONEPART_VALUE
)
2319 location_chain loc
, *locp
;
2320 bool changed
= false;
2323 gcc_assert (var
->n_var_parts
== 1);
2325 if (shared_var_p (var
, set
->vars
))
2327 for (loc
= var
->var_part
[0].loc_chain
; loc
; loc
= loc
->next
)
2328 if (vt_canon_true_dep (set
, mloc
, addr
, loc
->loc
))
2334 slot
= unshare_variable (set
, slot
, var
, VAR_INIT_STATUS_UNKNOWN
);
2336 gcc_assert (var
->n_var_parts
== 1);
2339 if (VAR_LOC_1PAUX (var
))
2340 cur_loc
= VAR_LOC_FROM (var
);
2342 cur_loc
= var
->var_part
[0].cur_loc
;
2344 for (locp
= &var
->var_part
[0].loc_chain
, loc
= *locp
;
2347 if (!vt_canon_true_dep (set
, mloc
, addr
, loc
->loc
))
2354 /* If we have deleted the location which was last emitted
2355 we have to emit new location so add the variable to set
2356 of changed variables. */
2357 if (cur_loc
== loc
->loc
)
2360 var
->var_part
[0].cur_loc
= NULL
;
2361 if (VAR_LOC_1PAUX (var
))
2362 VAR_LOC_FROM (var
) = NULL
;
2367 if (!var
->var_part
[0].loc_chain
)
2373 variable_was_changed (var
, set
);
2379 /* Remove from SET all VALUE bindings to MEMs that overlap with LOC. */
2382 clobber_overlapping_mems (dataflow_set
*set
, rtx loc
)
2384 struct overlapping_mems coms
;
2386 gcc_checking_assert (GET_CODE (loc
) == MEM
);
2389 coms
.loc
= canon_rtx (loc
);
2390 coms
.addr
= vt_canonicalize_addr (set
, XEXP (loc
, 0));
2392 set
->traversed_vars
= set
->vars
;
2393 shared_hash_htab (set
->vars
)
2394 ->traverse
<overlapping_mems
*, drop_overlapping_mem_locs
> (&coms
);
2395 set
->traversed_vars
= NULL
;
2398 /* Set the location of DV, OFFSET as the MEM LOC. */
2401 var_mem_decl_set (dataflow_set
*set
, rtx loc
, enum var_init_status initialized
,
2402 decl_or_value dv
, HOST_WIDE_INT offset
, rtx set_src
,
2403 enum insert_option iopt
)
2405 if (dv_is_decl_p (dv
))
2406 dv
= dv_from_decl (var_debug_decl (dv_as_decl (dv
)));
2408 set_variable_part (set
, loc
, dv
, offset
, initialized
, set_src
, iopt
);
2411 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
2413 Adjust the address first if it is stack pointer based. */
2416 var_mem_set (dataflow_set
*set
, rtx loc
, enum var_init_status initialized
,
2419 tree decl
= MEM_EXPR (loc
);
2420 HOST_WIDE_INT offset
= INT_MEM_OFFSET (loc
);
2422 var_mem_decl_set (set
, loc
, initialized
,
2423 dv_from_decl (decl
), offset
, set_src
, INSERT
);
2426 /* Delete and set the location part of variable MEM_EXPR (LOC) in
2427 dataflow set SET to LOC. If MODIFY is true, any other live copies
2428 of the same variable part are also deleted from the dataflow set,
2429 otherwise the variable part is assumed to be copied from another
2430 location holding the same part.
2431 Adjust the address first if it is stack pointer based. */
2434 var_mem_delete_and_set (dataflow_set
*set
, rtx loc
, bool modify
,
2435 enum var_init_status initialized
, rtx set_src
)
2437 tree decl
= MEM_EXPR (loc
);
2438 HOST_WIDE_INT offset
= INT_MEM_OFFSET (loc
);
2440 clobber_overlapping_mems (set
, loc
);
2441 decl
= var_debug_decl (decl
);
2443 if (initialized
== VAR_INIT_STATUS_UNKNOWN
)
2444 initialized
= get_init_value (set
, loc
, dv_from_decl (decl
));
2447 clobber_variable_part (set
, NULL
, dv_from_decl (decl
), offset
, set_src
);
2448 var_mem_set (set
, loc
, initialized
, set_src
);
2451 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
2452 true, also delete any other live copies of the same variable part.
2453 Adjust the address first if it is stack pointer based. */
2456 var_mem_delete (dataflow_set
*set
, rtx loc
, bool clobber
)
2458 tree decl
= MEM_EXPR (loc
);
2459 HOST_WIDE_INT offset
= INT_MEM_OFFSET (loc
);
2461 clobber_overlapping_mems (set
, loc
);
2462 decl
= var_debug_decl (decl
);
2464 clobber_variable_part (set
, NULL
, dv_from_decl (decl
), offset
, NULL
);
2465 delete_variable_part (set
, loc
, dv_from_decl (decl
), offset
);
2468 /* Return true if LOC should not be expanded for location expressions,
2472 unsuitable_loc (rtx loc
)
2474 switch (GET_CODE (loc
))
2488 /* Bind VAL to LOC in SET. If MODIFIED, detach LOC from any values
2492 val_bind (dataflow_set
*set
, rtx val
, rtx loc
, bool modified
)
2497 var_regno_delete (set
, REGNO (loc
));
2498 var_reg_decl_set (set
, loc
, VAR_INIT_STATUS_INITIALIZED
,
2499 dv_from_value (val
), 0, NULL_RTX
, INSERT
);
2501 else if (MEM_P (loc
))
2503 struct elt_loc_list
*l
= CSELIB_VAL_PTR (val
)->locs
;
2506 clobber_overlapping_mems (set
, loc
);
2508 if (l
&& GET_CODE (l
->loc
) == VALUE
)
2509 l
= canonical_cselib_val (CSELIB_VAL_PTR (l
->loc
))->locs
;
2511 /* If this MEM is a global constant, we don't need it in the
2512 dynamic tables. ??? We should test this before emitting the
2513 micro-op in the first place. */
2515 if (GET_CODE (l
->loc
) == MEM
&& XEXP (l
->loc
, 0) == XEXP (loc
, 0))
2521 var_mem_decl_set (set
, loc
, VAR_INIT_STATUS_INITIALIZED
,
2522 dv_from_value (val
), 0, NULL_RTX
, INSERT
);
2526 /* Other kinds of equivalences are necessarily static, at least
2527 so long as we do not perform substitutions while merging
2530 set_variable_part (set
, loc
, dv_from_value (val
), 0,
2531 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
, INSERT
);
2535 /* Bind a value to a location it was just stored in. If MODIFIED
2536 holds, assume the location was modified, detaching it from any
2537 values bound to it. */
2540 val_store (dataflow_set
*set
, rtx val
, rtx loc
, rtx_insn
*insn
,
2543 cselib_val
*v
= CSELIB_VAL_PTR (val
);
2545 gcc_assert (cselib_preserved_value_p (v
));
2549 fprintf (dump_file
, "%i: ", insn
? INSN_UID (insn
) : 0);
2550 print_inline_rtx (dump_file
, loc
, 0);
2551 fprintf (dump_file
, " evaluates to ");
2552 print_inline_rtx (dump_file
, val
, 0);
2555 struct elt_loc_list
*l
;
2556 for (l
= v
->locs
; l
; l
= l
->next
)
2558 fprintf (dump_file
, "\n%i: ", INSN_UID (l
->setting_insn
));
2559 print_inline_rtx (dump_file
, l
->loc
, 0);
2562 fprintf (dump_file
, "\n");
2565 gcc_checking_assert (!unsuitable_loc (loc
));
2567 val_bind (set
, val
, loc
, modified
);
2570 /* Clear (canonical address) slots that reference X. */
2573 local_get_addr_clear_given_value (rtx
const &, rtx
*slot
, rtx x
)
2575 if (vt_get_canonicalize_base (*slot
) == x
)
2580 /* Reset this node, detaching all its equivalences. Return the slot
2581 in the variable hash table that holds dv, if there is one. */
2584 val_reset (dataflow_set
*set
, decl_or_value dv
)
2586 variable var
= shared_hash_find (set
->vars
, dv
) ;
2587 location_chain node
;
2590 if (!var
|| !var
->n_var_parts
)
2593 gcc_assert (var
->n_var_parts
== 1);
2595 if (var
->onepart
== ONEPART_VALUE
)
2597 rtx x
= dv_as_value (dv
);
2599 /* Relationships in the global cache don't change, so reset the
2600 local cache entry only. */
2601 rtx
*slot
= local_get_addr_cache
->get (x
);
2604 /* If the value resolved back to itself, odds are that other
2605 values may have cached it too. These entries now refer
2606 to the old X, so detach them too. Entries that used the
2607 old X but resolved to something else remain ok as long as
2608 that something else isn't also reset. */
2610 local_get_addr_cache
2611 ->traverse
<rtx
, local_get_addr_clear_given_value
> (x
);
2617 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
2618 if (GET_CODE (node
->loc
) == VALUE
2619 && canon_value_cmp (node
->loc
, cval
))
2622 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
2623 if (GET_CODE (node
->loc
) == VALUE
&& cval
!= node
->loc
)
2625 /* Redirect the equivalence link to the new canonical
2626 value, or simply remove it if it would point at
2629 set_variable_part (set
, cval
, dv_from_value (node
->loc
),
2630 0, node
->init
, node
->set_src
, NO_INSERT
);
2631 delete_variable_part (set
, dv_as_value (dv
),
2632 dv_from_value (node
->loc
), 0);
2637 decl_or_value cdv
= dv_from_value (cval
);
2639 /* Keep the remaining values connected, accumulating links
2640 in the canonical value. */
2641 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
2643 if (node
->loc
== cval
)
2645 else if (GET_CODE (node
->loc
) == REG
)
2646 var_reg_decl_set (set
, node
->loc
, node
->init
, cdv
, 0,
2647 node
->set_src
, NO_INSERT
);
2648 else if (GET_CODE (node
->loc
) == MEM
)
2649 var_mem_decl_set (set
, node
->loc
, node
->init
, cdv
, 0,
2650 node
->set_src
, NO_INSERT
);
2652 set_variable_part (set
, node
->loc
, cdv
, 0,
2653 node
->init
, node
->set_src
, NO_INSERT
);
2657 /* We remove this last, to make sure that the canonical value is not
2658 removed to the point of requiring reinsertion. */
2660 delete_variable_part (set
, dv_as_value (dv
), dv_from_value (cval
), 0);
2662 clobber_variable_part (set
, NULL
, dv
, 0, NULL
);
2665 /* Find the values in a given location and map the val to another
2666 value, if it is unique, or add the location as one holding the
2670 val_resolve (dataflow_set
*set
, rtx val
, rtx loc
, rtx_insn
*insn
)
2672 decl_or_value dv
= dv_from_value (val
);
2674 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2677 fprintf (dump_file
, "%i: ", INSN_UID (insn
));
2679 fprintf (dump_file
, "head: ");
2680 print_inline_rtx (dump_file
, val
, 0);
2681 fputs (" is at ", dump_file
);
2682 print_inline_rtx (dump_file
, loc
, 0);
2683 fputc ('\n', dump_file
);
2686 val_reset (set
, dv
);
2688 gcc_checking_assert (!unsuitable_loc (loc
));
2692 attrs node
, found
= NULL
;
2694 for (node
= set
->regs
[REGNO (loc
)]; node
; node
= node
->next
)
2695 if (dv_is_value_p (node
->dv
)
2696 && GET_MODE (dv_as_value (node
->dv
)) == GET_MODE (loc
))
2700 /* Map incoming equivalences. ??? Wouldn't it be nice if
2701 we just started sharing the location lists? Maybe a
2702 circular list ending at the value itself or some
2704 set_variable_part (set
, dv_as_value (node
->dv
),
2705 dv_from_value (val
), node
->offset
,
2706 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
, INSERT
);
2707 set_variable_part (set
, val
, node
->dv
, node
->offset
,
2708 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
, INSERT
);
2711 /* If we didn't find any equivalence, we need to remember that
2712 this value is held in the named register. */
2716 /* ??? Attempt to find and merge equivalent MEMs or other
2719 val_bind (set
, val
, loc
, false);
2722 /* Initialize dataflow set SET to be empty.
2723 VARS_SIZE is the initial size of hash table VARS. */
2726 dataflow_set_init (dataflow_set
*set
)
2728 init_attrs_list_set (set
->regs
);
2729 set
->vars
= shared_hash_copy (empty_shared_hash
);
2730 set
->stack_adjust
= 0;
2731 set
->traversed_vars
= NULL
;
2734 /* Delete the contents of dataflow set SET. */
2737 dataflow_set_clear (dataflow_set
*set
)
2741 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2742 attrs_list_clear (&set
->regs
[i
]);
2744 shared_hash_destroy (set
->vars
);
2745 set
->vars
= shared_hash_copy (empty_shared_hash
);
2748 /* Copy the contents of dataflow set SRC to DST. */
2751 dataflow_set_copy (dataflow_set
*dst
, dataflow_set
*src
)
2755 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2756 attrs_list_copy (&dst
->regs
[i
], src
->regs
[i
]);
2758 shared_hash_destroy (dst
->vars
);
2759 dst
->vars
= shared_hash_copy (src
->vars
);
2760 dst
->stack_adjust
= src
->stack_adjust
;
2763 /* Information for merging lists of locations for a given offset of variable.
2765 struct variable_union_info
2767 /* Node of the location chain. */
2770 /* The sum of positions in the input chains. */
2773 /* The position in the chain of DST dataflow set. */
2777 /* Buffer for location list sorting and its allocated size. */
2778 static struct variable_union_info
*vui_vec
;
2779 static int vui_allocated
;
2781 /* Compare function for qsort, order the structures by POS element. */
2784 variable_union_info_cmp_pos (const void *n1
, const void *n2
)
2786 const struct variable_union_info
*const i1
=
2787 (const struct variable_union_info
*) n1
;
2788 const struct variable_union_info
*const i2
=
2789 ( const struct variable_union_info
*) n2
;
2791 if (i1
->pos
!= i2
->pos
)
2792 return i1
->pos
- i2
->pos
;
2794 return (i1
->pos_dst
- i2
->pos_dst
);
2797 /* Compute union of location parts of variable *SLOT and the same variable
2798 from hash table DATA. Compute "sorted" union of the location chains
2799 for common offsets, i.e. the locations of a variable part are sorted by
2800 a priority where the priority is the sum of the positions in the 2 chains
2801 (if a location is only in one list the position in the second list is
2802 defined to be larger than the length of the chains).
2803 When we are updating the location parts the newest location is in the
2804 beginning of the chain, so when we do the described "sorted" union
2805 we keep the newest locations in the beginning. */
2808 variable_union (variable src
, dataflow_set
*set
)
2811 variable_def
**dstp
;
2814 dstp
= shared_hash_find_slot (set
->vars
, src
->dv
);
2815 if (!dstp
|| !*dstp
)
2819 dst_can_be_shared
= false;
2821 dstp
= shared_hash_find_slot_unshare (&set
->vars
, src
->dv
, INSERT
);
2825 /* Continue traversing the hash table. */
2831 gcc_assert (src
->n_var_parts
);
2832 gcc_checking_assert (src
->onepart
== dst
->onepart
);
2834 /* We can combine one-part variables very efficiently, because their
2835 entries are in canonical order. */
2838 location_chain
*nodep
, dnode
, snode
;
2840 gcc_assert (src
->n_var_parts
== 1
2841 && dst
->n_var_parts
== 1);
2843 snode
= src
->var_part
[0].loc_chain
;
2846 restart_onepart_unshared
:
2847 nodep
= &dst
->var_part
[0].loc_chain
;
2853 int r
= dnode
? loc_cmp (dnode
->loc
, snode
->loc
) : 1;
2857 location_chain nnode
;
2859 if (shared_var_p (dst
, set
->vars
))
2861 dstp
= unshare_variable (set
, dstp
, dst
,
2862 VAR_INIT_STATUS_INITIALIZED
);
2864 goto restart_onepart_unshared
;
2867 *nodep
= nnode
= new location_chain_def
;
2868 nnode
->loc
= snode
->loc
;
2869 nnode
->init
= snode
->init
;
2870 if (!snode
->set_src
|| MEM_P (snode
->set_src
))
2871 nnode
->set_src
= NULL
;
2873 nnode
->set_src
= snode
->set_src
;
2874 nnode
->next
= dnode
;
2878 gcc_checking_assert (rtx_equal_p (dnode
->loc
, snode
->loc
));
2881 snode
= snode
->next
;
2883 nodep
= &dnode
->next
;
2890 gcc_checking_assert (!src
->onepart
);
2892 /* Count the number of location parts, result is K. */
2893 for (i
= 0, j
= 0, k
= 0;
2894 i
< src
->n_var_parts
&& j
< dst
->n_var_parts
; k
++)
2896 if (VAR_PART_OFFSET (src
, i
) == VAR_PART_OFFSET (dst
, j
))
2901 else if (VAR_PART_OFFSET (src
, i
) < VAR_PART_OFFSET (dst
, j
))
2906 k
+= src
->n_var_parts
- i
;
2907 k
+= dst
->n_var_parts
- j
;
2909 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
2910 thus there are at most MAX_VAR_PARTS different offsets. */
2911 gcc_checking_assert (dst
->onepart
? k
== 1 : k
<= MAX_VAR_PARTS
);
2913 if (dst
->n_var_parts
!= k
&& shared_var_p (dst
, set
->vars
))
2915 dstp
= unshare_variable (set
, dstp
, dst
, VAR_INIT_STATUS_UNKNOWN
);
2919 i
= src
->n_var_parts
- 1;
2920 j
= dst
->n_var_parts
- 1;
2921 dst
->n_var_parts
= k
;
2923 for (k
--; k
>= 0; k
--)
2925 location_chain node
, node2
;
2927 if (i
>= 0 && j
>= 0
2928 && VAR_PART_OFFSET (src
, i
) == VAR_PART_OFFSET (dst
, j
))
2930 /* Compute the "sorted" union of the chains, i.e. the locations which
2931 are in both chains go first, they are sorted by the sum of
2932 positions in the chains. */
2935 struct variable_union_info
*vui
;
2937 /* If DST is shared compare the location chains.
2938 If they are different we will modify the chain in DST with
2939 high probability so make a copy of DST. */
2940 if (shared_var_p (dst
, set
->vars
))
2942 for (node
= src
->var_part
[i
].loc_chain
,
2943 node2
= dst
->var_part
[j
].loc_chain
; node
&& node2
;
2944 node
= node
->next
, node2
= node2
->next
)
2946 if (!((REG_P (node2
->loc
)
2947 && REG_P (node
->loc
)
2948 && REGNO (node2
->loc
) == REGNO (node
->loc
))
2949 || rtx_equal_p (node2
->loc
, node
->loc
)))
2951 if (node2
->init
< node
->init
)
2952 node2
->init
= node
->init
;
2958 dstp
= unshare_variable (set
, dstp
, dst
,
2959 VAR_INIT_STATUS_UNKNOWN
);
2960 dst
= (variable
)*dstp
;
2965 for (node
= src
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
2968 for (node
= dst
->var_part
[j
].loc_chain
; node
; node
= node
->next
)
2973 /* The most common case, much simpler, no qsort is needed. */
2974 location_chain dstnode
= dst
->var_part
[j
].loc_chain
;
2975 dst
->var_part
[k
].loc_chain
= dstnode
;
2976 VAR_PART_OFFSET (dst
, k
) = VAR_PART_OFFSET (dst
, j
);
2978 for (node
= src
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
2979 if (!((REG_P (dstnode
->loc
)
2980 && REG_P (node
->loc
)
2981 && REGNO (dstnode
->loc
) == REGNO (node
->loc
))
2982 || rtx_equal_p (dstnode
->loc
, node
->loc
)))
2984 location_chain new_node
;
2986 /* Copy the location from SRC. */
2987 new_node
= new location_chain_def
;
2988 new_node
->loc
= node
->loc
;
2989 new_node
->init
= node
->init
;
2990 if (!node
->set_src
|| MEM_P (node
->set_src
))
2991 new_node
->set_src
= NULL
;
2993 new_node
->set_src
= node
->set_src
;
2994 node2
->next
= new_node
;
3001 if (src_l
+ dst_l
> vui_allocated
)
3003 vui_allocated
= MAX (vui_allocated
* 2, src_l
+ dst_l
);
3004 vui_vec
= XRESIZEVEC (struct variable_union_info
, vui_vec
,
3009 /* Fill in the locations from DST. */
3010 for (node
= dst
->var_part
[j
].loc_chain
, jj
= 0; node
;
3011 node
= node
->next
, jj
++)
3014 vui
[jj
].pos_dst
= jj
;
3016 /* Pos plus value larger than a sum of 2 valid positions. */
3017 vui
[jj
].pos
= jj
+ src_l
+ dst_l
;
3020 /* Fill in the locations from SRC. */
3022 for (node
= src
->var_part
[i
].loc_chain
, ii
= 0; node
;
3023 node
= node
->next
, ii
++)
3025 /* Find location from NODE. */
3026 for (jj
= 0; jj
< dst_l
; jj
++)
3028 if ((REG_P (vui
[jj
].lc
->loc
)
3029 && REG_P (node
->loc
)
3030 && REGNO (vui
[jj
].lc
->loc
) == REGNO (node
->loc
))
3031 || rtx_equal_p (vui
[jj
].lc
->loc
, node
->loc
))
3033 vui
[jj
].pos
= jj
+ ii
;
3037 if (jj
>= dst_l
) /* The location has not been found. */
3039 location_chain new_node
;
3041 /* Copy the location from SRC. */
3042 new_node
= new location_chain_def
;
3043 new_node
->loc
= node
->loc
;
3044 new_node
->init
= node
->init
;
3045 if (!node
->set_src
|| MEM_P (node
->set_src
))
3046 new_node
->set_src
= NULL
;
3048 new_node
->set_src
= node
->set_src
;
3049 vui
[n
].lc
= new_node
;
3050 vui
[n
].pos_dst
= src_l
+ dst_l
;
3051 vui
[n
].pos
= ii
+ src_l
+ dst_l
;
3058 /* Special case still very common case. For dst_l == 2
3059 all entries dst_l ... n-1 are sorted, with for i >= dst_l
3060 vui[i].pos == i + src_l + dst_l. */
3061 if (vui
[0].pos
> vui
[1].pos
)
3063 /* Order should be 1, 0, 2... */
3064 dst
->var_part
[k
].loc_chain
= vui
[1].lc
;
3065 vui
[1].lc
->next
= vui
[0].lc
;
3068 vui
[0].lc
->next
= vui
[2].lc
;
3069 vui
[n
- 1].lc
->next
= NULL
;
3072 vui
[0].lc
->next
= NULL
;
3077 dst
->var_part
[k
].loc_chain
= vui
[0].lc
;
3078 if (n
>= 3 && vui
[2].pos
< vui
[1].pos
)
3080 /* Order should be 0, 2, 1, 3... */
3081 vui
[0].lc
->next
= vui
[2].lc
;
3082 vui
[2].lc
->next
= vui
[1].lc
;
3085 vui
[1].lc
->next
= vui
[3].lc
;
3086 vui
[n
- 1].lc
->next
= NULL
;
3089 vui
[1].lc
->next
= NULL
;
3094 /* Order should be 0, 1, 2... */
3096 vui
[n
- 1].lc
->next
= NULL
;
3099 for (; ii
< n
; ii
++)
3100 vui
[ii
- 1].lc
->next
= vui
[ii
].lc
;
3104 qsort (vui
, n
, sizeof (struct variable_union_info
),
3105 variable_union_info_cmp_pos
);
3107 /* Reconnect the nodes in sorted order. */
3108 for (ii
= 1; ii
< n
; ii
++)
3109 vui
[ii
- 1].lc
->next
= vui
[ii
].lc
;
3110 vui
[n
- 1].lc
->next
= NULL
;
3111 dst
->var_part
[k
].loc_chain
= vui
[0].lc
;
3114 VAR_PART_OFFSET (dst
, k
) = VAR_PART_OFFSET (dst
, j
);
3119 else if ((i
>= 0 && j
>= 0
3120 && VAR_PART_OFFSET (src
, i
) < VAR_PART_OFFSET (dst
, j
))
3123 dst
->var_part
[k
] = dst
->var_part
[j
];
3126 else if ((i
>= 0 && j
>= 0
3127 && VAR_PART_OFFSET (src
, i
) > VAR_PART_OFFSET (dst
, j
))
3130 location_chain
*nextp
;
3132 /* Copy the chain from SRC. */
3133 nextp
= &dst
->var_part
[k
].loc_chain
;
3134 for (node
= src
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
3136 location_chain new_lc
;
3138 new_lc
= new location_chain_def
;
3139 new_lc
->next
= NULL
;
3140 new_lc
->init
= node
->init
;
3141 if (!node
->set_src
|| MEM_P (node
->set_src
))
3142 new_lc
->set_src
= NULL
;
3144 new_lc
->set_src
= node
->set_src
;
3145 new_lc
->loc
= node
->loc
;
3148 nextp
= &new_lc
->next
;
3151 VAR_PART_OFFSET (dst
, k
) = VAR_PART_OFFSET (src
, i
);
3154 dst
->var_part
[k
].cur_loc
= NULL
;
3157 if (flag_var_tracking_uninit
)
3158 for (i
= 0; i
< src
->n_var_parts
&& i
< dst
->n_var_parts
; i
++)
3160 location_chain node
, node2
;
3161 for (node
= src
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
3162 for (node2
= dst
->var_part
[i
].loc_chain
; node2
; node2
= node2
->next
)
3163 if (rtx_equal_p (node
->loc
, node2
->loc
))
3165 if (node
->init
> node2
->init
)
3166 node2
->init
= node
->init
;
3170 /* Continue traversing the hash table. */
3174 /* Compute union of dataflow sets SRC and DST and store it to DST. */
3177 dataflow_set_union (dataflow_set
*dst
, dataflow_set
*src
)
3181 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3182 attrs_list_union (&dst
->regs
[i
], src
->regs
[i
]);
3184 if (dst
->vars
== empty_shared_hash
)
3186 shared_hash_destroy (dst
->vars
);
3187 dst
->vars
= shared_hash_copy (src
->vars
);
3191 variable_iterator_type hi
;
3194 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (src
->vars
),
3196 variable_union (var
, dst
);
/* Whether the value is currently being expanded.  */
#define VALUE_RECURSED_INTO(x) \
  (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)

/* Whether no expansion was found, saving useless lookups.
   It must only be set when VALUE_CHANGED is clear.  */
#define NO_LOC_P(x) \
  (RTL_FLAG_CHECK2 ("NO_LOC_P", (x), VALUE, DEBUG_EXPR)->return_val)

/* Whether cur_loc in the value needs to be (re)computed.  */
#define VALUE_CHANGED(x) \
  (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)

/* Whether cur_loc in the decl needs to be (re)computed.  */
#define DECL_CHANGED(x) TREE_VISITED (x)
3215 /* Record (if NEWV) that DV needs to have its cur_loc recomputed. For
3216 user DECLs, this means they're in changed_variables. Values and
3217 debug exprs may be left with this flag set if no user variable
3218 requires them to be evaluated. */
3221 set_dv_changed (decl_or_value dv
, bool newv
)
3223 switch (dv_onepart_p (dv
))
3227 NO_LOC_P (dv_as_value (dv
)) = false;
3228 VALUE_CHANGED (dv_as_value (dv
)) = newv
;
3233 NO_LOC_P (DECL_RTL_KNOWN_SET (dv_as_decl (dv
))) = false;
3234 /* Fall through... */
3237 DECL_CHANGED (dv_as_decl (dv
)) = newv
;
3242 /* Return true if DV needs to have its cur_loc recomputed. */
3245 dv_changed_p (decl_or_value dv
)
3247 return (dv_is_value_p (dv
)
3248 ? VALUE_CHANGED (dv_as_value (dv
))
3249 : DECL_CHANGED (dv_as_decl (dv
)));
3252 /* Return a location list node whose loc is rtx_equal to LOC, in the
3253 location list of a one-part variable or value VAR, or in that of
3254 any values recursively mentioned in the location lists. VARS must
3255 be in star-canonical form. */
3257 static location_chain
3258 find_loc_in_1pdv (rtx loc
, variable var
, variable_table_type
*vars
)
3260 location_chain node
;
3261 enum rtx_code loc_code
;
3266 gcc_checking_assert (var
->onepart
);
3268 if (!var
->n_var_parts
)
3271 gcc_checking_assert (loc
!= dv_as_opaque (var
->dv
));
3273 loc_code
= GET_CODE (loc
);
3274 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
3279 if (GET_CODE (node
->loc
) != loc_code
)
3281 if (GET_CODE (node
->loc
) != VALUE
)
3284 else if (loc
== node
->loc
)
3286 else if (loc_code
!= VALUE
)
3288 if (rtx_equal_p (loc
, node
->loc
))
3293 /* Since we're in star-canonical form, we don't need to visit
3294 non-canonical nodes: one-part variables and non-canonical
3295 values would only point back to the canonical node. */
3296 if (dv_is_value_p (var
->dv
)
3297 && !canon_value_cmp (node
->loc
, dv_as_value (var
->dv
)))
3299 /* Skip all subsequent VALUEs. */
3300 while (node
->next
&& GET_CODE (node
->next
->loc
) == VALUE
)
3303 gcc_checking_assert (!canon_value_cmp (node
->loc
,
3304 dv_as_value (var
->dv
)));
3305 if (loc
== node
->loc
)
3311 gcc_checking_assert (node
== var
->var_part
[0].loc_chain
);
3312 gcc_checking_assert (!node
->next
);
3314 dv
= dv_from_value (node
->loc
);
3315 rvar
= vars
->find_with_hash (dv
, dv_htab_hash (dv
));
3316 return find_loc_in_1pdv (loc
, rvar
, vars
);
3319 /* ??? Gotta look in cselib_val locations too. */
3324 /* Hash table iteration argument passed to variable_merge. */
3327 /* The set in which the merge is to be inserted. */
3329 /* The set that we're iterating in. */
3331 /* The set that may contain the other dv we are to merge with. */
3333 /* Number of onepart dvs in src. */
3334 int src_onepart_cnt
;
3337 /* Insert LOC in *DNODE, if it's not there yet. The list must be in
3338 loc_cmp order, and it is maintained as such. */
3341 insert_into_intersection (location_chain
*nodep
, rtx loc
,
3342 enum var_init_status status
)
3344 location_chain node
;
3347 for (node
= *nodep
; node
; nodep
= &node
->next
, node
= *nodep
)
3348 if ((r
= loc_cmp (node
->loc
, loc
)) == 0)
3350 node
->init
= MIN (node
->init
, status
);
3356 node
= new location_chain_def
;
3359 node
->set_src
= NULL
;
3360 node
->init
= status
;
3361 node
->next
= *nodep
;
3365 /* Insert in DEST the intersection of the locations present in both
3366 S1NODE and S2VAR, directly or indirectly. S1NODE is from a
3367 variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in
3371 intersect_loc_chains (rtx val
, location_chain
*dest
, struct dfset_merge
*dsm
,
3372 location_chain s1node
, variable s2var
)
3374 dataflow_set
*s1set
= dsm
->cur
;
3375 dataflow_set
*s2set
= dsm
->src
;
3376 location_chain found
;
3380 location_chain s2node
;
3382 gcc_checking_assert (s2var
->onepart
);
3384 if (s2var
->n_var_parts
)
3386 s2node
= s2var
->var_part
[0].loc_chain
;
3388 for (; s1node
&& s2node
;
3389 s1node
= s1node
->next
, s2node
= s2node
->next
)
3390 if (s1node
->loc
!= s2node
->loc
)
3392 else if (s1node
->loc
== val
)
3395 insert_into_intersection (dest
, s1node
->loc
,
3396 MIN (s1node
->init
, s2node
->init
));
3400 for (; s1node
; s1node
= s1node
->next
)
3402 if (s1node
->loc
== val
)
3405 if ((found
= find_loc_in_1pdv (s1node
->loc
, s2var
,
3406 shared_hash_htab (s2set
->vars
))))
3408 insert_into_intersection (dest
, s1node
->loc
,
3409 MIN (s1node
->init
, found
->init
));
3413 if (GET_CODE (s1node
->loc
) == VALUE
3414 && !VALUE_RECURSED_INTO (s1node
->loc
))
3416 decl_or_value dv
= dv_from_value (s1node
->loc
);
3417 variable svar
= shared_hash_find (s1set
->vars
, dv
);
3420 if (svar
->n_var_parts
== 1)
3422 VALUE_RECURSED_INTO (s1node
->loc
) = true;
3423 intersect_loc_chains (val
, dest
, dsm
,
3424 svar
->var_part
[0].loc_chain
,
3426 VALUE_RECURSED_INTO (s1node
->loc
) = false;
3431 /* ??? gotta look in cselib_val locations too. */
3433 /* ??? if the location is equivalent to any location in src,
3434 searched recursively
3436 add to dst the values needed to represent the equivalence
3438 telling whether locations S is equivalent to another dv's
3441 for each location D in the list
3443 if S and D satisfy rtx_equal_p, then it is present
3445 else if D is a value, recurse without cycles
3447 else if S and D have the same CODE and MODE
3449 for each operand oS and the corresponding oD
3451 if oS and oD are not equivalent, then S an D are not equivalent
3453 else if they are RTX vectors
3455 if any vector oS element is not equivalent to its respective oD,
3456 then S and D are not equivalent
3464 /* Return -1 if X should be before Y in a location list for a 1-part
3465 variable, 1 if Y should be before X, and 0 if they're equivalent
3466 and should not appear in the list. */
3469 loc_cmp (rtx x
, rtx y
)
3472 RTX_CODE code
= GET_CODE (x
);
3482 gcc_assert (GET_MODE (x
) == GET_MODE (y
));
3483 if (REGNO (x
) == REGNO (y
))
3485 else if (REGNO (x
) < REGNO (y
))
3498 gcc_assert (GET_MODE (x
) == GET_MODE (y
));
3499 return loc_cmp (XEXP (x
, 0), XEXP (y
, 0));
3505 if (GET_CODE (x
) == VALUE
)
3507 if (GET_CODE (y
) != VALUE
)
3509 /* Don't assert the modes are the same, that is true only
3510 when not recursing. (subreg:QI (value:SI 1:1) 0)
3511 and (subreg:QI (value:DI 2:2) 0) can be compared,
3512 even when the modes are different. */
3513 if (canon_value_cmp (x
, y
))
3519 if (GET_CODE (y
) == VALUE
)
3522 /* Entry value is the least preferable kind of expression. */
3523 if (GET_CODE (x
) == ENTRY_VALUE
)
3525 if (GET_CODE (y
) != ENTRY_VALUE
)
3527 gcc_assert (GET_MODE (x
) == GET_MODE (y
));
3528 return loc_cmp (ENTRY_VALUE_EXP (x
), ENTRY_VALUE_EXP (y
));
3531 if (GET_CODE (y
) == ENTRY_VALUE
)
3534 if (GET_CODE (x
) == GET_CODE (y
))
3535 /* Compare operands below. */;
3536 else if (GET_CODE (x
) < GET_CODE (y
))
3541 gcc_assert (GET_MODE (x
) == GET_MODE (y
));
3543 if (GET_CODE (x
) == DEBUG_EXPR
)
3545 if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x
))
3546 < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y
)))
3548 gcc_checking_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x
))
3549 > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y
)));
3553 fmt
= GET_RTX_FORMAT (code
);
3554 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++)
3558 if (XWINT (x
, i
) == XWINT (y
, i
))
3560 else if (XWINT (x
, i
) < XWINT (y
, i
))
3567 if (XINT (x
, i
) == XINT (y
, i
))
3569 else if (XINT (x
, i
) < XINT (y
, i
))
3576 /* Compare the vector length first. */
3577 if (XVECLEN (x
, i
) == XVECLEN (y
, i
))
3578 /* Compare the vectors elements. */;
3579 else if (XVECLEN (x
, i
) < XVECLEN (y
, i
))
3584 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
3585 if ((r
= loc_cmp (XVECEXP (x
, i
, j
),
3586 XVECEXP (y
, i
, j
))))
3591 if ((r
= loc_cmp (XEXP (x
, i
), XEXP (y
, i
))))
3597 if (XSTR (x
, i
) == XSTR (y
, i
))
3603 if ((r
= strcmp (XSTR (x
, i
), XSTR (y
, i
))) == 0)
3611 /* These are just backpointers, so they don't matter. */
3618 /* It is believed that rtx's at this level will never
3619 contain anything but integers and other rtx's,
3620 except for within LABEL_REFs and SYMBOL_REFs. */
3624 if (CONST_WIDE_INT_P (x
))
3626 /* Compare the vector length first. */
3627 if (CONST_WIDE_INT_NUNITS (x
) >= CONST_WIDE_INT_NUNITS (y
))
3629 else if (CONST_WIDE_INT_NUNITS (x
) < CONST_WIDE_INT_NUNITS (y
))
3632 /* Compare the vectors elements. */;
3633 for (j
= CONST_WIDE_INT_NUNITS (x
) - 1; j
>= 0 ; j
--)
3635 if (CONST_WIDE_INT_ELT (x
, j
) < CONST_WIDE_INT_ELT (y
, j
))
3637 if (CONST_WIDE_INT_ELT (x
, j
) > CONST_WIDE_INT_ELT (y
, j
))
3646 /* Check the order of entries in one-part variables. */
3649 canonicalize_loc_order_check (variable_def
**slot
,
3650 dataflow_set
*data ATTRIBUTE_UNUSED
)
3652 variable var
= *slot
;
3653 location_chain node
, next
;
3655 #ifdef ENABLE_RTL_CHECKING
3657 for (i
= 0; i
< var
->n_var_parts
; i
++)
3658 gcc_assert (var
->var_part
[0].cur_loc
== NULL
);
3659 gcc_assert (!var
->in_changed_variables
);
3665 gcc_assert (var
->n_var_parts
== 1);
3666 node
= var
->var_part
[0].loc_chain
;
3669 while ((next
= node
->next
))
3671 gcc_assert (loc_cmp (node
->loc
, next
->loc
) < 0);
3679 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
3680 more likely to be chosen as canonical for an equivalence set.
3681 Ensure less likely values can reach more likely neighbors, making
3682 the connections bidirectional. */
3685 canonicalize_values_mark (variable_def
**slot
, dataflow_set
*set
)
3687 variable var
= *slot
;
3688 decl_or_value dv
= var
->dv
;
3690 location_chain node
;
3692 if (!dv_is_value_p (dv
))
3695 gcc_checking_assert (var
->n_var_parts
== 1);
3697 val
= dv_as_value (dv
);
3699 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
3700 if (GET_CODE (node
->loc
) == VALUE
)
3702 if (canon_value_cmp (node
->loc
, val
))
3703 VALUE_RECURSED_INTO (val
) = true;
3706 decl_or_value odv
= dv_from_value (node
->loc
);
3707 variable_def
**oslot
;
3708 oslot
= shared_hash_find_slot_noinsert (set
->vars
, odv
);
3710 set_slot_part (set
, val
, oslot
, odv
, 0,
3711 node
->init
, NULL_RTX
);
3713 VALUE_RECURSED_INTO (node
->loc
) = true;
3720 /* Remove redundant entries from equivalence lists in onepart
3721 variables, canonicalizing equivalence sets into star shapes. */
3724 canonicalize_values_star (variable_def
**slot
, dataflow_set
*set
)
3726 variable var
= *slot
;
3727 decl_or_value dv
= var
->dv
;
3728 location_chain node
;
3731 variable_def
**cslot
;
3738 gcc_checking_assert (var
->n_var_parts
== 1);
3740 if (dv_is_value_p (dv
))
3742 cval
= dv_as_value (dv
);
3743 if (!VALUE_RECURSED_INTO (cval
))
3745 VALUE_RECURSED_INTO (cval
) = false;
3755 gcc_assert (var
->n_var_parts
== 1);
3757 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
3758 if (GET_CODE (node
->loc
) == VALUE
)
3761 if (VALUE_RECURSED_INTO (node
->loc
))
3763 if (canon_value_cmp (node
->loc
, cval
))
3772 if (!has_marks
|| dv_is_decl_p (dv
))
3775 /* Keep it marked so that we revisit it, either after visiting a
3776 child node, or after visiting a new parent that might be
3778 VALUE_RECURSED_INTO (val
) = true;
3780 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
3781 if (GET_CODE (node
->loc
) == VALUE
3782 && VALUE_RECURSED_INTO (node
->loc
))
3786 VALUE_RECURSED_INTO (cval
) = false;
3787 dv
= dv_from_value (cval
);
3788 slot
= shared_hash_find_slot_noinsert (set
->vars
, dv
);
3791 gcc_assert (dv_is_decl_p (var
->dv
));
3792 /* The canonical value was reset and dropped.
3794 clobber_variable_part (set
, NULL
, var
->dv
, 0, NULL
);
3798 gcc_assert (dv_is_value_p (var
->dv
));
3799 if (var
->n_var_parts
== 0)
3801 gcc_assert (var
->n_var_parts
== 1);
3805 VALUE_RECURSED_INTO (val
) = false;
3810 /* Push values to the canonical one. */
3811 cdv
= dv_from_value (cval
);
3812 cslot
= shared_hash_find_slot_noinsert (set
->vars
, cdv
);
3814 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
3815 if (node
->loc
!= cval
)
3817 cslot
= set_slot_part (set
, node
->loc
, cslot
, cdv
, 0,
3818 node
->init
, NULL_RTX
);
3819 if (GET_CODE (node
->loc
) == VALUE
)
3821 decl_or_value ndv
= dv_from_value (node
->loc
);
3823 set_variable_part (set
, cval
, ndv
, 0, node
->init
, NULL_RTX
,
3826 if (canon_value_cmp (node
->loc
, val
))
3828 /* If it could have been a local minimum, it's not any more,
3829 since it's now neighbor to cval, so it may have to push
3830 to it. Conversely, if it wouldn't have prevailed over
3831 val, then whatever mark it has is fine: if it was to
3832 push, it will now push to a more canonical node, but if
3833 it wasn't, then it has already pushed any values it might
3835 VALUE_RECURSED_INTO (node
->loc
) = true;
3836 /* Make sure we visit node->loc by ensuring we cval is
3838 VALUE_RECURSED_INTO (cval
) = true;
3840 else if (!VALUE_RECURSED_INTO (node
->loc
))
3841 /* If we have no need to "recurse" into this node, it's
3842 already "canonicalized", so drop the link to the old
3844 clobber_variable_part (set
, cval
, ndv
, 0, NULL
);
3846 else if (GET_CODE (node
->loc
) == REG
)
3848 attrs list
= set
->regs
[REGNO (node
->loc
)], *listp
;
3850 /* Change an existing attribute referring to dv so that it
3851 refers to cdv, removing any duplicate this might
3852 introduce, and checking that no previous duplicates
3853 existed, all in a single pass. */
3857 if (list
->offset
== 0
3858 && (dv_as_opaque (list
->dv
) == dv_as_opaque (dv
)
3859 || dv_as_opaque (list
->dv
) == dv_as_opaque (cdv
)))
3866 if (dv_as_opaque (list
->dv
) == dv_as_opaque (dv
))
3869 for (listp
= &list
->next
; (list
= *listp
); listp
= &list
->next
)
3874 if (dv_as_opaque (list
->dv
) == dv_as_opaque (cdv
))
3876 *listp
= list
->next
;
3882 gcc_assert (dv_as_opaque (list
->dv
) != dv_as_opaque (dv
));
3885 else if (dv_as_opaque (list
->dv
) == dv_as_opaque (cdv
))
3887 for (listp
= &list
->next
; (list
= *listp
); listp
= &list
->next
)
3892 if (dv_as_opaque (list
->dv
) == dv_as_opaque (dv
))
3894 *listp
= list
->next
;
3900 gcc_assert (dv_as_opaque (list
->dv
) != dv_as_opaque (cdv
));
3909 if (list
->offset
== 0
3910 && (dv_as_opaque (list
->dv
) == dv_as_opaque (dv
)
3911 || dv_as_opaque (list
->dv
) == dv_as_opaque (cdv
)))
3921 set_slot_part (set
, val
, cslot
, cdv
, 0,
3922 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
);
3924 slot
= clobber_slot_part (set
, cval
, slot
, 0, NULL
);
3926 /* Variable may have been unshared. */
3928 gcc_checking_assert (var
->n_var_parts
&& var
->var_part
[0].loc_chain
->loc
== cval
3929 && var
->var_part
[0].loc_chain
->next
== NULL
);
3931 if (VALUE_RECURSED_INTO (cval
))
3932 goto restart_with_cval
;
3937 /* Bind one-part variables to the canonical value in an equivalence
3938 set. Not doing this causes dataflow convergence failure in rare
3939 circumstances, see PR42873. Unfortunately we can't do this
3940 efficiently as part of canonicalize_values_star, since we may not
3941 have determined or even seen the canonical value of a set when we
3942 get to a variable that references another member of the set. */
3945 canonicalize_vars_star (variable_def
**slot
, dataflow_set
*set
)
3947 variable var
= *slot
;
3948 decl_or_value dv
= var
->dv
;
3949 location_chain node
;
3952 variable_def
**cslot
;
3954 location_chain cnode
;
3956 if (!var
->onepart
|| var
->onepart
== ONEPART_VALUE
)
3959 gcc_assert (var
->n_var_parts
== 1);
3961 node
= var
->var_part
[0].loc_chain
;
3963 if (GET_CODE (node
->loc
) != VALUE
)
3966 gcc_assert (!node
->next
);
3969 /* Push values to the canonical one. */
3970 cdv
= dv_from_value (cval
);
3971 cslot
= shared_hash_find_slot_noinsert (set
->vars
, cdv
);
3975 gcc_assert (cvar
->n_var_parts
== 1);
3977 cnode
= cvar
->var_part
[0].loc_chain
;
3979 /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
3980 that are not “more canonical” than it. */
3981 if (GET_CODE (cnode
->loc
) != VALUE
3982 || !canon_value_cmp (cnode
->loc
, cval
))
3985 /* CVAL was found to be non-canonical. Change the variable to point
3986 to the canonical VALUE. */
3987 gcc_assert (!cnode
->next
);
3990 slot
= set_slot_part (set
, cval
, slot
, dv
, 0,
3991 node
->init
, node
->set_src
);
3992 clobber_slot_part (set
, cval
, slot
, 0, node
->set_src
);
3997 /* Combine variable or value in *S1SLOT (in DSM->cur) with the
3998 corresponding entry in DSM->src. Multi-part variables are combined
3999 with variable_union, whereas onepart dvs are combined with
4003 variable_merge_over_cur (variable s1var
, struct dfset_merge
*dsm
)
4005 dataflow_set
*dst
= dsm
->dst
;
4006 variable_def
**dstslot
;
4007 variable s2var
, dvar
= NULL
;
4008 decl_or_value dv
= s1var
->dv
;
4009 onepart_enum_t onepart
= s1var
->onepart
;
4012 location_chain node
, *nodep
;
4014 /* If the incoming onepart variable has an empty location list, then
4015 the intersection will be just as empty. For other variables,
4016 it's always union. */
4017 gcc_checking_assert (s1var
->n_var_parts
4018 && s1var
->var_part
[0].loc_chain
);
4021 return variable_union (s1var
, dst
);
4023 gcc_checking_assert (s1var
->n_var_parts
== 1);
4025 dvhash
= dv_htab_hash (dv
);
4026 if (dv_is_value_p (dv
))
4027 val
= dv_as_value (dv
);
4031 s2var
= shared_hash_find_1 (dsm
->src
->vars
, dv
, dvhash
);
4034 dst_can_be_shared
= false;
4038 dsm
->src_onepart_cnt
--;
4039 gcc_assert (s2var
->var_part
[0].loc_chain
4040 && s2var
->onepart
== onepart
4041 && s2var
->n_var_parts
== 1);
4043 dstslot
= shared_hash_find_slot_noinsert_1 (dst
->vars
, dv
, dvhash
);
4047 gcc_assert (dvar
->refcount
== 1
4048 && dvar
->onepart
== onepart
4049 && dvar
->n_var_parts
== 1);
4050 nodep
= &dvar
->var_part
[0].loc_chain
;
4058 if (!dstslot
&& !onepart_variable_different_p (s1var
, s2var
))
4060 dstslot
= shared_hash_find_slot_unshare_1 (&dst
->vars
, dv
,
4062 *dstslot
= dvar
= s2var
;
4067 dst_can_be_shared
= false;
4069 intersect_loc_chains (val
, nodep
, dsm
,
4070 s1var
->var_part
[0].loc_chain
, s2var
);
4076 dvar
= onepart_pool (onepart
).allocate ();
4079 dvar
->n_var_parts
= 1;
4080 dvar
->onepart
= onepart
;
4081 dvar
->in_changed_variables
= false;
4082 dvar
->var_part
[0].loc_chain
= node
;
4083 dvar
->var_part
[0].cur_loc
= NULL
;
4085 VAR_LOC_1PAUX (dvar
) = NULL
;
4087 VAR_PART_OFFSET (dvar
, 0) = 0;
4090 = shared_hash_find_slot_unshare_1 (&dst
->vars
, dv
, dvhash
,
4092 gcc_assert (!*dstslot
);
4100 nodep
= &dvar
->var_part
[0].loc_chain
;
4101 while ((node
= *nodep
))
4103 location_chain
*nextp
= &node
->next
;
4105 if (GET_CODE (node
->loc
) == REG
)
4109 for (list
= dst
->regs
[REGNO (node
->loc
)]; list
; list
= list
->next
)
4110 if (GET_MODE (node
->loc
) == GET_MODE (list
->loc
)
4111 && dv_is_value_p (list
->dv
))
4115 attrs_list_insert (&dst
->regs
[REGNO (node
->loc
)],
4117 /* If this value became canonical for another value that had
4118 this register, we want to leave it alone. */
4119 else if (dv_as_value (list
->dv
) != val
)
4121 dstslot
= set_slot_part (dst
, dv_as_value (list
->dv
),
4123 node
->init
, NULL_RTX
);
4124 dstslot
= delete_slot_part (dst
, node
->loc
, dstslot
, 0);
4126 /* Since nextp points into the removed node, we can't
4127 use it. The pointer to the next node moved to nodep.
4128 However, if the variable we're walking is unshared
4129 during our walk, we'll keep walking the location list
4130 of the previously-shared variable, in which case the
4131 node won't have been removed, and we'll want to skip
4132 it. That's why we test *nodep here. */
4138 /* Canonicalization puts registers first, so we don't have to
4144 if (dvar
!= *dstslot
)
4146 nodep
= &dvar
->var_part
[0].loc_chain
;
4150 /* Mark all referenced nodes for canonicalization, and make sure
4151 we have mutual equivalence links. */
4152 VALUE_RECURSED_INTO (val
) = true;
4153 for (node
= *nodep
; node
; node
= node
->next
)
4154 if (GET_CODE (node
->loc
) == VALUE
)
4156 VALUE_RECURSED_INTO (node
->loc
) = true;
4157 set_variable_part (dst
, val
, dv_from_value (node
->loc
), 0,
4158 node
->init
, NULL
, INSERT
);
4161 dstslot
= shared_hash_find_slot_noinsert_1 (dst
->vars
, dv
, dvhash
);
4162 gcc_assert (*dstslot
== dvar
);
4163 canonicalize_values_star (dstslot
, dst
);
4164 gcc_checking_assert (dstslot
4165 == shared_hash_find_slot_noinsert_1 (dst
->vars
,
4171 bool has_value
= false, has_other
= false;
4173 /* If we have one value and anything else, we're going to
4174 canonicalize this, so make sure all values have an entry in
4175 the table and are marked for canonicalization. */
4176 for (node
= *nodep
; node
; node
= node
->next
)
4178 if (GET_CODE (node
->loc
) == VALUE
)
4180 /* If this was marked during register canonicalization,
4181 we know we have to canonicalize values. */
4196 if (has_value
&& has_other
)
4198 for (node
= *nodep
; node
; node
= node
->next
)
4200 if (GET_CODE (node
->loc
) == VALUE
)
4202 decl_or_value dv
= dv_from_value (node
->loc
);
4203 variable_def
**slot
= NULL
;
4205 if (shared_hash_shared (dst
->vars
))
4206 slot
= shared_hash_find_slot_noinsert (dst
->vars
, dv
);
4208 slot
= shared_hash_find_slot_unshare (&dst
->vars
, dv
,
4212 variable var
= onepart_pool (ONEPART_VALUE
).allocate ();
4215 var
->n_var_parts
= 1;
4216 var
->onepart
= ONEPART_VALUE
;
4217 var
->in_changed_variables
= false;
4218 var
->var_part
[0].loc_chain
= NULL
;
4219 var
->var_part
[0].cur_loc
= NULL
;
4220 VAR_LOC_1PAUX (var
) = NULL
;
4224 VALUE_RECURSED_INTO (node
->loc
) = true;
4228 dstslot
= shared_hash_find_slot_noinsert_1 (dst
->vars
, dv
, dvhash
);
4229 gcc_assert (*dstslot
== dvar
);
4230 canonicalize_values_star (dstslot
, dst
);
4231 gcc_checking_assert (dstslot
4232 == shared_hash_find_slot_noinsert_1 (dst
->vars
,
4238 if (!onepart_variable_different_p (dvar
, s2var
))
4240 variable_htab_free (dvar
);
4241 *dstslot
= dvar
= s2var
;
4244 else if (s2var
!= s1var
&& !onepart_variable_different_p (dvar
, s1var
))
4246 variable_htab_free (dvar
);
4247 *dstslot
= dvar
= s1var
;
4249 dst_can_be_shared
= false;
4252 dst_can_be_shared
= false;
4257 /* Copy s2slot (in DSM->src) to DSM->dst if the variable is a
4258 multi-part variable. Unions of multi-part variables and
4259 intersections of one-part ones will be handled in
4260 variable_merge_over_cur(). */
4263 variable_merge_over_src (variable s2var
, struct dfset_merge
*dsm
)
4265 dataflow_set
*dst
= dsm
->dst
;
4266 decl_or_value dv
= s2var
->dv
;
4268 if (!s2var
->onepart
)
4270 variable_def
**dstp
= shared_hash_find_slot (dst
->vars
, dv
);
4276 dsm
->src_onepart_cnt
++;
4280 /* Combine dataflow set information from SRC2 into DST, using PDST
4281 to carry over information across passes. */
4284 dataflow_set_merge (dataflow_set
*dst
, dataflow_set
*src2
)
4286 dataflow_set cur
= *dst
;
4287 dataflow_set
*src1
= &cur
;
4288 struct dfset_merge dsm
;
4290 size_t src1_elems
, src2_elems
;
4291 variable_iterator_type hi
;
4294 src1_elems
= shared_hash_htab (src1
->vars
)->elements ();
4295 src2_elems
= shared_hash_htab (src2
->vars
)->elements ();
4296 dataflow_set_init (dst
);
4297 dst
->stack_adjust
= cur
.stack_adjust
;
4298 shared_hash_destroy (dst
->vars
);
4299 dst
->vars
= new shared_hash_def
;
4300 dst
->vars
->refcount
= 1;
4301 dst
->vars
->htab
= new variable_table_type (MAX (src1_elems
, src2_elems
));
4303 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
4304 attrs_list_mpdv_union (&dst
->regs
[i
], src1
->regs
[i
], src2
->regs
[i
]);
4309 dsm
.src_onepart_cnt
= 0;
4311 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm
.src
->vars
),
4313 variable_merge_over_src (var
, &dsm
);
4314 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm
.cur
->vars
),
4316 variable_merge_over_cur (var
, &dsm
);
4318 if (dsm
.src_onepart_cnt
)
4319 dst_can_be_shared
= false;
4321 dataflow_set_destroy (src1
);
4324 /* Mark register equivalences. */
4327 dataflow_set_equiv_regs (dataflow_set
*set
)
4332 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
4334 rtx canon
[NUM_MACHINE_MODES
];
4336 /* If the list is empty or one entry, no need to canonicalize
4338 if (set
->regs
[i
] == NULL
|| set
->regs
[i
]->next
== NULL
)
4341 memset (canon
, 0, sizeof (canon
));
4343 for (list
= set
->regs
[i
]; list
; list
= list
->next
)
4344 if (list
->offset
== 0 && dv_is_value_p (list
->dv
))
4346 rtx val
= dv_as_value (list
->dv
);
4347 rtx
*cvalp
= &canon
[(int)GET_MODE (val
)];
4350 if (canon_value_cmp (val
, cval
))
4354 for (list
= set
->regs
[i
]; list
; list
= list
->next
)
4355 if (list
->offset
== 0 && dv_onepart_p (list
->dv
))
4357 rtx cval
= canon
[(int)GET_MODE (list
->loc
)];
4362 if (dv_is_value_p (list
->dv
))
4364 rtx val
= dv_as_value (list
->dv
);
4369 VALUE_RECURSED_INTO (val
) = true;
4370 set_variable_part (set
, val
, dv_from_value (cval
), 0,
4371 VAR_INIT_STATUS_INITIALIZED
,
4375 VALUE_RECURSED_INTO (cval
) = true;
4376 set_variable_part (set
, cval
, list
->dv
, 0,
4377 VAR_INIT_STATUS_INITIALIZED
, NULL
, NO_INSERT
);
4380 for (listp
= &set
->regs
[i
]; (list
= *listp
);
4381 listp
= list
? &list
->next
: listp
)
4382 if (list
->offset
== 0 && dv_onepart_p (list
->dv
))
4384 rtx cval
= canon
[(int)GET_MODE (list
->loc
)];
4385 variable_def
**slot
;
4390 if (dv_is_value_p (list
->dv
))
4392 rtx val
= dv_as_value (list
->dv
);
4393 if (!VALUE_RECURSED_INTO (val
))
4397 slot
= shared_hash_find_slot_noinsert (set
->vars
, list
->dv
);
4398 canonicalize_values_star (slot
, set
);
4405 /* Remove any redundant values in the location list of VAR, which must
4406 be unshared and 1-part. */
4409 remove_duplicate_values (variable var
)
4411 location_chain node
, *nodep
;
4413 gcc_assert (var
->onepart
);
4414 gcc_assert (var
->n_var_parts
== 1);
4415 gcc_assert (var
->refcount
== 1);
4417 for (nodep
= &var
->var_part
[0].loc_chain
; (node
= *nodep
); )
4419 if (GET_CODE (node
->loc
) == VALUE
)
4421 if (VALUE_RECURSED_INTO (node
->loc
))
4423 /* Remove duplicate value node. */
4424 *nodep
= node
->next
;
4429 VALUE_RECURSED_INTO (node
->loc
) = true;
4431 nodep
= &node
->next
;
4434 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
4435 if (GET_CODE (node
->loc
) == VALUE
)
4437 gcc_assert (VALUE_RECURSED_INTO (node
->loc
));
4438 VALUE_RECURSED_INTO (node
->loc
) = false;
4443 /* Hash table iteration argument passed to variable_post_merge. */
4444 struct dfset_post_merge
4446 /* The new input set for the current block. */
4448 /* Pointer to the permanent input set for the current block, or
4450 dataflow_set
**permp
;
4453 /* Create values for incoming expressions associated with one-part
4454 variables that don't have value numbers for them. */
4457 variable_post_merge_new_vals (variable_def
**slot
, dfset_post_merge
*dfpm
)
4459 dataflow_set
*set
= dfpm
->set
;
4460 variable var
= *slot
;
4461 location_chain node
;
4463 if (!var
->onepart
|| !var
->n_var_parts
)
4466 gcc_assert (var
->n_var_parts
== 1);
4468 if (dv_is_decl_p (var
->dv
))
4470 bool check_dupes
= false;
4473 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
4475 if (GET_CODE (node
->loc
) == VALUE
)
4476 gcc_assert (!VALUE_RECURSED_INTO (node
->loc
));
4477 else if (GET_CODE (node
->loc
) == REG
)
4479 attrs att
, *attp
, *curp
= NULL
;
4481 if (var
->refcount
!= 1)
4483 slot
= unshare_variable (set
, slot
, var
,
4484 VAR_INIT_STATUS_INITIALIZED
);
4489 for (attp
= &set
->regs
[REGNO (node
->loc
)]; (att
= *attp
);
4491 if (att
->offset
== 0
4492 && GET_MODE (att
->loc
) == GET_MODE (node
->loc
))
4494 if (dv_is_value_p (att
->dv
))
4496 rtx cval
= dv_as_value (att
->dv
);
4501 else if (dv_as_opaque (att
->dv
) == dv_as_opaque (var
->dv
))
4509 if ((*curp
)->offset
== 0
4510 && GET_MODE ((*curp
)->loc
) == GET_MODE (node
->loc
)
4511 && dv_as_opaque ((*curp
)->dv
) == dv_as_opaque (var
->dv
))
4514 curp
= &(*curp
)->next
;
4525 *dfpm
->permp
= XNEW (dataflow_set
);
4526 dataflow_set_init (*dfpm
->permp
);
4529 for (att
= (*dfpm
->permp
)->regs
[REGNO (node
->loc
)];
4530 att
; att
= att
->next
)
4531 if (GET_MODE (att
->loc
) == GET_MODE (node
->loc
))
4533 gcc_assert (att
->offset
== 0
4534 && dv_is_value_p (att
->dv
));
4535 val_reset (set
, att
->dv
);
4542 cval
= dv_as_value (cdv
);
4546 /* Create a unique value to hold this register,
4547 that ought to be found and reused in
4548 subsequent rounds. */
4550 gcc_assert (!cselib_lookup (node
->loc
,
4551 GET_MODE (node
->loc
), 0,
4553 v
= cselib_lookup (node
->loc
, GET_MODE (node
->loc
), 1,
4555 cselib_preserve_value (v
);
4556 cselib_invalidate_rtx (node
->loc
);
4558 cdv
= dv_from_value (cval
);
4561 "Created new value %u:%u for reg %i\n",
4562 v
->uid
, v
->hash
, REGNO (node
->loc
));
4565 var_reg_decl_set (*dfpm
->permp
, node
->loc
,
4566 VAR_INIT_STATUS_INITIALIZED
,
4567 cdv
, 0, NULL
, INSERT
);
4573 /* Remove attribute referring to the decl, which now
4574 uses the value for the register, already existing or
4575 to be added when we bring perm in. */
4583 remove_duplicate_values (var
);
4589 /* Reset values in the permanent set that are not associated with the
4590 chosen expression. */
4593 variable_post_merge_perm_vals (variable_def
**pslot
, dfset_post_merge
*dfpm
)
4595 dataflow_set
*set
= dfpm
->set
;
4596 variable pvar
= *pslot
, var
;
4597 location_chain pnode
;
4601 gcc_assert (dv_is_value_p (pvar
->dv
)
4602 && pvar
->n_var_parts
== 1);
4603 pnode
= pvar
->var_part
[0].loc_chain
;
4606 && REG_P (pnode
->loc
));
4610 var
= shared_hash_find (set
->vars
, dv
);
4613 /* Although variable_post_merge_new_vals may have made decls
4614 non-star-canonical, values that pre-existed in canonical form
4615 remain canonical, and newly-created values reference a single
4616 REG, so they are canonical as well. Since VAR has the
4617 location list for a VALUE, using find_loc_in_1pdv for it is
4618 fine, since VALUEs don't map back to DECLs. */
4619 if (find_loc_in_1pdv (pnode
->loc
, var
, shared_hash_htab (set
->vars
)))
4621 val_reset (set
, dv
);
4624 for (att
= set
->regs
[REGNO (pnode
->loc
)]; att
; att
= att
->next
)
4625 if (att
->offset
== 0
4626 && GET_MODE (att
->loc
) == GET_MODE (pnode
->loc
)
4627 && dv_is_value_p (att
->dv
))
4630 /* If there is a value associated with this register already, create
4632 if (att
&& dv_as_value (att
->dv
) != dv_as_value (dv
))
4634 rtx cval
= dv_as_value (att
->dv
);
4635 set_variable_part (set
, cval
, dv
, 0, pnode
->init
, NULL
, INSERT
);
4636 set_variable_part (set
, dv_as_value (dv
), att
->dv
, 0, pnode
->init
,
4641 attrs_list_insert (&set
->regs
[REGNO (pnode
->loc
)],
4643 variable_union (pvar
, set
);
4649 /* Just checking stuff and registering register attributes for
4653 dataflow_post_merge_adjust (dataflow_set
*set
, dataflow_set
**permp
)
4655 struct dfset_post_merge dfpm
;
4660 shared_hash_htab (set
->vars
)
4661 ->traverse
<dfset_post_merge
*, variable_post_merge_new_vals
> (&dfpm
);
4663 shared_hash_htab ((*permp
)->vars
)
4664 ->traverse
<dfset_post_merge
*, variable_post_merge_perm_vals
> (&dfpm
);
4665 shared_hash_htab (set
->vars
)
4666 ->traverse
<dataflow_set
*, canonicalize_values_star
> (set
);
4667 shared_hash_htab (set
->vars
)
4668 ->traverse
<dataflow_set
*, canonicalize_vars_star
> (set
);
4671 /* Return a node whose loc is a MEM that refers to EXPR in the
4672 location list of a one-part variable or value VAR, or in that of
4673 any values recursively mentioned in the location lists. */
4675 static location_chain
4676 find_mem_expr_in_1pdv (tree expr
, rtx val
, variable_table_type
*vars
)
4678 location_chain node
;
4681 location_chain where
= NULL
;
4686 gcc_assert (GET_CODE (val
) == VALUE
4687 && !VALUE_RECURSED_INTO (val
));
4689 dv
= dv_from_value (val
);
4690 var
= vars
->find_with_hash (dv
, dv_htab_hash (dv
));
4695 gcc_assert (var
->onepart
);
4697 if (!var
->n_var_parts
)
4700 VALUE_RECURSED_INTO (val
) = true;
4702 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
4703 if (MEM_P (node
->loc
)
4704 && MEM_EXPR (node
->loc
) == expr
4705 && INT_MEM_OFFSET (node
->loc
) == 0)
4710 else if (GET_CODE (node
->loc
) == VALUE
4711 && !VALUE_RECURSED_INTO (node
->loc
)
4712 && (where
= find_mem_expr_in_1pdv (expr
, node
->loc
, vars
)))
4715 VALUE_RECURSED_INTO (val
) = false;
4720 /* Return TRUE if the value of MEM may vary across a call. */
4723 mem_dies_at_call (rtx mem
)
4725 tree expr
= MEM_EXPR (mem
);
4731 decl
= get_base_address (expr
);
4739 return (may_be_aliased (decl
)
4740 || (!TREE_READONLY (decl
) && is_global_var (decl
)));
4743 /* Remove all MEMs from the location list of a hash table entry for a
4744 one-part variable, except those whose MEM attributes map back to
4745 the variable itself, directly or within a VALUE. */
4748 dataflow_set_preserve_mem_locs (variable_def
**slot
, dataflow_set
*set
)
4750 variable var
= *slot
;
4752 if (var
->onepart
== ONEPART_VDECL
|| var
->onepart
== ONEPART_DEXPR
)
4754 tree decl
= dv_as_decl (var
->dv
);
4755 location_chain loc
, *locp
;
4756 bool changed
= false;
4758 if (!var
->n_var_parts
)
4761 gcc_assert (var
->n_var_parts
== 1);
4763 if (shared_var_p (var
, set
->vars
))
4765 for (loc
= var
->var_part
[0].loc_chain
; loc
; loc
= loc
->next
)
4767 /* We want to remove dying MEMs that doesn't refer to DECL. */
4768 if (GET_CODE (loc
->loc
) == MEM
4769 && (MEM_EXPR (loc
->loc
) != decl
4770 || INT_MEM_OFFSET (loc
->loc
) != 0)
4771 && !mem_dies_at_call (loc
->loc
))
4773 /* We want to move here MEMs that do refer to DECL. */
4774 else if (GET_CODE (loc
->loc
) == VALUE
4775 && find_mem_expr_in_1pdv (decl
, loc
->loc
,
4776 shared_hash_htab (set
->vars
)))
4783 slot
= unshare_variable (set
, slot
, var
, VAR_INIT_STATUS_UNKNOWN
);
4785 gcc_assert (var
->n_var_parts
== 1);
4788 for (locp
= &var
->var_part
[0].loc_chain
, loc
= *locp
;
4791 rtx old_loc
= loc
->loc
;
4792 if (GET_CODE (old_loc
) == VALUE
)
4794 location_chain mem_node
4795 = find_mem_expr_in_1pdv (decl
, loc
->loc
,
4796 shared_hash_htab (set
->vars
));
4798 /* ??? This picks up only one out of multiple MEMs that
4799 refer to the same variable. Do we ever need to be
4800 concerned about dealing with more than one, or, given
4801 that they should all map to the same variable
4802 location, their addresses will have been merged and
4803 they will be regarded as equivalent? */
4806 loc
->loc
= mem_node
->loc
;
4807 loc
->set_src
= mem_node
->set_src
;
4808 loc
->init
= MIN (loc
->init
, mem_node
->init
);
4812 if (GET_CODE (loc
->loc
) != MEM
4813 || (MEM_EXPR (loc
->loc
) == decl
4814 && INT_MEM_OFFSET (loc
->loc
) == 0)
4815 || !mem_dies_at_call (loc
->loc
))
4817 if (old_loc
!= loc
->loc
&& emit_notes
)
4819 if (old_loc
== var
->var_part
[0].cur_loc
)
4822 var
->var_part
[0].cur_loc
= NULL
;
4831 if (old_loc
== var
->var_part
[0].cur_loc
)
4834 var
->var_part
[0].cur_loc
= NULL
;
4841 if (!var
->var_part
[0].loc_chain
)
4847 variable_was_changed (var
, set
);
4853 /* Remove all MEMs from the location list of a hash table entry for a
4857 dataflow_set_remove_mem_locs (variable_def
**slot
, dataflow_set
*set
)
4859 variable var
= *slot
;
4861 if (var
->onepart
== ONEPART_VALUE
)
4863 location_chain loc
, *locp
;
4864 bool changed
= false;
4867 gcc_assert (var
->n_var_parts
== 1);
4869 if (shared_var_p (var
, set
->vars
))
4871 for (loc
= var
->var_part
[0].loc_chain
; loc
; loc
= loc
->next
)
4872 if (GET_CODE (loc
->loc
) == MEM
4873 && mem_dies_at_call (loc
->loc
))
4879 slot
= unshare_variable (set
, slot
, var
, VAR_INIT_STATUS_UNKNOWN
);
4881 gcc_assert (var
->n_var_parts
== 1);
4884 if (VAR_LOC_1PAUX (var
))
4885 cur_loc
= VAR_LOC_FROM (var
);
4887 cur_loc
= var
->var_part
[0].cur_loc
;
4889 for (locp
= &var
->var_part
[0].loc_chain
, loc
= *locp
;
4892 if (GET_CODE (loc
->loc
) != MEM
4893 || !mem_dies_at_call (loc
->loc
))
4900 /* If we have deleted the location which was last emitted
4901 we have to emit new location so add the variable to set
4902 of changed variables. */
4903 if (cur_loc
== loc
->loc
)
4906 var
->var_part
[0].cur_loc
= NULL
;
4907 if (VAR_LOC_1PAUX (var
))
4908 VAR_LOC_FROM (var
) = NULL
;
4913 if (!var
->var_part
[0].loc_chain
)
4919 variable_was_changed (var
, set
);
4925 /* Remove all variable-location information about call-clobbered
4926 registers, as well as associations between MEMs and VALUEs. */
4929 dataflow_set_clear_at_call (dataflow_set
*set
)
4932 hard_reg_set_iterator hrsi
;
4934 EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call
, 0, r
, hrsi
)
4935 var_regno_delete (set
, r
);
4937 if (MAY_HAVE_DEBUG_INSNS
)
4939 set
->traversed_vars
= set
->vars
;
4940 shared_hash_htab (set
->vars
)
4941 ->traverse
<dataflow_set
*, dataflow_set_preserve_mem_locs
> (set
);
4942 set
->traversed_vars
= set
->vars
;
4943 shared_hash_htab (set
->vars
)
4944 ->traverse
<dataflow_set
*, dataflow_set_remove_mem_locs
> (set
);
4945 set
->traversed_vars
= NULL
;
4950 variable_part_different_p (variable_part
*vp1
, variable_part
*vp2
)
4952 location_chain lc1
, lc2
;
4954 for (lc1
= vp1
->loc_chain
; lc1
; lc1
= lc1
->next
)
4956 for (lc2
= vp2
->loc_chain
; lc2
; lc2
= lc2
->next
)
4958 if (REG_P (lc1
->loc
) && REG_P (lc2
->loc
))
4960 if (REGNO (lc1
->loc
) == REGNO (lc2
->loc
))
4963 if (rtx_equal_p (lc1
->loc
, lc2
->loc
))
4972 /* Return true if one-part variables VAR1 and VAR2 are different.
4973 They must be in canonical order. */
4976 onepart_variable_different_p (variable var1
, variable var2
)
4978 location_chain lc1
, lc2
;
4983 gcc_assert (var1
->n_var_parts
== 1
4984 && var2
->n_var_parts
== 1);
4986 lc1
= var1
->var_part
[0].loc_chain
;
4987 lc2
= var2
->var_part
[0].loc_chain
;
4989 gcc_assert (lc1
&& lc2
);
4993 if (loc_cmp (lc1
->loc
, lc2
->loc
))
5002 /* Return true if variables VAR1 and VAR2 are different. */
5005 variable_different_p (variable var1
, variable var2
)
5012 if (var1
->onepart
!= var2
->onepart
)
5015 if (var1
->n_var_parts
!= var2
->n_var_parts
)
5018 if (var1
->onepart
&& var1
->n_var_parts
)
5020 gcc_checking_assert (dv_as_opaque (var1
->dv
) == dv_as_opaque (var2
->dv
)
5021 && var1
->n_var_parts
== 1);
5022 /* One-part values have locations in a canonical order. */
5023 return onepart_variable_different_p (var1
, var2
);
5026 for (i
= 0; i
< var1
->n_var_parts
; i
++)
5028 if (VAR_PART_OFFSET (var1
, i
) != VAR_PART_OFFSET (var2
, i
))
5030 if (variable_part_different_p (&var1
->var_part
[i
], &var2
->var_part
[i
]))
5032 if (variable_part_different_p (&var2
->var_part
[i
], &var1
->var_part
[i
]))
5038 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
5041 dataflow_set_different (dataflow_set
*old_set
, dataflow_set
*new_set
)
5043 variable_iterator_type hi
;
5046 if (old_set
->vars
== new_set
->vars
)
5049 if (shared_hash_htab (old_set
->vars
)->elements ()
5050 != shared_hash_htab (new_set
->vars
)->elements ())
5053 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (old_set
->vars
),
5056 variable_table_type
*htab
= shared_hash_htab (new_set
->vars
);
5057 variable var2
= htab
->find_with_hash (var1
->dv
, dv_htab_hash (var1
->dv
));
5060 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5062 fprintf (dump_file
, "dataflow difference found: removal of:\n");
5068 if (variable_different_p (var1
, var2
))
5070 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5072 fprintf (dump_file
, "dataflow difference found: "
5073 "old and new follow:\n");
5081 /* No need to traverse the second hashtab, if both have the same number
5082 of elements and the second one had all entries found in the first one,
5083 then it can't have any extra entries. */
5087 /* Free the contents of dataflow set SET. */
5090 dataflow_set_destroy (dataflow_set
*set
)
5094 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
5095 attrs_list_clear (&set
->regs
[i
]);
5097 shared_hash_destroy (set
->vars
);
5101 /* Return true if RTL X contains a SYMBOL_REF. */
5104 contains_symbol_ref (rtx x
)
5113 code
= GET_CODE (x
);
5114 if (code
== SYMBOL_REF
)
5117 fmt
= GET_RTX_FORMAT (code
);
5118 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
5122 if (contains_symbol_ref (XEXP (x
, i
)))
5125 else if (fmt
[i
] == 'E')
5128 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
5129 if (contains_symbol_ref (XVECEXP (x
, i
, j
)))
5137 /* Shall EXPR be tracked? */
5140 track_expr_p (tree expr
, bool need_rtl
)
5145 if (TREE_CODE (expr
) == DEBUG_EXPR_DECL
)
5146 return DECL_RTL_SET_P (expr
);
5148 /* If EXPR is not a parameter or a variable do not track it. */
5149 if (TREE_CODE (expr
) != VAR_DECL
&& TREE_CODE (expr
) != PARM_DECL
)
5152 /* It also must have a name... */
5153 if (!DECL_NAME (expr
) && need_rtl
)
5156 /* ... and a RTL assigned to it. */
5157 decl_rtl
= DECL_RTL_IF_SET (expr
);
5158 if (!decl_rtl
&& need_rtl
)
5161 /* If this expression is really a debug alias of some other declaration, we
5162 don't need to track this expression if the ultimate declaration is
5165 if (TREE_CODE (realdecl
) == VAR_DECL
&& DECL_HAS_DEBUG_EXPR_P (realdecl
))
5167 realdecl
= DECL_DEBUG_EXPR (realdecl
);
5168 if (!DECL_P (realdecl
))
5170 if (handled_component_p (realdecl
)
5171 || (TREE_CODE (realdecl
) == MEM_REF
5172 && TREE_CODE (TREE_OPERAND (realdecl
, 0)) == ADDR_EXPR
))
5174 HOST_WIDE_INT bitsize
, bitpos
, maxsize
;
5176 = get_ref_base_and_extent (realdecl
, &bitpos
, &bitsize
,
5178 if (!DECL_P (innerdecl
)
5179 || DECL_IGNORED_P (innerdecl
)
5180 /* Do not track declarations for parts of tracked parameters
5181 since we want to track them as a whole instead. */
5182 || (TREE_CODE (innerdecl
) == PARM_DECL
5183 && DECL_MODE (innerdecl
) != BLKmode
5184 && TREE_CODE (TREE_TYPE (innerdecl
)) != UNION_TYPE
)
5185 || TREE_STATIC (innerdecl
)
5187 || bitpos
+ bitsize
> 256
5188 || bitsize
!= maxsize
)
5198 /* Do not track EXPR if REALDECL it should be ignored for debugging
5200 if (DECL_IGNORED_P (realdecl
))
5203 /* Do not track global variables until we are able to emit correct location
5205 if (TREE_STATIC (realdecl
))
5208 /* When the EXPR is a DECL for alias of some variable (see example)
5209 the TREE_STATIC flag is not used. Disable tracking all DECLs whose
5210 DECL_RTL contains SYMBOL_REF.
5213 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
5216 if (decl_rtl
&& MEM_P (decl_rtl
)
5217 && contains_symbol_ref (XEXP (decl_rtl
, 0)))
5220 /* If RTX is a memory it should not be very large (because it would be
5221 an array or struct). */
5222 if (decl_rtl
&& MEM_P (decl_rtl
))
5224 /* Do not track structures and arrays. */
5225 if (GET_MODE (decl_rtl
) == BLKmode
5226 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl
)))
5228 if (MEM_SIZE_KNOWN_P (decl_rtl
)
5229 && MEM_SIZE (decl_rtl
) > MAX_VAR_PARTS
)
5233 DECL_CHANGED (expr
) = 0;
5234 DECL_CHANGED (realdecl
) = 0;
5238 /* Determine whether a given LOC refers to the same variable part as
5242 same_variable_part_p (rtx loc
, tree expr
, HOST_WIDE_INT offset
)
5245 HOST_WIDE_INT offset2
;
5247 if (! DECL_P (expr
))
5252 expr2
= REG_EXPR (loc
);
5253 offset2
= REG_OFFSET (loc
);
5255 else if (MEM_P (loc
))
5257 expr2
= MEM_EXPR (loc
);
5258 offset2
= INT_MEM_OFFSET (loc
);
5263 if (! expr2
|| ! DECL_P (expr2
))
5266 expr
= var_debug_decl (expr
);
5267 expr2
= var_debug_decl (expr2
);
5269 return (expr
== expr2
&& offset
== offset2
);
5272 /* LOC is a REG or MEM that we would like to track if possible.
5273 If EXPR is null, we don't know what expression LOC refers to,
5274 otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
5275 LOC is an lvalue register.
5277 Return true if EXPR is nonnull and if LOC, or some lowpart of it,
5278 is something we can track. When returning true, store the mode of
5279 the lowpart we can track in *MODE_OUT (if nonnull) and its offset
5280 from EXPR in *OFFSET_OUT (if nonnull). */
5283 track_loc_p (rtx loc
, tree expr
, HOST_WIDE_INT offset
, bool store_reg_p
,
5284 machine_mode
*mode_out
, HOST_WIDE_INT
*offset_out
)
5288 if (expr
== NULL
|| !track_expr_p (expr
, true))
5291 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
5292 whole subreg, but only the old inner part is really relevant. */
5293 mode
= GET_MODE (loc
);
5294 if (REG_P (loc
) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc
)))
5296 machine_mode pseudo_mode
;
5298 pseudo_mode
= PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc
));
5299 if (GET_MODE_SIZE (mode
) > GET_MODE_SIZE (pseudo_mode
))
5301 offset
+= byte_lowpart_offset (pseudo_mode
, mode
);
5306 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
5307 Do the same if we are storing to a register and EXPR occupies
5308 the whole of register LOC; in that case, the whole of EXPR is
5309 being changed. We exclude complex modes from the second case
5310 because the real and imaginary parts are represented as separate
5311 pseudo registers, even if the whole complex value fits into one
5313 if ((GET_MODE_SIZE (mode
) > GET_MODE_SIZE (DECL_MODE (expr
))
5315 && !COMPLEX_MODE_P (DECL_MODE (expr
))
5316 && hard_regno_nregs
[REGNO (loc
)][DECL_MODE (expr
)] == 1))
5317 && offset
+ byte_lowpart_offset (DECL_MODE (expr
), mode
) == 0)
5319 mode
= DECL_MODE (expr
);
5323 if (offset
< 0 || offset
>= MAX_VAR_PARTS
)
5329 *offset_out
= offset
;
5333 /* Return the MODE lowpart of LOC, or null if LOC is not something we
5334 want to track. When returning nonnull, make sure that the attributes
5335 on the returned value are updated. */
5338 var_lowpart (machine_mode mode
, rtx loc
)
5340 unsigned int offset
, reg_offset
, regno
;
5342 if (GET_MODE (loc
) == mode
)
5345 if (!REG_P (loc
) && !MEM_P (loc
))
5348 offset
= byte_lowpart_offset (mode
, GET_MODE (loc
));
5351 return adjust_address_nv (loc
, mode
, offset
);
5353 reg_offset
= subreg_lowpart_offset (mode
, GET_MODE (loc
));
5354 regno
= REGNO (loc
) + subreg_regno_offset (REGNO (loc
), GET_MODE (loc
),
5356 return gen_rtx_REG_offset (loc
, mode
, regno
, offset
);
5359 /* Carry information about uses and stores while walking rtx. */
5361 struct count_use_info
5363 /* The insn where the RTX is. */
5366 /* The basic block where insn is. */
5369 /* The array of n_sets sets in the insn, as determined by cselib. */
5370 struct cselib_set
*sets
;
5373 /* True if we're counting stores, false otherwise. */
5377 /* Find a VALUE corresponding to X. */
5379 static inline cselib_val
*
5380 find_use_val (rtx x
, machine_mode mode
, struct count_use_info
*cui
)
5386 /* This is called after uses are set up and before stores are
5387 processed by cselib, so it's safe to look up srcs, but not
5388 dsts. So we look up expressions that appear in srcs or in
5389 dest expressions, but we search the sets array for dests of
5393 /* Some targets represent memset and memcpy patterns
5394 by (set (mem:BLK ...) (reg:[QHSD]I ...)) or
5395 (set (mem:BLK ...) (const_int ...)) or
5396 (set (mem:BLK ...) (mem:BLK ...)). Don't return anything
5397 in that case, otherwise we end up with mode mismatches. */
5398 if (mode
== BLKmode
&& MEM_P (x
))
5400 for (i
= 0; i
< cui
->n_sets
; i
++)
5401 if (cui
->sets
[i
].dest
== x
)
5402 return cui
->sets
[i
].src_elt
;
5405 return cselib_lookup (x
, mode
, 0, VOIDmode
);
5411 /* Replace all registers and addresses in an expression with VALUE
5412 expressions that map back to them, unless the expression is a
5413 register. If no mapping is or can be performed, returns NULL. */
5416 replace_expr_with_values (rtx loc
)
5418 if (REG_P (loc
) || GET_CODE (loc
) == ENTRY_VALUE
)
5420 else if (MEM_P (loc
))
5422 cselib_val
*addr
= cselib_lookup (XEXP (loc
, 0),
5423 get_address_mode (loc
), 0,
5426 return replace_equiv_address_nv (loc
, addr
->val_rtx
);
5431 return cselib_subst_to_values (loc
, VOIDmode
);
5434 /* Return true if X contains a DEBUG_EXPR. */
5437 rtx_debug_expr_p (const_rtx x
)
5439 subrtx_iterator::array_type array
;
5440 FOR_EACH_SUBRTX (iter
, array
, x
, ALL
)
5441 if (GET_CODE (*iter
) == DEBUG_EXPR
)
5446 /* Determine what kind of micro operation to choose for a USE. Return
5447 MO_CLOBBER if no micro operation is to be generated. */
5449 static enum micro_operation_type
5450 use_type (rtx loc
, struct count_use_info
*cui
, machine_mode
*modep
)
5454 if (cui
&& cui
->sets
)
5456 if (GET_CODE (loc
) == VAR_LOCATION
)
5458 if (track_expr_p (PAT_VAR_LOCATION_DECL (loc
), false))
5460 rtx ploc
= PAT_VAR_LOCATION_LOC (loc
);
5461 if (! VAR_LOC_UNKNOWN_P (ploc
))
5463 cselib_val
*val
= cselib_lookup (ploc
, GET_MODE (loc
), 1,
5466 /* ??? flag_float_store and volatile mems are never
5467 given values, but we could in theory use them for
5469 gcc_assert (val
|| 1);
5477 if (REG_P (loc
) || MEM_P (loc
))
5480 *modep
= GET_MODE (loc
);
5484 || (find_use_val (loc
, GET_MODE (loc
), cui
)
5485 && cselib_lookup (XEXP (loc
, 0),
5486 get_address_mode (loc
), 0,
5492 cselib_val
*val
= find_use_val (loc
, GET_MODE (loc
), cui
);
5494 if (val
&& !cselib_preserved_value_p (val
))
5502 gcc_assert (REGNO (loc
) < FIRST_PSEUDO_REGISTER
);
5504 if (loc
== cfa_base_rtx
)
5506 expr
= REG_EXPR (loc
);
5509 return MO_USE_NO_VAR
;
5510 else if (target_for_debug_bind (var_debug_decl (expr
)))
5512 else if (track_loc_p (loc
, expr
, REG_OFFSET (loc
),
5513 false, modep
, NULL
))
5516 return MO_USE_NO_VAR
;
5518 else if (MEM_P (loc
))
5520 expr
= MEM_EXPR (loc
);
5524 else if (target_for_debug_bind (var_debug_decl (expr
)))
5526 else if (track_loc_p (loc
, expr
, INT_MEM_OFFSET (loc
),
5528 /* Multi-part variables shouldn't refer to one-part
5529 variable names such as VALUEs (never happens) or
5530 DEBUG_EXPRs (only happens in the presence of debug
5532 && (!MAY_HAVE_DEBUG_INSNS
5533 || !rtx_debug_expr_p (XEXP (loc
, 0))))
5542 /* Log to OUT information about micro-operation MOPT involving X in
5546 log_op_type (rtx x
, basic_block bb
, rtx_insn
*insn
,
5547 enum micro_operation_type mopt
, FILE *out
)
5549 fprintf (out
, "bb %i op %i insn %i %s ",
5550 bb
->index
, VTI (bb
)->mos
.length (),
5551 INSN_UID (insn
), micro_operation_type_name
[mopt
]);
5552 print_inline_rtx (out
, x
, 2);
5556 /* Tell whether the CONCAT used to holds a VALUE and its location
5557 needs value resolution, i.e., an attempt of mapping the location
5558 back to other incoming values. */
5559 #define VAL_NEEDS_RESOLUTION(x) \
5560 (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
5561 /* Whether the location in the CONCAT is a tracked expression, that
5562 should also be handled like a MO_USE. */
5563 #define VAL_HOLDS_TRACK_EXPR(x) \
5564 (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
5565 /* Whether the location in the CONCAT should be handled like a MO_COPY
5567 #define VAL_EXPR_IS_COPIED(x) \
5568 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
5569 /* Whether the location in the CONCAT should be handled like a
5570 MO_CLOBBER as well. */
5571 #define VAL_EXPR_IS_CLOBBERED(x) \
5572 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
5574 /* All preserved VALUEs. */
5575 static vec
<rtx
> preserved_values
;
5577 /* Ensure VAL is preserved and remember it in a vector for vt_emit_notes. */
5580 preserve_value (cselib_val
*val
)
5582 cselib_preserve_value (val
);
5583 preserved_values
.safe_push (val
->val_rtx
);
5586 /* Helper function for MO_VAL_LOC handling. Return non-zero if
5587 any rtxes not suitable for CONST use not replaced by VALUEs
5591 non_suitable_const (const_rtx x
)
5593 subrtx_iterator::array_type array
;
5594 FOR_EACH_SUBRTX (iter
, array
, x
, ALL
)
5596 const_rtx x
= *iter
;
5597 switch (GET_CODE (x
))
5608 if (!MEM_READONLY_P (x
))
5618 /* Add uses (register and memory references) LOC which will be tracked
5619 to VTI (bb)->mos. */
5622 add_uses (rtx loc
, struct count_use_info
*cui
)
5624 machine_mode mode
= VOIDmode
;
5625 enum micro_operation_type type
= use_type (loc
, cui
, &mode
);
5627 if (type
!= MO_CLOBBER
)
5629 basic_block bb
= cui
->bb
;
5633 mo
.u
.loc
= type
== MO_USE
? var_lowpart (mode
, loc
) : loc
;
5634 mo
.insn
= cui
->insn
;
5636 if (type
== MO_VAL_LOC
)
5639 rtx vloc
= PAT_VAR_LOCATION_LOC (oloc
);
5642 gcc_assert (cui
->sets
);
5645 && !REG_P (XEXP (vloc
, 0))
5646 && !MEM_P (XEXP (vloc
, 0)))
5649 machine_mode address_mode
= get_address_mode (mloc
);
5651 = cselib_lookup (XEXP (mloc
, 0), address_mode
, 0,
5654 if (val
&& !cselib_preserved_value_p (val
))
5655 preserve_value (val
);
5658 if (CONSTANT_P (vloc
)
5659 && (GET_CODE (vloc
) != CONST
|| non_suitable_const (vloc
)))
5660 /* For constants don't look up any value. */;
5661 else if (!VAR_LOC_UNKNOWN_P (vloc
) && !unsuitable_loc (vloc
)
5662 && (val
= find_use_val (vloc
, GET_MODE (oloc
), cui
)))
5665 enum micro_operation_type type2
;
5667 bool resolvable
= REG_P (vloc
) || MEM_P (vloc
);
5670 nloc
= replace_expr_with_values (vloc
);
5674 oloc
= shallow_copy_rtx (oloc
);
5675 PAT_VAR_LOCATION_LOC (oloc
) = nloc
;
5678 oloc
= gen_rtx_CONCAT (mode
, val
->val_rtx
, oloc
);
5680 type2
= use_type (vloc
, 0, &mode2
);
5682 gcc_assert (type2
== MO_USE
|| type2
== MO_USE_NO_VAR
5683 || type2
== MO_CLOBBER
);
5685 if (type2
== MO_CLOBBER
5686 && !cselib_preserved_value_p (val
))
5688 VAL_NEEDS_RESOLUTION (oloc
) = resolvable
;
5689 preserve_value (val
);
5692 else if (!VAR_LOC_UNKNOWN_P (vloc
))
5694 oloc
= shallow_copy_rtx (oloc
);
5695 PAT_VAR_LOCATION_LOC (oloc
) = gen_rtx_UNKNOWN_VAR_LOC ();
5700 else if (type
== MO_VAL_USE
)
5702 machine_mode mode2
= VOIDmode
;
5703 enum micro_operation_type type2
;
5704 cselib_val
*val
= find_use_val (loc
, GET_MODE (loc
), cui
);
5705 rtx vloc
, oloc
= loc
, nloc
;
5707 gcc_assert (cui
->sets
);
5710 && !REG_P (XEXP (oloc
, 0))
5711 && !MEM_P (XEXP (oloc
, 0)))
5714 machine_mode address_mode
= get_address_mode (mloc
);
5716 = cselib_lookup (XEXP (mloc
, 0), address_mode
, 0,
5719 if (val
&& !cselib_preserved_value_p (val
))
5720 preserve_value (val
);
5723 type2
= use_type (loc
, 0, &mode2
);
5725 gcc_assert (type2
== MO_USE
|| type2
== MO_USE_NO_VAR
5726 || type2
== MO_CLOBBER
);
5728 if (type2
== MO_USE
)
5729 vloc
= var_lowpart (mode2
, loc
);
5733 /* The loc of a MO_VAL_USE may have two forms:
5735 (concat val src): val is at src, a value-based
5738 (concat (concat val use) src): same as above, with use as
5739 the MO_USE tracked value, if it differs from src.
5743 gcc_checking_assert (REG_P (loc
) || MEM_P (loc
));
5744 nloc
= replace_expr_with_values (loc
);
5749 oloc
= gen_rtx_CONCAT (mode2
, val
->val_rtx
, vloc
);
5751 oloc
= val
->val_rtx
;
5753 mo
.u
.loc
= gen_rtx_CONCAT (mode
, oloc
, nloc
);
5755 if (type2
== MO_USE
)
5756 VAL_HOLDS_TRACK_EXPR (mo
.u
.loc
) = 1;
5757 if (!cselib_preserved_value_p (val
))
5759 VAL_NEEDS_RESOLUTION (mo
.u
.loc
) = 1;
5760 preserve_value (val
);
5764 gcc_assert (type
== MO_USE
|| type
== MO_USE_NO_VAR
);
5766 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5767 log_op_type (mo
.u
.loc
, cui
->bb
, cui
->insn
, mo
.type
, dump_file
);
5768 VTI (bb
)->mos
.safe_push (mo
);
5772 /* Helper function for finding all uses of REG/MEM in X in insn INSN. */
5775 add_uses_1 (rtx
*x
, void *cui
)
5777 subrtx_var_iterator::array_type array
;
5778 FOR_EACH_SUBRTX_VAR (iter
, array
, *x
, NONCONST
)
5779 add_uses (*iter
, (struct count_use_info
*) cui
);
5782 /* This is the value used during expansion of locations. We want it
5783 to be unbounded, so that variables expanded deep in a recursion
5784 nest are fully evaluated, so that their values are cached
5785 correctly. We avoid recursion cycles through other means, and we
5786 don't unshare RTL, so excess complexity is not a problem. */
5787 #define EXPR_DEPTH (INT_MAX)
5788 /* We use this to keep too-complex expressions from being emitted as
5789 location notes, and then to debug information. Users can trade
5790 compile time for ridiculously complex expressions, although they're
5791 seldom useful, and they may often have to be discarded as not
5792 representable anyway. */
5793 #define EXPR_USE_DEPTH (PARAM_VALUE (PARAM_MAX_VARTRACK_EXPR_DEPTH))
5795 /* Attempt to reverse the EXPR operation in the debug info and record
5796 it in the cselib table. Say for reg1 = reg2 + 6 even when reg2 is
5797 no longer live we can express its value as VAL - 6. */
5800 reverse_op (rtx val
, const_rtx expr
, rtx_insn
*insn
)
5804 struct elt_loc_list
*l
;
5808 if (GET_CODE (expr
) != SET
)
5811 if (!REG_P (SET_DEST (expr
)) || GET_MODE (val
) != GET_MODE (SET_DEST (expr
)))
5814 src
= SET_SRC (expr
);
5815 switch (GET_CODE (src
))
5822 if (!REG_P (XEXP (src
, 0)))
5827 if (!REG_P (XEXP (src
, 0)) && !MEM_P (XEXP (src
, 0)))
5834 if (!SCALAR_INT_MODE_P (GET_MODE (src
)) || XEXP (src
, 0) == cfa_base_rtx
)
5837 v
= cselib_lookup (XEXP (src
, 0), GET_MODE (XEXP (src
, 0)), 0, VOIDmode
);
5838 if (!v
|| !cselib_preserved_value_p (v
))
5841 /* Use canonical V to avoid creating multiple redundant expressions
5842 for different VALUES equivalent to V. */
5843 v
= canonical_cselib_val (v
);
5845 /* Adding a reverse op isn't useful if V already has an always valid
5846 location. Ignore ENTRY_VALUE, while it is always constant, we should
5847 prefer non-ENTRY_VALUE locations whenever possible. */
5848 for (l
= v
->locs
, count
= 0; l
; l
= l
->next
, count
++)
5849 if (CONSTANT_P (l
->loc
)
5850 && (GET_CODE (l
->loc
) != CONST
|| !references_value_p (l
->loc
, 0)))
5852 /* Avoid creating too large locs lists. */
5853 else if (count
== PARAM_VALUE (PARAM_MAX_VARTRACK_REVERSE_OP_SIZE
))
5856 switch (GET_CODE (src
))
5860 if (GET_MODE (v
->val_rtx
) != GET_MODE (val
))
5862 ret
= gen_rtx_fmt_e (GET_CODE (src
), GET_MODE (val
), val
);
5866 ret
= gen_lowpart_SUBREG (GET_MODE (v
->val_rtx
), val
);
5878 if (GET_MODE (v
->val_rtx
) != GET_MODE (val
))
5880 arg
= XEXP (src
, 1);
5881 if (!CONST_INT_P (arg
) && GET_CODE (arg
) != SYMBOL_REF
)
5883 arg
= cselib_expand_value_rtx (arg
, scratch_regs
, 5);
5884 if (arg
== NULL_RTX
)
5886 if (!CONST_INT_P (arg
) && GET_CODE (arg
) != SYMBOL_REF
)
5889 ret
= simplify_gen_binary (code
, GET_MODE (val
), val
, arg
);
5891 /* Ensure ret isn't VALUE itself (which can happen e.g. for
5892 (plus (reg1) (reg2)) when reg2 is known to be 0), as that
5893 breaks a lot of routines during var-tracking. */
5894 ret
= gen_rtx_fmt_ee (PLUS
, GET_MODE (val
), val
, const0_rtx
);
5900 cselib_add_permanent_equiv (v
, ret
, insn
);
5903 /* Add stores (register and memory references) LOC which will be tracked
5904 to VTI (bb)->mos. EXPR is the RTL expression containing the store.
5905 CUIP->insn is instruction which the LOC is part of. */
5908 add_stores (rtx loc
, const_rtx expr
, void *cuip
)
5910 machine_mode mode
= VOIDmode
, mode2
;
5911 struct count_use_info
*cui
= (struct count_use_info
*)cuip
;
5912 basic_block bb
= cui
->bb
;
5914 rtx oloc
= loc
, nloc
, src
= NULL
;
5915 enum micro_operation_type type
= use_type (loc
, cui
, &mode
);
5916 bool track_p
= false;
5918 bool resolve
, preserve
;
5920 if (type
== MO_CLOBBER
)
5927 gcc_assert (loc
!= cfa_base_rtx
);
5928 if ((GET_CODE (expr
) == CLOBBER
&& type
!= MO_VAL_SET
)
5929 || !(track_p
= use_type (loc
, NULL
, &mode2
) == MO_USE
)
5930 || GET_CODE (expr
) == CLOBBER
)
5932 mo
.type
= MO_CLOBBER
;
5934 if (GET_CODE (expr
) == SET
5935 && SET_DEST (expr
) == loc
5936 && !unsuitable_loc (SET_SRC (expr
))
5937 && find_use_val (loc
, mode
, cui
))
5939 gcc_checking_assert (type
== MO_VAL_SET
);
5940 mo
.u
.loc
= gen_rtx_SET (loc
, SET_SRC (expr
));
5945 if (GET_CODE (expr
) == SET
5946 && SET_DEST (expr
) == loc
5947 && GET_CODE (SET_SRC (expr
)) != ASM_OPERANDS
)
5948 src
= var_lowpart (mode2
, SET_SRC (expr
));
5949 loc
= var_lowpart (mode2
, loc
);
5958 rtx xexpr
= gen_rtx_SET (loc
, src
);
5959 if (same_variable_part_p (src
, REG_EXPR (loc
), REG_OFFSET (loc
)))
5961 /* If this is an instruction copying (part of) a parameter
5962 passed by invisible reference to its register location,
5963 pretend it's a SET so that the initial memory location
5964 is discarded, as the parameter register can be reused
5965 for other purposes and we do not track locations based
5966 on generic registers. */
5969 && TREE_CODE (REG_EXPR (loc
)) == PARM_DECL
5970 && DECL_MODE (REG_EXPR (loc
)) != BLKmode
5971 && MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc
)))
5972 && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc
)), 0)
5983 mo
.insn
= cui
->insn
;
5985 else if (MEM_P (loc
)
5986 && ((track_p
= use_type (loc
, NULL
, &mode2
) == MO_USE
)
5989 if (MEM_P (loc
) && type
== MO_VAL_SET
5990 && !REG_P (XEXP (loc
, 0))
5991 && !MEM_P (XEXP (loc
, 0)))
5994 machine_mode address_mode
= get_address_mode (mloc
);
5995 cselib_val
*val
= cselib_lookup (XEXP (mloc
, 0),
5999 if (val
&& !cselib_preserved_value_p (val
))
6000 preserve_value (val
);
6003 if (GET_CODE (expr
) == CLOBBER
|| !track_p
)
6005 mo
.type
= MO_CLOBBER
;
6006 mo
.u
.loc
= track_p
? var_lowpart (mode2
, loc
) : loc
;
6010 if (GET_CODE (expr
) == SET
6011 && SET_DEST (expr
) == loc
6012 && GET_CODE (SET_SRC (expr
)) != ASM_OPERANDS
)
6013 src
= var_lowpart (mode2
, SET_SRC (expr
));
6014 loc
= var_lowpart (mode2
, loc
);
6023 rtx xexpr
= gen_rtx_SET (loc
, src
);
6024 if (same_variable_part_p (SET_SRC (xexpr
),
6026 INT_MEM_OFFSET (loc
)))
6033 mo
.insn
= cui
->insn
;
6038 if (type
!= MO_VAL_SET
)
6039 goto log_and_return
;
6041 v
= find_use_val (oloc
, mode
, cui
);
6044 goto log_and_return
;
6046 resolve
= preserve
= !cselib_preserved_value_p (v
);
6048 /* We cannot track values for multiple-part variables, so we track only
6049 locations for tracked parameters passed either by invisible reference
6050 or directly in multiple locations. */
6054 && TREE_CODE (REG_EXPR (loc
)) == PARM_DECL
6055 && DECL_MODE (REG_EXPR (loc
)) != BLKmode
6056 && TREE_CODE (TREE_TYPE (REG_EXPR (loc
))) != UNION_TYPE
6057 && ((MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc
)))
6058 && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc
)), 0) != arg_pointer_rtx
)
6059 || (GET_CODE (DECL_INCOMING_RTL (REG_EXPR (loc
))) == PARALLEL
6060 && XVECLEN (DECL_INCOMING_RTL (REG_EXPR (loc
)), 0) > 1)))
6062 /* Although we don't use the value here, it could be used later by the
6063 mere virtue of its existence as the operand of the reverse operation
6064 that gave rise to it (typically extension/truncation). Make sure it
6065 is preserved as required by vt_expand_var_loc_chain. */
6068 goto log_and_return
;
6071 if (loc
== stack_pointer_rtx
6072 && hard_frame_pointer_adjustment
!= -1
6074 cselib_set_value_sp_based (v
);
6076 nloc
= replace_expr_with_values (oloc
);
6080 if (GET_CODE (PATTERN (cui
->insn
)) == COND_EXEC
)
6082 cselib_val
*oval
= cselib_lookup (oloc
, GET_MODE (oloc
), 0, VOIDmode
);
6086 gcc_assert (REG_P (oloc
) || MEM_P (oloc
));
6088 if (oval
&& !cselib_preserved_value_p (oval
))
6090 micro_operation moa
;
6092 preserve_value (oval
);
6094 moa
.type
= MO_VAL_USE
;
6095 moa
.u
.loc
= gen_rtx_CONCAT (mode
, oval
->val_rtx
, oloc
);
6096 VAL_NEEDS_RESOLUTION (moa
.u
.loc
) = 1;
6097 moa
.insn
= cui
->insn
;
6099 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
6100 log_op_type (moa
.u
.loc
, cui
->bb
, cui
->insn
,
6101 moa
.type
, dump_file
);
6102 VTI (bb
)->mos
.safe_push (moa
);
6107 else if (resolve
&& GET_CODE (mo
.u
.loc
) == SET
)
6109 if (REG_P (SET_SRC (expr
)) || MEM_P (SET_SRC (expr
)))
6110 nloc
= replace_expr_with_values (SET_SRC (expr
));
6114 /* Avoid the mode mismatch between oexpr and expr. */
6115 if (!nloc
&& mode
!= mode2
)
6117 nloc
= SET_SRC (expr
);
6118 gcc_assert (oloc
== SET_DEST (expr
));
6121 if (nloc
&& nloc
!= SET_SRC (mo
.u
.loc
))
6122 oloc
= gen_rtx_SET (oloc
, nloc
);
6125 if (oloc
== SET_DEST (mo
.u
.loc
))
6126 /* No point in duplicating. */
6128 if (!REG_P (SET_SRC (mo
.u
.loc
)))
6134 if (GET_CODE (mo
.u
.loc
) == SET
6135 && oloc
== SET_DEST (mo
.u
.loc
))
6136 /* No point in duplicating. */
6142 loc
= gen_rtx_CONCAT (mode
, v
->val_rtx
, oloc
);
6144 if (mo
.u
.loc
!= oloc
)
6145 loc
= gen_rtx_CONCAT (GET_MODE (mo
.u
.loc
), loc
, mo
.u
.loc
);
6147 /* The loc of a MO_VAL_SET may have various forms:
6149 (concat val dst): dst now holds val
6151 (concat val (set dst src)): dst now holds val, copied from src
6153 (concat (concat val dstv) dst): dst now holds val; dstv is dst
6154 after replacing mems and non-top-level regs with values.
6156 (concat (concat val dstv) (set dst src)): dst now holds val,
6157 copied from src. dstv is a value-based representation of dst, if
6158 it differs from dst. If resolution is needed, src is a REG, and
6159 its mode is the same as that of val.
6161 (concat (concat val (set dstv srcv)) (set dst src)): src
6162 copied to dst, holding val. dstv and srcv are value-based
6163 representations of dst and src, respectively.
6167 if (GET_CODE (PATTERN (cui
->insn
)) != COND_EXEC
)
6168 reverse_op (v
->val_rtx
, expr
, cui
->insn
);
6173 VAL_HOLDS_TRACK_EXPR (loc
) = 1;
6176 VAL_NEEDS_RESOLUTION (loc
) = resolve
;
6179 if (mo
.type
== MO_CLOBBER
)
6180 VAL_EXPR_IS_CLOBBERED (loc
) = 1;
6181 if (mo
.type
== MO_COPY
)
6182 VAL_EXPR_IS_COPIED (loc
) = 1;
6184 mo
.type
= MO_VAL_SET
;
6187 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
6188 log_op_type (mo
.u
.loc
, cui
->bb
, cui
->insn
, mo
.type
, dump_file
);
6189 VTI (bb
)->mos
.safe_push (mo
);
6192 /* Arguments to the call. */
6193 static rtx call_arguments
;
6195 /* Compute call_arguments. */
6198 prepare_call_arguments (basic_block bb
, rtx_insn
*insn
)
6201 rtx prev
, cur
, next
;
6202 rtx this_arg
= NULL_RTX
;
6203 tree type
= NULL_TREE
, t
, fndecl
= NULL_TREE
;
6204 tree obj_type_ref
= NULL_TREE
;
6205 CUMULATIVE_ARGS args_so_far_v
;
6206 cumulative_args_t args_so_far
;
6208 memset (&args_so_far_v
, 0, sizeof (args_so_far_v
));
6209 args_so_far
= pack_cumulative_args (&args_so_far_v
);
6210 call
= get_call_rtx_from (insn
);
6213 if (GET_CODE (XEXP (XEXP (call
, 0), 0)) == SYMBOL_REF
)
6215 rtx symbol
= XEXP (XEXP (call
, 0), 0);
6216 if (SYMBOL_REF_DECL (symbol
))
6217 fndecl
= SYMBOL_REF_DECL (symbol
);
6219 if (fndecl
== NULL_TREE
)
6220 fndecl
= MEM_EXPR (XEXP (call
, 0));
6222 && TREE_CODE (TREE_TYPE (fndecl
)) != FUNCTION_TYPE
6223 && TREE_CODE (TREE_TYPE (fndecl
)) != METHOD_TYPE
)
6225 if (fndecl
&& TYPE_ARG_TYPES (TREE_TYPE (fndecl
)))
6226 type
= TREE_TYPE (fndecl
);
6227 if (fndecl
&& TREE_CODE (fndecl
) != FUNCTION_DECL
)
6229 if (TREE_CODE (fndecl
) == INDIRECT_REF
6230 && TREE_CODE (TREE_OPERAND (fndecl
, 0)) == OBJ_TYPE_REF
)
6231 obj_type_ref
= TREE_OPERAND (fndecl
, 0);
6236 for (t
= TYPE_ARG_TYPES (type
); t
&& t
!= void_list_node
;
6238 if (TREE_CODE (TREE_VALUE (t
)) == REFERENCE_TYPE
6239 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_VALUE (t
))))
6241 if ((t
== NULL
|| t
== void_list_node
) && obj_type_ref
== NULL_TREE
)
6245 int nargs ATTRIBUTE_UNUSED
= list_length (TYPE_ARG_TYPES (type
));
6246 link
= CALL_INSN_FUNCTION_USAGE (insn
);
6247 #ifndef PCC_STATIC_STRUCT_RETURN
6248 if (aggregate_value_p (TREE_TYPE (type
), type
)
6249 && targetm
.calls
.struct_value_rtx (type
, 0) == 0)
6251 tree struct_addr
= build_pointer_type (TREE_TYPE (type
));
6252 machine_mode mode
= TYPE_MODE (struct_addr
);
6254 INIT_CUMULATIVE_ARGS (args_so_far_v
, type
, NULL_RTX
, fndecl
,
6256 reg
= targetm
.calls
.function_arg (args_so_far
, mode
,
6258 targetm
.calls
.function_arg_advance (args_so_far
, mode
,
6260 if (reg
== NULL_RTX
)
6262 for (; link
; link
= XEXP (link
, 1))
6263 if (GET_CODE (XEXP (link
, 0)) == USE
6264 && MEM_P (XEXP (XEXP (link
, 0), 0)))
6266 link
= XEXP (link
, 1);
6273 INIT_CUMULATIVE_ARGS (args_so_far_v
, type
, NULL_RTX
, fndecl
,
6275 if (obj_type_ref
&& TYPE_ARG_TYPES (type
) != void_list_node
)
6278 t
= TYPE_ARG_TYPES (type
);
6279 mode
= TYPE_MODE (TREE_VALUE (t
));
6280 this_arg
= targetm
.calls
.function_arg (args_so_far
, mode
,
6281 TREE_VALUE (t
), true);
6282 if (this_arg
&& !REG_P (this_arg
))
6283 this_arg
= NULL_RTX
;
6284 else if (this_arg
== NULL_RTX
)
6286 for (; link
; link
= XEXP (link
, 1))
6287 if (GET_CODE (XEXP (link
, 0)) == USE
6288 && MEM_P (XEXP (XEXP (link
, 0), 0)))
6290 this_arg
= XEXP (XEXP (link
, 0), 0);
6298 t
= type
? TYPE_ARG_TYPES (type
) : NULL_TREE
;
6300 for (link
= CALL_INSN_FUNCTION_USAGE (insn
); link
; link
= XEXP (link
, 1))
6301 if (GET_CODE (XEXP (link
, 0)) == USE
)
6303 rtx item
= NULL_RTX
;
6304 x
= XEXP (XEXP (link
, 0), 0);
6305 if (GET_MODE (link
) == VOIDmode
6306 || GET_MODE (link
) == BLKmode
6307 || (GET_MODE (link
) != GET_MODE (x
)
6308 && ((GET_MODE_CLASS (GET_MODE (link
)) != MODE_INT
6309 && GET_MODE_CLASS (GET_MODE (link
)) != MODE_PARTIAL_INT
)
6310 || (GET_MODE_CLASS (GET_MODE (x
)) != MODE_INT
6311 && GET_MODE_CLASS (GET_MODE (x
)) != MODE_PARTIAL_INT
))))
6312 /* Can't do anything for these, if the original type mode
6313 isn't known or can't be converted. */;
6316 cselib_val
*val
= cselib_lookup (x
, GET_MODE (x
), 0, VOIDmode
);
6317 if (val
&& cselib_preserved_value_p (val
))
6318 item
= val
->val_rtx
;
6319 else if (GET_MODE_CLASS (GET_MODE (x
)) == MODE_INT
6320 || GET_MODE_CLASS (GET_MODE (x
)) == MODE_PARTIAL_INT
)
6322 machine_mode mode
= GET_MODE (x
);
6324 while ((mode
= GET_MODE_WIDER_MODE (mode
)) != VOIDmode
6325 && GET_MODE_BITSIZE (mode
) <= BITS_PER_WORD
)
6327 rtx reg
= simplify_subreg (mode
, x
, GET_MODE (x
), 0);
6329 if (reg
== NULL_RTX
|| !REG_P (reg
))
6331 val
= cselib_lookup (reg
, mode
, 0, VOIDmode
);
6332 if (val
&& cselib_preserved_value_p (val
))
6334 item
= val
->val_rtx
;
6345 if (!frame_pointer_needed
)
6347 struct adjust_mem_data amd
;
6348 amd
.mem_mode
= VOIDmode
;
6349 amd
.stack_adjust
= -VTI (bb
)->out
.stack_adjust
;
6350 amd
.side_effects
= NULL
;
6352 mem
= simplify_replace_fn_rtx (mem
, NULL_RTX
, adjust_mems
,
6354 gcc_assert (amd
.side_effects
== NULL_RTX
);
6356 val
= cselib_lookup (mem
, GET_MODE (mem
), 0, VOIDmode
);
6357 if (val
&& cselib_preserved_value_p (val
))
6358 item
= val
->val_rtx
;
6359 else if (GET_MODE_CLASS (GET_MODE (mem
)) != MODE_INT
6360 && GET_MODE_CLASS (GET_MODE (mem
)) != MODE_PARTIAL_INT
)
6362 /* For non-integer stack argument see also if they weren't
6363 initialized by integers. */
6364 machine_mode imode
= int_mode_for_mode (GET_MODE (mem
));
6365 if (imode
!= GET_MODE (mem
) && imode
!= BLKmode
)
6367 val
= cselib_lookup (adjust_address_nv (mem
, imode
, 0),
6368 imode
, 0, VOIDmode
);
6369 if (val
&& cselib_preserved_value_p (val
))
6370 item
= lowpart_subreg (GET_MODE (x
), val
->val_rtx
,
6378 if (GET_MODE (item
) != GET_MODE (link
))
6379 item
= lowpart_subreg (GET_MODE (link
), item
, GET_MODE (item
));
6380 if (GET_MODE (x2
) != GET_MODE (link
))
6381 x2
= lowpart_subreg (GET_MODE (link
), x2
, GET_MODE (x2
));
6382 item
= gen_rtx_CONCAT (GET_MODE (link
), x2
, item
);
6384 = gen_rtx_EXPR_LIST (VOIDmode
, item
, call_arguments
);
6386 if (t
&& t
!= void_list_node
)
6388 tree argtype
= TREE_VALUE (t
);
6389 machine_mode mode
= TYPE_MODE (argtype
);
6391 if (pass_by_reference (&args_so_far_v
, mode
, argtype
, true))
6393 argtype
= build_pointer_type (argtype
);
6394 mode
= TYPE_MODE (argtype
);
6396 reg
= targetm
.calls
.function_arg (args_so_far
, mode
,
6398 if (TREE_CODE (argtype
) == REFERENCE_TYPE
6399 && INTEGRAL_TYPE_P (TREE_TYPE (argtype
))
6402 && GET_MODE (reg
) == mode
6403 && (GET_MODE_CLASS (mode
) == MODE_INT
6404 || GET_MODE_CLASS (mode
) == MODE_PARTIAL_INT
)
6406 && REGNO (x
) == REGNO (reg
)
6407 && GET_MODE (x
) == mode
6410 machine_mode indmode
6411 = TYPE_MODE (TREE_TYPE (argtype
));
6412 rtx mem
= gen_rtx_MEM (indmode
, x
);
6413 cselib_val
*val
= cselib_lookup (mem
, indmode
, 0, VOIDmode
);
6414 if (val
&& cselib_preserved_value_p (val
))
6416 item
= gen_rtx_CONCAT (indmode
, mem
, val
->val_rtx
);
6417 call_arguments
= gen_rtx_EXPR_LIST (VOIDmode
, item
,
6422 struct elt_loc_list
*l
;
6425 /* Try harder, when passing address of a constant
6426 pool integer it can be easily read back. */
6427 item
= XEXP (item
, 1);
6428 if (GET_CODE (item
) == SUBREG
)
6429 item
= SUBREG_REG (item
);
6430 gcc_assert (GET_CODE (item
) == VALUE
);
6431 val
= CSELIB_VAL_PTR (item
);
6432 for (l
= val
->locs
; l
; l
= l
->next
)
6433 if (GET_CODE (l
->loc
) == SYMBOL_REF
6434 && TREE_CONSTANT_POOL_ADDRESS_P (l
->loc
)
6435 && SYMBOL_REF_DECL (l
->loc
)
6436 && DECL_INITIAL (SYMBOL_REF_DECL (l
->loc
)))
6438 initial
= DECL_INITIAL (SYMBOL_REF_DECL (l
->loc
));
6439 if (tree_fits_shwi_p (initial
))
6441 item
= GEN_INT (tree_to_shwi (initial
));
6442 item
= gen_rtx_CONCAT (indmode
, mem
, item
);
6444 = gen_rtx_EXPR_LIST (VOIDmode
, item
,
6451 targetm
.calls
.function_arg_advance (args_so_far
, mode
,
6457 /* Add debug arguments. */
6459 && TREE_CODE (fndecl
) == FUNCTION_DECL
6460 && DECL_HAS_DEBUG_ARGS_P (fndecl
))
6462 vec
<tree
, va_gc
> **debug_args
= decl_debug_args_lookup (fndecl
);
6467 for (ix
= 0; vec_safe_iterate (*debug_args
, ix
, ¶m
); ix
+= 2)
6470 tree dtemp
= (**debug_args
)[ix
+ 1];
6471 machine_mode mode
= DECL_MODE (dtemp
);
6472 item
= gen_rtx_DEBUG_PARAMETER_REF (mode
, param
);
6473 item
= gen_rtx_CONCAT (mode
, item
, DECL_RTL_KNOWN_SET (dtemp
));
6474 call_arguments
= gen_rtx_EXPR_LIST (VOIDmode
, item
,
6480 /* Reverse call_arguments chain. */
6482 for (cur
= call_arguments
; cur
; cur
= next
)
6484 next
= XEXP (cur
, 1);
6485 XEXP (cur
, 1) = prev
;
6488 call_arguments
= prev
;
6490 x
= get_call_rtx_from (insn
);
6493 x
= XEXP (XEXP (x
, 0), 0);
6494 if (GET_CODE (x
) == SYMBOL_REF
)
6495 /* Don't record anything. */;
6496 else if (CONSTANT_P (x
))
6498 x
= gen_rtx_CONCAT (GET_MODE (x
) == VOIDmode
? Pmode
: GET_MODE (x
),
6501 = gen_rtx_EXPR_LIST (VOIDmode
, x
, call_arguments
);
6505 cselib_val
*val
= cselib_lookup (x
, GET_MODE (x
), 0, VOIDmode
);
6506 if (val
&& cselib_preserved_value_p (val
))
6508 x
= gen_rtx_CONCAT (GET_MODE (x
), pc_rtx
, val
->val_rtx
);
6510 = gen_rtx_EXPR_LIST (VOIDmode
, x
, call_arguments
);
6517 = TYPE_MODE (TREE_TYPE (OBJ_TYPE_REF_EXPR (obj_type_ref
)));
6518 rtx clobbered
= gen_rtx_MEM (mode
, this_arg
);
6520 = tree_to_shwi (OBJ_TYPE_REF_TOKEN (obj_type_ref
));
6522 clobbered
= plus_constant (mode
, clobbered
,
6523 token
* GET_MODE_SIZE (mode
));
6524 clobbered
= gen_rtx_MEM (mode
, clobbered
);
6525 x
= gen_rtx_CONCAT (mode
, gen_rtx_CLOBBER (VOIDmode
, pc_rtx
), clobbered
);
6527 = gen_rtx_EXPR_LIST (VOIDmode
, x
, call_arguments
);
6531 /* Callback for cselib_record_sets_hook, that records as micro
6532 operations uses and stores in an insn after cselib_record_sets has
6533 analyzed the sets in an insn, but before it modifies the stored
6534 values in the internal tables, unless cselib_record_sets doesn't
6535 call it directly (perhaps because we're not doing cselib in the
6536 first place, in which case sets and n_sets will be 0). */
6539 add_with_sets (rtx_insn
*insn
, struct cselib_set
*sets
, int n_sets
)
6541 basic_block bb
= BLOCK_FOR_INSN (insn
);
6543 struct count_use_info cui
;
6544 micro_operation
*mos
;
6546 cselib_hook_called
= true;
6551 cui
.n_sets
= n_sets
;
6553 n1
= VTI (bb
)->mos
.length ();
6554 cui
.store_p
= false;
6555 note_uses (&PATTERN (insn
), add_uses_1
, &cui
);
6556 n2
= VTI (bb
)->mos
.length () - 1;
6557 mos
= VTI (bb
)->mos
.address ();
6559 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
6563 while (n1
< n2
&& mos
[n1
].type
== MO_USE
)
6565 while (n1
< n2
&& mos
[n2
].type
!= MO_USE
)
6568 std::swap (mos
[n1
], mos
[n2
]);
6571 n2
= VTI (bb
)->mos
.length () - 1;
6574 while (n1
< n2
&& mos
[n1
].type
!= MO_VAL_LOC
)
6576 while (n1
< n2
&& mos
[n2
].type
== MO_VAL_LOC
)
6579 std::swap (mos
[n1
], mos
[n2
]);
6588 mo
.u
.loc
= call_arguments
;
6589 call_arguments
= NULL_RTX
;
6591 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
6592 log_op_type (PATTERN (insn
), bb
, insn
, mo
.type
, dump_file
);
6593 VTI (bb
)->mos
.safe_push (mo
);
6596 n1
= VTI (bb
)->mos
.length ();
6597 /* This will record NEXT_INSN (insn), such that we can
6598 insert notes before it without worrying about any
6599 notes that MO_USEs might emit after the insn. */
6601 note_stores (PATTERN (insn
), add_stores
, &cui
);
6602 n2
= VTI (bb
)->mos
.length () - 1;
6603 mos
= VTI (bb
)->mos
.address ();
6605 /* Order the MO_VAL_USEs first (note_stores does nothing
6606 on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
6607 insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */
6610 while (n1
< n2
&& mos
[n1
].type
== MO_VAL_USE
)
6612 while (n1
< n2
&& mos
[n2
].type
!= MO_VAL_USE
)
6615 std::swap (mos
[n1
], mos
[n2
]);
6618 n2
= VTI (bb
)->mos
.length () - 1;
6621 while (n1
< n2
&& mos
[n1
].type
== MO_CLOBBER
)
6623 while (n1
< n2
&& mos
[n2
].type
!= MO_CLOBBER
)
6626 std::swap (mos
[n1
], mos
[n2
]);
6630 static enum var_init_status
6631 find_src_status (dataflow_set
*in
, rtx src
)
6633 tree decl
= NULL_TREE
;
6634 enum var_init_status status
= VAR_INIT_STATUS_UNINITIALIZED
;
6636 if (! flag_var_tracking_uninit
)
6637 status
= VAR_INIT_STATUS_INITIALIZED
;
6639 if (src
&& REG_P (src
))
6640 decl
= var_debug_decl (REG_EXPR (src
));
6641 else if (src
&& MEM_P (src
))
6642 decl
= var_debug_decl (MEM_EXPR (src
));
6645 status
= get_init_value (in
, src
, dv_from_decl (decl
));
6650 /* SRC is the source of an assignment. Use SET to try to find what
6651 was ultimately assigned to SRC. Return that value if known,
6652 otherwise return SRC itself. */
6655 find_src_set_src (dataflow_set
*set
, rtx src
)
6657 tree decl
= NULL_TREE
; /* The variable being copied around. */
6658 rtx set_src
= NULL_RTX
; /* The value for "decl" stored in "src". */
6660 location_chain nextp
;
6664 if (src
&& REG_P (src
))
6665 decl
= var_debug_decl (REG_EXPR (src
));
6666 else if (src
&& MEM_P (src
))
6667 decl
= var_debug_decl (MEM_EXPR (src
));
6671 decl_or_value dv
= dv_from_decl (decl
);
6673 var
= shared_hash_find (set
->vars
, dv
);
6677 for (i
= 0; i
< var
->n_var_parts
&& !found
; i
++)
6678 for (nextp
= var
->var_part
[i
].loc_chain
; nextp
&& !found
;
6679 nextp
= nextp
->next
)
6680 if (rtx_equal_p (nextp
->loc
, src
))
6682 set_src
= nextp
->set_src
;
6692 /* Compute the changes of variable locations in the basic block BB. */
6695 compute_bb_dataflow (basic_block bb
)
6698 micro_operation
*mo
;
6700 dataflow_set old_out
;
6701 dataflow_set
*in
= &VTI (bb
)->in
;
6702 dataflow_set
*out
= &VTI (bb
)->out
;
6704 dataflow_set_init (&old_out
);
6705 dataflow_set_copy (&old_out
, out
);
6706 dataflow_set_copy (out
, in
);
6708 if (MAY_HAVE_DEBUG_INSNS
)
6709 local_get_addr_cache
= new hash_map
<rtx
, rtx
>;
6711 FOR_EACH_VEC_ELT (VTI (bb
)->mos
, i
, mo
)
6713 rtx_insn
*insn
= mo
->insn
;
6718 dataflow_set_clear_at_call (out
);
6723 rtx loc
= mo
->u
.loc
;
6726 var_reg_set (out
, loc
, VAR_INIT_STATUS_UNINITIALIZED
, NULL
);
6727 else if (MEM_P (loc
))
6728 var_mem_set (out
, loc
, VAR_INIT_STATUS_UNINITIALIZED
, NULL
);
6734 rtx loc
= mo
->u
.loc
;
6738 if (GET_CODE (loc
) == CONCAT
)
6740 val
= XEXP (loc
, 0);
6741 vloc
= XEXP (loc
, 1);
6749 var
= PAT_VAR_LOCATION_DECL (vloc
);
6751 clobber_variable_part (out
, NULL_RTX
,
6752 dv_from_decl (var
), 0, NULL_RTX
);
6755 if (VAL_NEEDS_RESOLUTION (loc
))
6756 val_resolve (out
, val
, PAT_VAR_LOCATION_LOC (vloc
), insn
);
6757 set_variable_part (out
, val
, dv_from_decl (var
), 0,
6758 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
,
6761 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc
)))
6762 set_variable_part (out
, PAT_VAR_LOCATION_LOC (vloc
),
6763 dv_from_decl (var
), 0,
6764 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
,
6771 rtx loc
= mo
->u
.loc
;
6772 rtx val
, vloc
, uloc
;
6774 vloc
= uloc
= XEXP (loc
, 1);
6775 val
= XEXP (loc
, 0);
6777 if (GET_CODE (val
) == CONCAT
)
6779 uloc
= XEXP (val
, 1);
6780 val
= XEXP (val
, 0);
6783 if (VAL_NEEDS_RESOLUTION (loc
))
6784 val_resolve (out
, val
, vloc
, insn
);
6786 val_store (out
, val
, uloc
, insn
, false);
6788 if (VAL_HOLDS_TRACK_EXPR (loc
))
6790 if (GET_CODE (uloc
) == REG
)
6791 var_reg_set (out
, uloc
, VAR_INIT_STATUS_UNINITIALIZED
,
6793 else if (GET_CODE (uloc
) == MEM
)
6794 var_mem_set (out
, uloc
, VAR_INIT_STATUS_UNINITIALIZED
,
6802 rtx loc
= mo
->u
.loc
;
6803 rtx val
, vloc
, uloc
;
6807 uloc
= XEXP (vloc
, 1);
6808 val
= XEXP (vloc
, 0);
6811 if (GET_CODE (uloc
) == SET
)
6813 dstv
= SET_DEST (uloc
);
6814 srcv
= SET_SRC (uloc
);
6822 if (GET_CODE (val
) == CONCAT
)
6824 dstv
= vloc
= XEXP (val
, 1);
6825 val
= XEXP (val
, 0);
6828 if (GET_CODE (vloc
) == SET
)
6830 srcv
= SET_SRC (vloc
);
6832 gcc_assert (val
!= srcv
);
6833 gcc_assert (vloc
== uloc
|| VAL_NEEDS_RESOLUTION (loc
));
6835 dstv
= vloc
= SET_DEST (vloc
);
6837 if (VAL_NEEDS_RESOLUTION (loc
))
6838 val_resolve (out
, val
, srcv
, insn
);
6840 else if (VAL_NEEDS_RESOLUTION (loc
))
6842 gcc_assert (GET_CODE (uloc
) == SET
6843 && GET_CODE (SET_SRC (uloc
)) == REG
);
6844 val_resolve (out
, val
, SET_SRC (uloc
), insn
);
6847 if (VAL_HOLDS_TRACK_EXPR (loc
))
6849 if (VAL_EXPR_IS_CLOBBERED (loc
))
6852 var_reg_delete (out
, uloc
, true);
6853 else if (MEM_P (uloc
))
6855 gcc_assert (MEM_P (dstv
));
6856 gcc_assert (MEM_ATTRS (dstv
) == MEM_ATTRS (uloc
));
6857 var_mem_delete (out
, dstv
, true);
6862 bool copied_p
= VAL_EXPR_IS_COPIED (loc
);
6863 rtx src
= NULL
, dst
= uloc
;
6864 enum var_init_status status
= VAR_INIT_STATUS_INITIALIZED
;
6866 if (GET_CODE (uloc
) == SET
)
6868 src
= SET_SRC (uloc
);
6869 dst
= SET_DEST (uloc
);
6874 if (flag_var_tracking_uninit
)
6876 status
= find_src_status (in
, src
);
6878 if (status
== VAR_INIT_STATUS_UNKNOWN
)
6879 status
= find_src_status (out
, src
);
6882 src
= find_src_set_src (in
, src
);
6886 var_reg_delete_and_set (out
, dst
, !copied_p
,
6888 else if (MEM_P (dst
))
6890 gcc_assert (MEM_P (dstv
));
6891 gcc_assert (MEM_ATTRS (dstv
) == MEM_ATTRS (dst
));
6892 var_mem_delete_and_set (out
, dstv
, !copied_p
,
6897 else if (REG_P (uloc
))
6898 var_regno_delete (out
, REGNO (uloc
));
6899 else if (MEM_P (uloc
))
6901 gcc_checking_assert (GET_CODE (vloc
) == MEM
);
6902 gcc_checking_assert (dstv
== vloc
);
6904 clobber_overlapping_mems (out
, vloc
);
6907 val_store (out
, val
, dstv
, insn
, true);
6913 rtx loc
= mo
->u
.loc
;
6916 if (GET_CODE (loc
) == SET
)
6918 set_src
= SET_SRC (loc
);
6919 loc
= SET_DEST (loc
);
6923 var_reg_delete_and_set (out
, loc
, true, VAR_INIT_STATUS_INITIALIZED
,
6925 else if (MEM_P (loc
))
6926 var_mem_delete_and_set (out
, loc
, true, VAR_INIT_STATUS_INITIALIZED
,
6933 rtx loc
= mo
->u
.loc
;
6934 enum var_init_status src_status
;
6937 if (GET_CODE (loc
) == SET
)
6939 set_src
= SET_SRC (loc
);
6940 loc
= SET_DEST (loc
);
6943 if (! flag_var_tracking_uninit
)
6944 src_status
= VAR_INIT_STATUS_INITIALIZED
;
6947 src_status
= find_src_status (in
, set_src
);
6949 if (src_status
== VAR_INIT_STATUS_UNKNOWN
)
6950 src_status
= find_src_status (out
, set_src
);
6953 set_src
= find_src_set_src (in
, set_src
);
6956 var_reg_delete_and_set (out
, loc
, false, src_status
, set_src
);
6957 else if (MEM_P (loc
))
6958 var_mem_delete_and_set (out
, loc
, false, src_status
, set_src
);
6964 rtx loc
= mo
->u
.loc
;
6967 var_reg_delete (out
, loc
, false);
6968 else if (MEM_P (loc
))
6969 var_mem_delete (out
, loc
, false);
6975 rtx loc
= mo
->u
.loc
;
6978 var_reg_delete (out
, loc
, true);
6979 else if (MEM_P (loc
))
6980 var_mem_delete (out
, loc
, true);
6985 out
->stack_adjust
+= mo
->u
.adjust
;
6990 if (MAY_HAVE_DEBUG_INSNS
)
6992 delete local_get_addr_cache
;
6993 local_get_addr_cache
= NULL
;
6995 dataflow_set_equiv_regs (out
);
6996 shared_hash_htab (out
->vars
)
6997 ->traverse
<dataflow_set
*, canonicalize_values_mark
> (out
);
6998 shared_hash_htab (out
->vars
)
6999 ->traverse
<dataflow_set
*, canonicalize_values_star
> (out
);
7001 shared_hash_htab (out
->vars
)
7002 ->traverse
<dataflow_set
*, canonicalize_loc_order_check
> (out
);
7005 changed
= dataflow_set_different (&old_out
, out
);
7006 dataflow_set_destroy (&old_out
);
7010 /* Find the locations of variables in the whole function. */
7013 vt_find_locations (void)
7015 bb_heap_t
*worklist
= new bb_heap_t (LONG_MIN
);
7016 bb_heap_t
*pending
= new bb_heap_t (LONG_MIN
);
7017 sbitmap visited
, in_worklist
, in_pending
;
7024 int htabmax
= PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE
);
7025 bool success
= true;
7027 timevar_push (TV_VAR_TRACKING_DATAFLOW
);
7028 /* Compute reverse completion order of depth first search of the CFG
7029 so that the data-flow runs faster. */
7030 rc_order
= XNEWVEC (int, n_basic_blocks_for_fn (cfun
) - NUM_FIXED_BLOCKS
);
7031 bb_order
= XNEWVEC (int, last_basic_block_for_fn (cfun
));
7032 pre_and_rev_post_order_compute (NULL
, rc_order
, false);
7033 for (i
= 0; i
< n_basic_blocks_for_fn (cfun
) - NUM_FIXED_BLOCKS
; i
++)
7034 bb_order
[rc_order
[i
]] = i
;
7037 visited
= sbitmap_alloc (last_basic_block_for_fn (cfun
));
7038 in_worklist
= sbitmap_alloc (last_basic_block_for_fn (cfun
));
7039 in_pending
= sbitmap_alloc (last_basic_block_for_fn (cfun
));
7040 bitmap_clear (in_worklist
);
7042 FOR_EACH_BB_FN (bb
, cfun
)
7043 pending
->insert (bb_order
[bb
->index
], bb
);
7044 bitmap_ones (in_pending
);
7046 while (success
&& !pending
->empty ())
7048 std::swap (worklist
, pending
);
7049 std::swap (in_worklist
, in_pending
);
7051 bitmap_clear (visited
);
7053 while (!worklist
->empty ())
7055 bb
= worklist
->extract_min ();
7056 bitmap_clear_bit (in_worklist
, bb
->index
);
7057 gcc_assert (!bitmap_bit_p (visited
, bb
->index
));
7058 if (!bitmap_bit_p (visited
, bb
->index
))
7062 int oldinsz
, oldoutsz
;
7064 bitmap_set_bit (visited
, bb
->index
);
7066 if (VTI (bb
)->in
.vars
)
7069 -= shared_hash_htab (VTI (bb
)->in
.vars
)->size ()
7070 + shared_hash_htab (VTI (bb
)->out
.vars
)->size ();
7071 oldinsz
= shared_hash_htab (VTI (bb
)->in
.vars
)->elements ();
7073 = shared_hash_htab (VTI (bb
)->out
.vars
)->elements ();
7076 oldinsz
= oldoutsz
= 0;
7078 if (MAY_HAVE_DEBUG_INSNS
)
7080 dataflow_set
*in
= &VTI (bb
)->in
, *first_out
= NULL
;
7081 bool first
= true, adjust
= false;
7083 /* Calculate the IN set as the intersection of
7084 predecessor OUT sets. */
7086 dataflow_set_clear (in
);
7087 dst_can_be_shared
= true;
7089 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
7090 if (!VTI (e
->src
)->flooded
)
7091 gcc_assert (bb_order
[bb
->index
]
7092 <= bb_order
[e
->src
->index
]);
7095 dataflow_set_copy (in
, &VTI (e
->src
)->out
);
7096 first_out
= &VTI (e
->src
)->out
;
7101 dataflow_set_merge (in
, &VTI (e
->src
)->out
);
7107 dataflow_post_merge_adjust (in
, &VTI (bb
)->permp
);
7109 /* Merge and merge_adjust should keep entries in
7111 shared_hash_htab (in
->vars
)
7112 ->traverse
<dataflow_set
*,
7113 canonicalize_loc_order_check
> (in
);
7115 if (dst_can_be_shared
)
7117 shared_hash_destroy (in
->vars
);
7118 in
->vars
= shared_hash_copy (first_out
->vars
);
7122 VTI (bb
)->flooded
= true;
7126 /* Calculate the IN set as union of predecessor OUT sets. */
7127 dataflow_set_clear (&VTI (bb
)->in
);
7128 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
7129 dataflow_set_union (&VTI (bb
)->in
, &VTI (e
->src
)->out
);
7132 changed
= compute_bb_dataflow (bb
);
7133 htabsz
+= shared_hash_htab (VTI (bb
)->in
.vars
)->size ()
7134 + shared_hash_htab (VTI (bb
)->out
.vars
)->size ();
7136 if (htabmax
&& htabsz
> htabmax
)
7138 if (MAY_HAVE_DEBUG_INSNS
)
7139 inform (DECL_SOURCE_LOCATION (cfun
->decl
),
7140 "variable tracking size limit exceeded with "
7141 "-fvar-tracking-assignments, retrying without");
7143 inform (DECL_SOURCE_LOCATION (cfun
->decl
),
7144 "variable tracking size limit exceeded");
7151 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7153 if (e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
7156 if (bitmap_bit_p (visited
, e
->dest
->index
))
7158 if (!bitmap_bit_p (in_pending
, e
->dest
->index
))
7160 /* Send E->DEST to next round. */
7161 bitmap_set_bit (in_pending
, e
->dest
->index
);
7162 pending
->insert (bb_order
[e
->dest
->index
],
7166 else if (!bitmap_bit_p (in_worklist
, e
->dest
->index
))
7168 /* Add E->DEST to current round. */
7169 bitmap_set_bit (in_worklist
, e
->dest
->index
);
7170 worklist
->insert (bb_order
[e
->dest
->index
],
7178 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
7180 (int)shared_hash_htab (VTI (bb
)->in
.vars
)->size (),
7182 (int)shared_hash_htab (VTI (bb
)->out
.vars
)->size (),
7184 (int)worklist
->nodes (), (int)pending
->nodes (),
7187 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
7189 fprintf (dump_file
, "BB %i IN:\n", bb
->index
);
7190 dump_dataflow_set (&VTI (bb
)->in
);
7191 fprintf (dump_file
, "BB %i OUT:\n", bb
->index
);
7192 dump_dataflow_set (&VTI (bb
)->out
);
7198 if (success
&& MAY_HAVE_DEBUG_INSNS
)
7199 FOR_EACH_BB_FN (bb
, cfun
)
7200 gcc_assert (VTI (bb
)->flooded
);
7205 sbitmap_free (visited
);
7206 sbitmap_free (in_worklist
);
7207 sbitmap_free (in_pending
);
7209 timevar_pop (TV_VAR_TRACKING_DATAFLOW
);
7213 /* Print the content of the LIST to dump file. */
7216 dump_attrs_list (attrs list
)
7218 for (; list
; list
= list
->next
)
7220 if (dv_is_decl_p (list
->dv
))
7221 print_mem_expr (dump_file
, dv_as_decl (list
->dv
));
7223 print_rtl_single (dump_file
, dv_as_value (list
->dv
));
7224 fprintf (dump_file
, "+" HOST_WIDE_INT_PRINT_DEC
, list
->offset
);
7226 fprintf (dump_file
, "\n");
7229 /* Print the information about variable *SLOT to dump file. */
7232 dump_var_tracking_slot (variable_def
**slot
, void *data ATTRIBUTE_UNUSED
)
7234 variable var
= *slot
;
7238 /* Continue traversing the hash table. */
7242 /* Print the information about variable VAR to dump file. */
7245 dump_var (variable var
)
7248 location_chain node
;
7250 if (dv_is_decl_p (var
->dv
))
7252 const_tree decl
= dv_as_decl (var
->dv
);
7254 if (DECL_NAME (decl
))
7256 fprintf (dump_file
, " name: %s",
7257 IDENTIFIER_POINTER (DECL_NAME (decl
)));
7258 if (dump_flags
& TDF_UID
)
7259 fprintf (dump_file
, "D.%u", DECL_UID (decl
));
7261 else if (TREE_CODE (decl
) == DEBUG_EXPR_DECL
)
7262 fprintf (dump_file
, " name: D#%u", DEBUG_TEMP_UID (decl
));
7264 fprintf (dump_file
, " name: D.%u", DECL_UID (decl
));
7265 fprintf (dump_file
, "\n");
7269 fputc (' ', dump_file
);
7270 print_rtl_single (dump_file
, dv_as_value (var
->dv
));
7273 for (i
= 0; i
< var
->n_var_parts
; i
++)
7275 fprintf (dump_file
, " offset %ld\n",
7276 (long)(var
->onepart
? 0 : VAR_PART_OFFSET (var
, i
)));
7277 for (node
= var
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
7279 fprintf (dump_file
, " ");
7280 if (node
->init
== VAR_INIT_STATUS_UNINITIALIZED
)
7281 fprintf (dump_file
, "[uninit]");
7282 print_rtl_single (dump_file
, node
->loc
);
7287 /* Print the information about variables from hash table VARS to dump file. */
7290 dump_vars (variable_table_type
*vars
)
7292 if (vars
->elements () > 0)
7294 fprintf (dump_file
, "Variables:\n");
7295 vars
->traverse
<void *, dump_var_tracking_slot
> (NULL
);
7299 /* Print the dataflow set SET to dump file. */
7302 dump_dataflow_set (dataflow_set
*set
)
7306 fprintf (dump_file
, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC
"\n",
7308 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
7312 fprintf (dump_file
, "Reg %d:", i
);
7313 dump_attrs_list (set
->regs
[i
]);
7316 dump_vars (shared_hash_htab (set
->vars
));
7317 fprintf (dump_file
, "\n");
7320 /* Print the IN and OUT sets for each basic block to dump file. */
7323 dump_dataflow_sets (void)
7327 FOR_EACH_BB_FN (bb
, cfun
)
7329 fprintf (dump_file
, "\nBasic block %d:\n", bb
->index
);
7330 fprintf (dump_file
, "IN:\n");
7331 dump_dataflow_set (&VTI (bb
)->in
);
7332 fprintf (dump_file
, "OUT:\n");
7333 dump_dataflow_set (&VTI (bb
)->out
);
7337 /* Return the variable for DV in dropped_values, inserting one if
7338 requested with INSERT. */
7340 static inline variable
7341 variable_from_dropped (decl_or_value dv
, enum insert_option insert
)
7343 variable_def
**slot
;
7345 onepart_enum_t onepart
;
7347 slot
= dropped_values
->find_slot_with_hash (dv
, dv_htab_hash (dv
), insert
);
7355 gcc_checking_assert (insert
== INSERT
);
7357 onepart
= dv_onepart_p (dv
);
7359 gcc_checking_assert (onepart
== ONEPART_VALUE
|| onepart
== ONEPART_DEXPR
);
7361 empty_var
= onepart_pool (onepart
).allocate ();
7363 empty_var
->refcount
= 1;
7364 empty_var
->n_var_parts
= 0;
7365 empty_var
->onepart
= onepart
;
7366 empty_var
->in_changed_variables
= false;
7367 empty_var
->var_part
[0].loc_chain
= NULL
;
7368 empty_var
->var_part
[0].cur_loc
= NULL
;
7369 VAR_LOC_1PAUX (empty_var
) = NULL
;
7370 set_dv_changed (dv
, true);
7377 /* Recover the one-part aux from dropped_values. */
7379 static struct onepart_aux
*
7380 recover_dropped_1paux (variable var
)
7384 gcc_checking_assert (var
->onepart
);
7386 if (VAR_LOC_1PAUX (var
))
7387 return VAR_LOC_1PAUX (var
);
7389 if (var
->onepart
== ONEPART_VDECL
)
7392 dvar
= variable_from_dropped (var
->dv
, NO_INSERT
);
7397 VAR_LOC_1PAUX (var
) = VAR_LOC_1PAUX (dvar
);
7398 VAR_LOC_1PAUX (dvar
) = NULL
;
7400 return VAR_LOC_1PAUX (var
);
7403 /* Add variable VAR to the hash table of changed variables and
7404 if it has no locations delete it from SET's hash table. */
7407 variable_was_changed (variable var
, dataflow_set
*set
)
7409 hashval_t hash
= dv_htab_hash (var
->dv
);
7413 variable_def
**slot
;
7415 /* Remember this decl or VALUE has been added to changed_variables. */
7416 set_dv_changed (var
->dv
, true);
7418 slot
= changed_variables
->find_slot_with_hash (var
->dv
, hash
, INSERT
);
7422 variable old_var
= *slot
;
7423 gcc_assert (old_var
->in_changed_variables
);
7424 old_var
->in_changed_variables
= false;
7425 if (var
!= old_var
&& var
->onepart
)
7427 /* Restore the auxiliary info from an empty variable
7428 previously created for changed_variables, so it is
7430 gcc_checking_assert (!VAR_LOC_1PAUX (var
));
7431 VAR_LOC_1PAUX (var
) = VAR_LOC_1PAUX (old_var
);
7432 VAR_LOC_1PAUX (old_var
) = NULL
;
7434 variable_htab_free (*slot
);
7437 if (set
&& var
->n_var_parts
== 0)
7439 onepart_enum_t onepart
= var
->onepart
;
7440 variable empty_var
= NULL
;
7441 variable_def
**dslot
= NULL
;
7443 if (onepart
== ONEPART_VALUE
|| onepart
== ONEPART_DEXPR
)
7445 dslot
= dropped_values
->find_slot_with_hash (var
->dv
,
7446 dv_htab_hash (var
->dv
),
7452 gcc_checking_assert (!empty_var
->in_changed_variables
);
7453 if (!VAR_LOC_1PAUX (var
))
7455 VAR_LOC_1PAUX (var
) = VAR_LOC_1PAUX (empty_var
);
7456 VAR_LOC_1PAUX (empty_var
) = NULL
;
7459 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var
));
7465 empty_var
= onepart_pool (onepart
).allocate ();
7466 empty_var
->dv
= var
->dv
;
7467 empty_var
->refcount
= 1;
7468 empty_var
->n_var_parts
= 0;
7469 empty_var
->onepart
= onepart
;
7472 empty_var
->refcount
++;
7477 empty_var
->refcount
++;
7478 empty_var
->in_changed_variables
= true;
7482 empty_var
->var_part
[0].loc_chain
= NULL
;
7483 empty_var
->var_part
[0].cur_loc
= NULL
;
7484 VAR_LOC_1PAUX (empty_var
) = VAR_LOC_1PAUX (var
);
7485 VAR_LOC_1PAUX (var
) = NULL
;
7491 if (var
->onepart
&& !VAR_LOC_1PAUX (var
))
7492 recover_dropped_1paux (var
);
7494 var
->in_changed_variables
= true;
7501 if (var
->n_var_parts
== 0)
7503 variable_def
**slot
;
7506 slot
= shared_hash_find_slot_noinsert (set
->vars
, var
->dv
);
7509 if (shared_hash_shared (set
->vars
))
7510 slot
= shared_hash_find_slot_unshare (&set
->vars
, var
->dv
,
7512 shared_hash_htab (set
->vars
)->clear_slot (slot
);
7518 /* Look for the index in VAR->var_part corresponding to OFFSET.
7519 Return -1 if not found. If INSERTION_POINT is non-NULL, the
7520 referenced int will be set to the index that the part has or should
7521 have, if it should be inserted. */
7524 find_variable_location_part (variable var
, HOST_WIDE_INT offset
,
7525 int *insertion_point
)
7534 if (insertion_point
)
7535 *insertion_point
= 0;
7537 return var
->n_var_parts
- 1;
7540 /* Find the location part. */
7542 high
= var
->n_var_parts
;
7545 pos
= (low
+ high
) / 2;
7546 if (VAR_PART_OFFSET (var
, pos
) < offset
)
7553 if (insertion_point
)
7554 *insertion_point
= pos
;
7556 if (pos
< var
->n_var_parts
&& VAR_PART_OFFSET (var
, pos
) == offset
)
7562 static variable_def
**
7563 set_slot_part (dataflow_set
*set
, rtx loc
, variable_def
**slot
,
7564 decl_or_value dv
, HOST_WIDE_INT offset
,
7565 enum var_init_status initialized
, rtx set_src
)
7568 location_chain node
, next
;
7569 location_chain
*nextp
;
7571 onepart_enum_t onepart
;
7576 onepart
= var
->onepart
;
7578 onepart
= dv_onepart_p (dv
);
7580 gcc_checking_assert (offset
== 0 || !onepart
);
7581 gcc_checking_assert (loc
!= dv_as_opaque (dv
));
7583 if (! flag_var_tracking_uninit
)
7584 initialized
= VAR_INIT_STATUS_INITIALIZED
;
7588 /* Create new variable information. */
7589 var
= onepart_pool (onepart
).allocate ();
7592 var
->n_var_parts
= 1;
7593 var
->onepart
= onepart
;
7594 var
->in_changed_variables
= false;
7596 VAR_LOC_1PAUX (var
) = NULL
;
7598 VAR_PART_OFFSET (var
, 0) = offset
;
7599 var
->var_part
[0].loc_chain
= NULL
;
7600 var
->var_part
[0].cur_loc
= NULL
;
7603 nextp
= &var
->var_part
[0].loc_chain
;
7609 gcc_assert (dv_as_opaque (var
->dv
) == dv_as_opaque (dv
));
7613 if (GET_CODE (loc
) == VALUE
)
7615 for (nextp
= &var
->var_part
[0].loc_chain
; (node
= *nextp
);
7616 nextp
= &node
->next
)
7617 if (GET_CODE (node
->loc
) == VALUE
)
7619 if (node
->loc
== loc
)
7624 if (canon_value_cmp (node
->loc
, loc
))
7632 else if (REG_P (node
->loc
) || MEM_P (node
->loc
))
7640 else if (REG_P (loc
))
7642 for (nextp
= &var
->var_part
[0].loc_chain
; (node
= *nextp
);
7643 nextp
= &node
->next
)
7644 if (REG_P (node
->loc
))
7646 if (REGNO (node
->loc
) < REGNO (loc
))
7650 if (REGNO (node
->loc
) == REGNO (loc
))
7663 else if (MEM_P (loc
))
7665 for (nextp
= &var
->var_part
[0].loc_chain
; (node
= *nextp
);
7666 nextp
= &node
->next
)
7667 if (REG_P (node
->loc
))
7669 else if (MEM_P (node
->loc
))
7671 if ((r
= loc_cmp (XEXP (node
->loc
, 0), XEXP (loc
, 0))) >= 0)
7683 for (nextp
= &var
->var_part
[0].loc_chain
; (node
= *nextp
);
7684 nextp
= &node
->next
)
7685 if ((r
= loc_cmp (node
->loc
, loc
)) >= 0)
7693 if (shared_var_p (var
, set
->vars
))
7695 slot
= unshare_variable (set
, slot
, var
, initialized
);
7697 for (nextp
= &var
->var_part
[0].loc_chain
; c
;
7698 nextp
= &(*nextp
)->next
)
7700 gcc_assert ((!node
&& !*nextp
) || node
->loc
== (*nextp
)->loc
);
7707 gcc_assert (dv_as_decl (var
->dv
) == dv_as_decl (dv
));
7709 pos
= find_variable_location_part (var
, offset
, &inspos
);
7713 node
= var
->var_part
[pos
].loc_chain
;
7716 && ((REG_P (node
->loc
) && REG_P (loc
)
7717 && REGNO (node
->loc
) == REGNO (loc
))
7718 || rtx_equal_p (node
->loc
, loc
)))
7720 /* LOC is in the beginning of the chain so we have nothing
7722 if (node
->init
< initialized
)
7723 node
->init
= initialized
;
7724 if (set_src
!= NULL
)
7725 node
->set_src
= set_src
;
7731 /* We have to make a copy of a shared variable. */
7732 if (shared_var_p (var
, set
->vars
))
7734 slot
= unshare_variable (set
, slot
, var
, initialized
);
7741 /* We have not found the location part, new one will be created. */
7743 /* We have to make a copy of the shared variable. */
7744 if (shared_var_p (var
, set
->vars
))
7746 slot
= unshare_variable (set
, slot
, var
, initialized
);
7750 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
7751 thus there are at most MAX_VAR_PARTS different offsets. */
7752 gcc_assert (var
->n_var_parts
< MAX_VAR_PARTS
7753 && (!var
->n_var_parts
|| !onepart
));
7755 /* We have to move the elements of array starting at index
7756 inspos to the next position. */
7757 for (pos
= var
->n_var_parts
; pos
> inspos
; pos
--)
7758 var
->var_part
[pos
] = var
->var_part
[pos
- 1];
7761 gcc_checking_assert (!onepart
);
7762 VAR_PART_OFFSET (var
, pos
) = offset
;
7763 var
->var_part
[pos
].loc_chain
= NULL
;
7764 var
->var_part
[pos
].cur_loc
= NULL
;
7767 /* Delete the location from the list. */
7768 nextp
= &var
->var_part
[pos
].loc_chain
;
7769 for (node
= var
->var_part
[pos
].loc_chain
; node
; node
= next
)
7772 if ((REG_P (node
->loc
) && REG_P (loc
)
7773 && REGNO (node
->loc
) == REGNO (loc
))
7774 || rtx_equal_p (node
->loc
, loc
))
7776 /* Save these values, to assign to the new node, before
7777 deleting this one. */
7778 if (node
->init
> initialized
)
7779 initialized
= node
->init
;
7780 if (node
->set_src
!= NULL
&& set_src
== NULL
)
7781 set_src
= node
->set_src
;
7782 if (var
->var_part
[pos
].cur_loc
== node
->loc
)
7783 var
->var_part
[pos
].cur_loc
= NULL
;
7789 nextp
= &node
->next
;
7792 nextp
= &var
->var_part
[pos
].loc_chain
;
7795 /* Add the location to the beginning. */
7796 node
= new location_chain_def
;
7798 node
->init
= initialized
;
7799 node
->set_src
= set_src
;
7800 node
->next
= *nextp
;
7803 /* If no location was emitted do so. */
7804 if (var
->var_part
[pos
].cur_loc
== NULL
)
7805 variable_was_changed (var
, set
);
7810 /* Set the part of variable's location in the dataflow set SET. The
7811 variable part is specified by variable's declaration in DV and
7812 offset OFFSET and the part's location by LOC. IOPT should be
7813 NO_INSERT if the variable is known to be in SET already and the
7814 variable hash table must not be resized, and INSERT otherwise. */
7817 set_variable_part (dataflow_set
*set
, rtx loc
,
7818 decl_or_value dv
, HOST_WIDE_INT offset
,
7819 enum var_init_status initialized
, rtx set_src
,
7820 enum insert_option iopt
)
7822 variable_def
**slot
;
7824 if (iopt
== NO_INSERT
)
7825 slot
= shared_hash_find_slot_noinsert (set
->vars
, dv
);
7828 slot
= shared_hash_find_slot (set
->vars
, dv
);
7830 slot
= shared_hash_find_slot_unshare (&set
->vars
, dv
, iopt
);
7832 set_slot_part (set
, loc
, slot
, dv
, offset
, initialized
, set_src
);
7835 /* Remove all recorded register locations for the given variable part
7836 from dataflow set SET, except for those that are identical to loc.
7837 The variable part is specified by variable's declaration or value
7838 DV and offset OFFSET. */
7840 static variable_def
**
7841 clobber_slot_part (dataflow_set
*set
, rtx loc
, variable_def
**slot
,
7842 HOST_WIDE_INT offset
, rtx set_src
)
7844 variable var
= *slot
;
7845 int pos
= find_variable_location_part (var
, offset
, NULL
);
7849 location_chain node
, next
;
7851 /* Remove the register locations from the dataflow set. */
7852 next
= var
->var_part
[pos
].loc_chain
;
7853 for (node
= next
; node
; node
= next
)
7856 if (node
->loc
!= loc
7857 && (!flag_var_tracking_uninit
7860 || !rtx_equal_p (set_src
, node
->set_src
)))
7862 if (REG_P (node
->loc
))
7867 /* Remove the variable part from the register's
7868 list, but preserve any other variable parts
7869 that might be regarded as live in that same
7871 anextp
= &set
->regs
[REGNO (node
->loc
)];
7872 for (anode
= *anextp
; anode
; anode
= anext
)
7874 anext
= anode
->next
;
7875 if (dv_as_opaque (anode
->dv
) == dv_as_opaque (var
->dv
)
7876 && anode
->offset
== offset
)
7882 anextp
= &anode
->next
;
7886 slot
= delete_slot_part (set
, node
->loc
, slot
, offset
);
7894 /* Remove all recorded register locations for the given variable part
7895 from dataflow set SET, except for those that are identical to loc.
7896 The variable part is specified by variable's declaration or value
7897 DV and offset OFFSET. */
7900 clobber_variable_part (dataflow_set
*set
, rtx loc
, decl_or_value dv
,
7901 HOST_WIDE_INT offset
, rtx set_src
)
7903 variable_def
**slot
;
7905 if (!dv_as_opaque (dv
)
7906 || (!dv_is_value_p (dv
) && ! DECL_P (dv_as_decl (dv
))))
7909 slot
= shared_hash_find_slot_noinsert (set
->vars
, dv
);
7913 clobber_slot_part (set
, loc
, slot
, offset
, set_src
);
7916 /* Delete the part of variable's location from dataflow set SET. The
7917 variable part is specified by its SET->vars slot SLOT and offset
7918 OFFSET and the part's location by LOC. */
7920 static variable_def
**
7921 delete_slot_part (dataflow_set
*set
, rtx loc
, variable_def
**slot
,
7922 HOST_WIDE_INT offset
)
7924 variable var
= *slot
;
7925 int pos
= find_variable_location_part (var
, offset
, NULL
);
7929 location_chain node
, next
;
7930 location_chain
*nextp
;
7934 if (shared_var_p (var
, set
->vars
))
7936 /* If the variable contains the location part we have to
7937 make a copy of the variable. */
7938 for (node
= var
->var_part
[pos
].loc_chain
; node
;
7941 if ((REG_P (node
->loc
) && REG_P (loc
)
7942 && REGNO (node
->loc
) == REGNO (loc
))
7943 || rtx_equal_p (node
->loc
, loc
))
7945 slot
= unshare_variable (set
, slot
, var
,
7946 VAR_INIT_STATUS_UNKNOWN
);
7953 if (pos
== 0 && var
->onepart
&& VAR_LOC_1PAUX (var
))
7954 cur_loc
= VAR_LOC_FROM (var
);
7956 cur_loc
= var
->var_part
[pos
].cur_loc
;
7958 /* Delete the location part. */
7960 nextp
= &var
->var_part
[pos
].loc_chain
;
7961 for (node
= *nextp
; node
; node
= next
)
7964 if ((REG_P (node
->loc
) && REG_P (loc
)
7965 && REGNO (node
->loc
) == REGNO (loc
))
7966 || rtx_equal_p (node
->loc
, loc
))
7968 /* If we have deleted the location which was last emitted
7969 we have to emit new location so add the variable to set
7970 of changed variables. */
7971 if (cur_loc
== node
->loc
)
7974 var
->var_part
[pos
].cur_loc
= NULL
;
7975 if (pos
== 0 && var
->onepart
&& VAR_LOC_1PAUX (var
))
7976 VAR_LOC_FROM (var
) = NULL
;
7983 nextp
= &node
->next
;
7986 if (var
->var_part
[pos
].loc_chain
== NULL
)
7990 while (pos
< var
->n_var_parts
)
7992 var
->var_part
[pos
] = var
->var_part
[pos
+ 1];
7997 variable_was_changed (var
, set
);
8003 /* Delete the part of variable's location from dataflow set SET. The
8004 variable part is specified by variable's declaration or value DV
8005 and offset OFFSET and the part's location by LOC. */
8008 delete_variable_part (dataflow_set
*set
, rtx loc
, decl_or_value dv
,
8009 HOST_WIDE_INT offset
)
8011 variable_def
**slot
= shared_hash_find_slot_noinsert (set
->vars
, dv
);
8015 delete_slot_part (set
, loc
, slot
, offset
);
8019 /* Structure for passing some other parameters to function
8020 vt_expand_loc_callback. */
8021 struct expand_loc_callback_data
8023 /* The variables and values active at this point. */
8024 variable_table_type
*vars
;
8026 /* Stack of values and debug_exprs under expansion, and their
8028 auto_vec
<rtx
, 4> expanding
;
8030 /* Stack of values and debug_exprs whose expansion hit recursion
8031 cycles. They will have VALUE_RECURSED_INTO marked when added to
8032 this list. This flag will be cleared if any of its dependencies
8033 resolves to a valid location. So, if the flag remains set at the
8034 end of the search, we know no valid location for this one can
8036 auto_vec
<rtx
, 4> pending
;
8038 /* The maximum depth among the sub-expressions under expansion.
8039 Zero indicates no expansion so far. */
8043 /* Allocate the one-part auxiliary data structure for VAR, with enough
8044 room for COUNT dependencies. */
8047 loc_exp_dep_alloc (variable var
, int count
)
8051 gcc_checking_assert (var
->onepart
);
8053 /* We can be called with COUNT == 0 to allocate the data structure
8054 without any dependencies, e.g. for the backlinks only. However,
8055 if we are specifying a COUNT, then the dependency list must have
8056 been emptied before. It would be possible to adjust pointers or
8057 force it empty here, but this is better done at an earlier point
8058 in the algorithm, so we instead leave an assertion to catch
8060 gcc_checking_assert (!count
8061 || VAR_LOC_DEP_VEC (var
) == NULL
8062 || VAR_LOC_DEP_VEC (var
)->is_empty ());
8064 if (VAR_LOC_1PAUX (var
) && VAR_LOC_DEP_VEC (var
)->space (count
))
8067 allocsize
= offsetof (struct onepart_aux
, deps
)
8068 + vec
<loc_exp_dep
, va_heap
, vl_embed
>::embedded_size (count
);
8070 if (VAR_LOC_1PAUX (var
))
8072 VAR_LOC_1PAUX (var
) = XRESIZEVAR (struct onepart_aux
,
8073 VAR_LOC_1PAUX (var
), allocsize
);
8074 /* If the reallocation moves the onepaux structure, the
8075 back-pointer to BACKLINKS in the first list member will still
8076 point to its old location. Adjust it. */
8077 if (VAR_LOC_DEP_LST (var
))
8078 VAR_LOC_DEP_LST (var
)->pprev
= VAR_LOC_DEP_LSTP (var
);
8082 VAR_LOC_1PAUX (var
) = XNEWVAR (struct onepart_aux
, allocsize
);
8083 *VAR_LOC_DEP_LSTP (var
) = NULL
;
8084 VAR_LOC_FROM (var
) = NULL
;
8085 VAR_LOC_DEPTH (var
).complexity
= 0;
8086 VAR_LOC_DEPTH (var
).entryvals
= 0;
8088 VAR_LOC_DEP_VEC (var
)->embedded_init (count
);
8091 /* Remove all entries from the vector of active dependencies of VAR,
8092 removing them from the back-links lists too. */
8095 loc_exp_dep_clear (variable var
)
8097 while (VAR_LOC_DEP_VEC (var
) && !VAR_LOC_DEP_VEC (var
)->is_empty ())
8099 loc_exp_dep
*led
= &VAR_LOC_DEP_VEC (var
)->last ();
8101 led
->next
->pprev
= led
->pprev
;
8103 *led
->pprev
= led
->next
;
8104 VAR_LOC_DEP_VEC (var
)->pop ();
8108 /* Insert an active dependency from VAR on X to the vector of
8109 dependencies, and add the corresponding back-link to X's list of
8110 back-links in VARS. */
8113 loc_exp_insert_dep (variable var
, rtx x
, variable_table_type
*vars
)
8119 dv
= dv_from_rtx (x
);
8121 /* ??? Build a vector of variables parallel to EXPANDING, to avoid
8122 an additional look up? */
8123 xvar
= vars
->find_with_hash (dv
, dv_htab_hash (dv
));
8127 xvar
= variable_from_dropped (dv
, NO_INSERT
);
8128 gcc_checking_assert (xvar
);
8131 /* No point in adding the same backlink more than once. This may
8132 arise if say the same value appears in two complex expressions in
8133 the same loc_list, or even more than once in a single
8135 if (VAR_LOC_DEP_LST (xvar
) && VAR_LOC_DEP_LST (xvar
)->dv
== var
->dv
)
8138 if (var
->onepart
== NOT_ONEPART
)
8139 led
= new loc_exp_dep
;
8143 memset (&empty
, 0, sizeof (empty
));
8144 VAR_LOC_DEP_VEC (var
)->quick_push (empty
);
8145 led
= &VAR_LOC_DEP_VEC (var
)->last ();
8150 loc_exp_dep_alloc (xvar
, 0);
8151 led
->pprev
= VAR_LOC_DEP_LSTP (xvar
);
8152 led
->next
= *led
->pprev
;
8154 led
->next
->pprev
= &led
->next
;
8158 /* Create active dependencies of VAR on COUNT values starting at
8159 VALUE, and corresponding back-links to the entries in VARS. Return
8160 true if we found any pending-recursion results. */
8163 loc_exp_dep_set (variable var
, rtx result
, rtx
*value
, int count
,
8164 variable_table_type
*vars
)
8166 bool pending_recursion
= false;
8168 gcc_checking_assert (VAR_LOC_DEP_VEC (var
) == NULL
8169 || VAR_LOC_DEP_VEC (var
)->is_empty ());
8171 /* Set up all dependencies from last_child (as set up at the end of
8172 the loop above) to the end. */
8173 loc_exp_dep_alloc (var
, count
);
8179 if (!pending_recursion
)
8180 pending_recursion
= !result
&& VALUE_RECURSED_INTO (x
);
8182 loc_exp_insert_dep (var
, x
, vars
);
8185 return pending_recursion
;
8188 /* Notify the back-links of IVAR that are pending recursion that we
8189 have found a non-NIL value for it, so they are cleared for another
8190 attempt to compute a current location. */
8193 notify_dependents_of_resolved_value (variable ivar
, variable_table_type
*vars
)
8195 loc_exp_dep
*led
, *next
;
8197 for (led
= VAR_LOC_DEP_LST (ivar
); led
; led
= next
)
8199 decl_or_value dv
= led
->dv
;
8204 if (dv_is_value_p (dv
))
8206 rtx value
= dv_as_value (dv
);
8208 /* If we have already resolved it, leave it alone. */
8209 if (!VALUE_RECURSED_INTO (value
))
8212 /* Check that VALUE_RECURSED_INTO, true from the test above,
8213 implies NO_LOC_P. */
8214 gcc_checking_assert (NO_LOC_P (value
));
8216 /* We won't notify variables that are being expanded,
8217 because their dependency list is cleared before
8219 NO_LOC_P (value
) = false;
8220 VALUE_RECURSED_INTO (value
) = false;
8222 gcc_checking_assert (dv_changed_p (dv
));
8226 gcc_checking_assert (dv_onepart_p (dv
) != NOT_ONEPART
);
8227 if (!dv_changed_p (dv
))
8231 var
= vars
->find_with_hash (dv
, dv_htab_hash (dv
));
8234 var
= variable_from_dropped (dv
, NO_INSERT
);
8237 notify_dependents_of_resolved_value (var
, vars
);
8240 next
->pprev
= led
->pprev
;
8248 static rtx
vt_expand_loc_callback (rtx x
, bitmap regs
,
8249 int max_depth
, void *data
);
8251 /* Return the combined depth, when one sub-expression evaluated to
8252 BEST_DEPTH and the previous known depth was SAVED_DEPTH. */
8254 static inline expand_depth
8255 update_depth (expand_depth saved_depth
, expand_depth best_depth
)
8257 /* If we didn't find anything, stick with what we had. */
8258 if (!best_depth
.complexity
)
8261 /* If we found hadn't found anything, use the depth of the current
8262 expression. Do NOT add one extra level, we want to compute the
8263 maximum depth among sub-expressions. We'll increment it later,
8265 if (!saved_depth
.complexity
)
8268 /* Combine the entryval count so that regardless of which one we
8269 return, the entryval count is accurate. */
8270 best_depth
.entryvals
= saved_depth
.entryvals
8271 = best_depth
.entryvals
+ saved_depth
.entryvals
;
8273 if (saved_depth
.complexity
< best_depth
.complexity
)
8279 /* Expand VAR to a location RTX, updating its cur_loc. Use REGS and
8280 DATA for cselib expand callback. If PENDRECP is given, indicate in
8281 it whether any sub-expression couldn't be fully evaluated because
8282 it is pending recursion resolution. */
8285 vt_expand_var_loc_chain (variable var
, bitmap regs
, void *data
, bool *pendrecp
)
8287 struct expand_loc_callback_data
*elcd
8288 = (struct expand_loc_callback_data
*) data
;
8289 location_chain loc
, next
;
8291 int first_child
, result_first_child
, last_child
;
8292 bool pending_recursion
;
8293 rtx loc_from
= NULL
;
8294 struct elt_loc_list
*cloc
= NULL
;
8295 expand_depth depth
= { 0, 0 }, saved_depth
= elcd
->depth
;
8296 int wanted_entryvals
, found_entryvals
= 0;
8298 /* Clear all backlinks pointing at this, so that we're not notified
8299 while we're active. */
8300 loc_exp_dep_clear (var
);
8303 if (var
->onepart
== ONEPART_VALUE
)
8305 cselib_val
*val
= CSELIB_VAL_PTR (dv_as_value (var
->dv
));
8307 gcc_checking_assert (cselib_preserved_value_p (val
));
8312 first_child
= result_first_child
= last_child
8313 = elcd
->expanding
.length ();
8315 wanted_entryvals
= found_entryvals
;
8317 /* Attempt to expand each available location in turn. */
8318 for (next
= loc
= var
->n_var_parts
? var
->var_part
[0].loc_chain
: NULL
;
8319 loc
|| cloc
; loc
= next
)
8321 result_first_child
= last_child
;
8325 loc_from
= cloc
->loc
;
8328 if (unsuitable_loc (loc_from
))
8333 loc_from
= loc
->loc
;
8337 gcc_checking_assert (!unsuitable_loc (loc_from
));
8339 elcd
->depth
.complexity
= elcd
->depth
.entryvals
= 0;
8340 result
= cselib_expand_value_rtx_cb (loc_from
, regs
, EXPR_DEPTH
,
8341 vt_expand_loc_callback
, data
);
8342 last_child
= elcd
->expanding
.length ();
8346 depth
= elcd
->depth
;
8348 gcc_checking_assert (depth
.complexity
8349 || result_first_child
== last_child
);
8351 if (last_child
- result_first_child
!= 1)
8353 if (!depth
.complexity
&& GET_CODE (result
) == ENTRY_VALUE
)
8358 if (depth
.complexity
<= EXPR_USE_DEPTH
)
8360 if (depth
.entryvals
<= wanted_entryvals
)
8362 else if (!found_entryvals
|| depth
.entryvals
< found_entryvals
)
8363 found_entryvals
= depth
.entryvals
;
8369 /* Set it up in case we leave the loop. */
8370 depth
.complexity
= depth
.entryvals
= 0;
8372 result_first_child
= first_child
;
8375 if (!loc_from
&& wanted_entryvals
< found_entryvals
)
8377 /* We found entries with ENTRY_VALUEs and skipped them. Since
8378 we could not find any expansions without ENTRY_VALUEs, but we
8379 found at least one with them, go back and get an entry with
8380 the minimum number ENTRY_VALUE count that we found. We could
8381 avoid looping, but since each sub-loc is already resolved,
8382 the re-expansion should be trivial. ??? Should we record all
8383 attempted locs as dependencies, so that we retry the
8384 expansion should any of them change, in the hope it can give
8385 us a new entry without an ENTRY_VALUE? */
8386 elcd
->expanding
.truncate (first_child
);
8390 /* Register all encountered dependencies as active. */
8391 pending_recursion
= loc_exp_dep_set
8392 (var
, result
, elcd
->expanding
.address () + result_first_child
,
8393 last_child
- result_first_child
, elcd
->vars
);
8395 elcd
->expanding
.truncate (first_child
);
8397 /* Record where the expansion came from. */
8398 gcc_checking_assert (!result
|| !pending_recursion
);
8399 VAR_LOC_FROM (var
) = loc_from
;
8400 VAR_LOC_DEPTH (var
) = depth
;
8402 gcc_checking_assert (!depth
.complexity
== !result
);
8404 elcd
->depth
= update_depth (saved_depth
, depth
);
8406 /* Indicate whether any of the dependencies are pending recursion
8409 *pendrecp
= pending_recursion
;
8411 if (!pendrecp
|| !pending_recursion
)
8412 var
->var_part
[0].cur_loc
= result
;
8417 /* Callback for cselib_expand_value, that looks for expressions
8418 holding the value in the var-tracking hash tables. Return X for
8419 standard processing, anything else is to be used as-is. */
8422 vt_expand_loc_callback (rtx x
, bitmap regs
,
8423 int max_depth ATTRIBUTE_UNUSED
,
8426 struct expand_loc_callback_data
*elcd
8427 = (struct expand_loc_callback_data
*) data
;
8431 bool pending_recursion
= false;
8432 bool from_empty
= false;
8434 switch (GET_CODE (x
))
8437 subreg
= cselib_expand_value_rtx_cb (SUBREG_REG (x
), regs
,
8439 vt_expand_loc_callback
, data
);
8444 result
= simplify_gen_subreg (GET_MODE (x
), subreg
,
8445 GET_MODE (SUBREG_REG (x
)),
8448 /* Invalid SUBREGs are ok in debug info. ??? We could try
8449 alternate expansions for the VALUE as well. */
8451 result
= gen_rtx_raw_SUBREG (GET_MODE (x
), subreg
, SUBREG_BYTE (x
));
8457 dv
= dv_from_rtx (x
);
8464 elcd
->expanding
.safe_push (x
);
8466 /* Check that VALUE_RECURSED_INTO implies NO_LOC_P. */
8467 gcc_checking_assert (!VALUE_RECURSED_INTO (x
) || NO_LOC_P (x
));
8471 gcc_checking_assert (VALUE_RECURSED_INTO (x
) || !dv_changed_p (dv
));
8475 var
= elcd
->vars
->find_with_hash (dv
, dv_htab_hash (dv
));
8480 var
= variable_from_dropped (dv
, INSERT
);
8483 gcc_checking_assert (var
);
8485 if (!dv_changed_p (dv
))
8487 gcc_checking_assert (!NO_LOC_P (x
));
8488 gcc_checking_assert (var
->var_part
[0].cur_loc
);
8489 gcc_checking_assert (VAR_LOC_1PAUX (var
));
8490 gcc_checking_assert (VAR_LOC_1PAUX (var
)->depth
.complexity
);
8492 elcd
->depth
= update_depth (elcd
->depth
, VAR_LOC_1PAUX (var
)->depth
);
8494 return var
->var_part
[0].cur_loc
;
8497 VALUE_RECURSED_INTO (x
) = true;
8498 /* This is tentative, but it makes some tests simpler. */
8499 NO_LOC_P (x
) = true;
8501 gcc_checking_assert (var
->n_var_parts
== 1 || from_empty
);
8503 result
= vt_expand_var_loc_chain (var
, regs
, data
, &pending_recursion
);
8505 if (pending_recursion
)
8507 gcc_checking_assert (!result
);
8508 elcd
->pending
.safe_push (x
);
8512 NO_LOC_P (x
) = !result
;
8513 VALUE_RECURSED_INTO (x
) = false;
8514 set_dv_changed (dv
, false);
8517 notify_dependents_of_resolved_value (var
, elcd
->vars
);
8523 /* While expanding variables, we may encounter recursion cycles
8524 because of mutual (possibly indirect) dependencies between two
8525 particular variables (or values), say A and B. If we're trying to
8526 expand A when we get to B, which in turn attempts to expand A, if
8527 we can't find any other expansion for B, we'll add B to this
8528 pending-recursion stack, and tentatively return NULL for its
8529 location. This tentative value will be used for any other
8530 occurrences of B, unless A gets some other location, in which case
8531 it will notify B that it is worth another try at computing a
8532 location for it, and it will use the location computed for A then.
8533 At the end of the expansion, the tentative NULL locations become
8534 final for all members of PENDING that didn't get a notification.
8535 This function performs this finalization of NULL locations. */
8538 resolve_expansions_pending_recursion (vec
<rtx
, va_heap
> *pending
)
8540 while (!pending
->is_empty ())
8542 rtx x
= pending
->pop ();
8545 if (!VALUE_RECURSED_INTO (x
))
8548 gcc_checking_assert (NO_LOC_P (x
));
8549 VALUE_RECURSED_INTO (x
) = false;
8550 dv
= dv_from_rtx (x
);
8551 gcc_checking_assert (dv_changed_p (dv
));
8552 set_dv_changed (dv
, false);
8556 /* Initialize expand_loc_callback_data D with variable hash table V.
8557 It must be a macro because of alloca (vec stack). */
8558 #define INIT_ELCD(d, v) \
8562 (d).depth.complexity = (d).depth.entryvals = 0; \
8565 /* Finalize expand_loc_callback_data D, resolved to location L. */
8566 #define FINI_ELCD(d, l) \
8569 resolve_expansions_pending_recursion (&(d).pending); \
8570 (d).pending.release (); \
8571 (d).expanding.release (); \
8573 if ((l) && MEM_P (l)) \
8574 (l) = targetm.delegitimize_address (l); \
8578 /* Expand VALUEs and DEBUG_EXPRs in LOC to a location, using the
8579 equivalences in VARS, updating their CUR_LOCs in the process. */
8582 vt_expand_loc (rtx loc
, variable_table_type
*vars
)
8584 struct expand_loc_callback_data data
;
8587 if (!MAY_HAVE_DEBUG_INSNS
)
8590 INIT_ELCD (data
, vars
);
8592 result
= cselib_expand_value_rtx_cb (loc
, scratch_regs
, EXPR_DEPTH
,
8593 vt_expand_loc_callback
, &data
);
8595 FINI_ELCD (data
, result
);
8600 /* Expand the one-part VARiable to a location, using the equivalences
8601 in VARS, updating their CUR_LOCs in the process. */
8604 vt_expand_1pvar (variable var
, variable_table_type
*vars
)
8606 struct expand_loc_callback_data data
;
8609 gcc_checking_assert (var
->onepart
&& var
->n_var_parts
== 1);
8611 if (!dv_changed_p (var
->dv
))
8612 return var
->var_part
[0].cur_loc
;
8614 INIT_ELCD (data
, vars
);
8616 loc
= vt_expand_var_loc_chain (var
, scratch_regs
, &data
, NULL
);
8618 gcc_checking_assert (data
.expanding
.is_empty ());
8620 FINI_ELCD (data
, loc
);
8625 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
8626 additional parameters: WHERE specifies whether the note shall be emitted
8627 before or after instruction INSN. */
8630 emit_note_insn_var_location (variable_def
**varp
, emit_note_data
*data
)
8632 variable var
= *varp
;
8633 rtx_insn
*insn
= data
->insn
;
8634 enum emit_note_where where
= data
->where
;
8635 variable_table_type
*vars
= data
->vars
;
8638 int i
, j
, n_var_parts
;
8640 enum var_init_status initialized
= VAR_INIT_STATUS_UNINITIALIZED
;
8641 HOST_WIDE_INT last_limit
;
8642 tree type_size_unit
;
8643 HOST_WIDE_INT offsets
[MAX_VAR_PARTS
];
8644 rtx loc
[MAX_VAR_PARTS
];
8648 gcc_checking_assert (var
->onepart
== NOT_ONEPART
8649 || var
->onepart
== ONEPART_VDECL
);
8651 decl
= dv_as_decl (var
->dv
);
8657 for (i
= 0; i
< var
->n_var_parts
; i
++)
8658 if (var
->var_part
[i
].cur_loc
== NULL
&& var
->var_part
[i
].loc_chain
)
8659 var
->var_part
[i
].cur_loc
= var
->var_part
[i
].loc_chain
->loc
;
8660 for (i
= 0; i
< var
->n_var_parts
; i
++)
8662 machine_mode mode
, wider_mode
;
8664 HOST_WIDE_INT offset
;
8666 if (i
== 0 && var
->onepart
)
8668 gcc_checking_assert (var
->n_var_parts
== 1);
8670 initialized
= VAR_INIT_STATUS_INITIALIZED
;
8671 loc2
= vt_expand_1pvar (var
, vars
);
8675 if (last_limit
< VAR_PART_OFFSET (var
, i
))
8680 else if (last_limit
> VAR_PART_OFFSET (var
, i
))
8682 offset
= VAR_PART_OFFSET (var
, i
);
8683 loc2
= var
->var_part
[i
].cur_loc
;
8684 if (loc2
&& GET_CODE (loc2
) == MEM
8685 && GET_CODE (XEXP (loc2
, 0)) == VALUE
)
8687 rtx depval
= XEXP (loc2
, 0);
8689 loc2
= vt_expand_loc (loc2
, vars
);
8692 loc_exp_insert_dep (var
, depval
, vars
);
8699 gcc_checking_assert (GET_CODE (loc2
) != VALUE
);
8700 for (lc
= var
->var_part
[i
].loc_chain
; lc
; lc
= lc
->next
)
8701 if (var
->var_part
[i
].cur_loc
== lc
->loc
)
8703 initialized
= lc
->init
;
8709 offsets
[n_var_parts
] = offset
;
8715 loc
[n_var_parts
] = loc2
;
8716 mode
= GET_MODE (var
->var_part
[i
].cur_loc
);
8717 if (mode
== VOIDmode
&& var
->onepart
)
8718 mode
= DECL_MODE (decl
);
8719 last_limit
= offsets
[n_var_parts
] + GET_MODE_SIZE (mode
);
8721 /* Attempt to merge adjacent registers or memory. */
8722 wider_mode
= GET_MODE_WIDER_MODE (mode
);
8723 for (j
= i
+ 1; j
< var
->n_var_parts
; j
++)
8724 if (last_limit
<= VAR_PART_OFFSET (var
, j
))
8726 if (j
< var
->n_var_parts
8727 && wider_mode
!= VOIDmode
8728 && var
->var_part
[j
].cur_loc
8729 && mode
== GET_MODE (var
->var_part
[j
].cur_loc
)
8730 && (REG_P (loc
[n_var_parts
]) || MEM_P (loc
[n_var_parts
]))
8731 && last_limit
== (var
->onepart
? 0 : VAR_PART_OFFSET (var
, j
))
8732 && (loc2
= vt_expand_loc (var
->var_part
[j
].cur_loc
, vars
))
8733 && GET_CODE (loc
[n_var_parts
]) == GET_CODE (loc2
))
8737 if (REG_P (loc
[n_var_parts
])
8738 && hard_regno_nregs
[REGNO (loc
[n_var_parts
])][mode
] * 2
8739 == hard_regno_nregs
[REGNO (loc
[n_var_parts
])][wider_mode
]
8740 && end_hard_regno (mode
, REGNO (loc
[n_var_parts
]))
8743 if (! WORDS_BIG_ENDIAN
&& ! BYTES_BIG_ENDIAN
)
8744 new_loc
= simplify_subreg (wider_mode
, loc
[n_var_parts
],
8746 else if (WORDS_BIG_ENDIAN
&& BYTES_BIG_ENDIAN
)
8747 new_loc
= simplify_subreg (wider_mode
, loc2
, mode
, 0);
8750 if (!REG_P (new_loc
)
8751 || REGNO (new_loc
) != REGNO (loc
[n_var_parts
]))
8754 REG_ATTRS (new_loc
) = REG_ATTRS (loc
[n_var_parts
]);
8757 else if (MEM_P (loc
[n_var_parts
])
8758 && GET_CODE (XEXP (loc2
, 0)) == PLUS
8759 && REG_P (XEXP (XEXP (loc2
, 0), 0))
8760 && CONST_INT_P (XEXP (XEXP (loc2
, 0), 1)))
8762 if ((REG_P (XEXP (loc
[n_var_parts
], 0))
8763 && rtx_equal_p (XEXP (loc
[n_var_parts
], 0),
8764 XEXP (XEXP (loc2
, 0), 0))
8765 && INTVAL (XEXP (XEXP (loc2
, 0), 1))
8766 == GET_MODE_SIZE (mode
))
8767 || (GET_CODE (XEXP (loc
[n_var_parts
], 0)) == PLUS
8768 && CONST_INT_P (XEXP (XEXP (loc
[n_var_parts
], 0), 1))
8769 && rtx_equal_p (XEXP (XEXP (loc
[n_var_parts
], 0), 0),
8770 XEXP (XEXP (loc2
, 0), 0))
8771 && INTVAL (XEXP (XEXP (loc
[n_var_parts
], 0), 1))
8772 + GET_MODE_SIZE (mode
)
8773 == INTVAL (XEXP (XEXP (loc2
, 0), 1))))
8774 new_loc
= adjust_address_nv (loc
[n_var_parts
],
8780 loc
[n_var_parts
] = new_loc
;
8782 last_limit
= offsets
[n_var_parts
] + GET_MODE_SIZE (mode
);
8788 type_size_unit
= TYPE_SIZE_UNIT (TREE_TYPE (decl
));
8789 if ((unsigned HOST_WIDE_INT
) last_limit
< TREE_INT_CST_LOW (type_size_unit
))
8792 if (! flag_var_tracking_uninit
)
8793 initialized
= VAR_INIT_STATUS_INITIALIZED
;
8797 note_vl
= gen_rtx_VAR_LOCATION (VOIDmode
, decl
, NULL_RTX
, initialized
);
8798 else if (n_var_parts
== 1)
8802 if (offsets
[0] || GET_CODE (loc
[0]) == PARALLEL
)
8803 expr_list
= gen_rtx_EXPR_LIST (VOIDmode
, loc
[0], GEN_INT (offsets
[0]));
8807 note_vl
= gen_rtx_VAR_LOCATION (VOIDmode
, decl
, expr_list
, initialized
);
8809 else if (n_var_parts
)
8813 for (i
= 0; i
< n_var_parts
; i
++)
8815 = gen_rtx_EXPR_LIST (VOIDmode
, loc
[i
], GEN_INT (offsets
[i
]));
8817 parallel
= gen_rtx_PARALLEL (VOIDmode
,
8818 gen_rtvec_v (n_var_parts
, loc
));
8819 note_vl
= gen_rtx_VAR_LOCATION (VOIDmode
, decl
,
8820 parallel
, initialized
);
8823 if (where
!= EMIT_NOTE_BEFORE_INSN
)
8825 note
= emit_note_after (NOTE_INSN_VAR_LOCATION
, insn
);
8826 if (where
== EMIT_NOTE_AFTER_CALL_INSN
)
8827 NOTE_DURING_CALL_P (note
) = true;
8831 /* Make sure that the call related notes come first. */
8832 while (NEXT_INSN (insn
)
8834 && ((NOTE_KIND (insn
) == NOTE_INSN_VAR_LOCATION
8835 && NOTE_DURING_CALL_P (insn
))
8836 || NOTE_KIND (insn
) == NOTE_INSN_CALL_ARG_LOCATION
))
8837 insn
= NEXT_INSN (insn
);
8839 && ((NOTE_KIND (insn
) == NOTE_INSN_VAR_LOCATION
8840 && NOTE_DURING_CALL_P (insn
))
8841 || NOTE_KIND (insn
) == NOTE_INSN_CALL_ARG_LOCATION
))
8842 note
= emit_note_after (NOTE_INSN_VAR_LOCATION
, insn
);
8844 note
= emit_note_before (NOTE_INSN_VAR_LOCATION
, insn
);
8846 NOTE_VAR_LOCATION (note
) = note_vl
;
8848 set_dv_changed (var
->dv
, false);
8849 gcc_assert (var
->in_changed_variables
);
8850 var
->in_changed_variables
= false;
8851 changed_variables
->clear_slot (varp
);
8853 /* Continue traversing the hash table. */
8857 /* While traversing changed_variables, push onto DATA (a stack of RTX
8858 values) entries that aren't user variables. */
8861 var_track_values_to_stack (variable_def
**slot
,
8862 vec
<rtx
, va_heap
> *changed_values_stack
)
8864 variable var
= *slot
;
8866 if (var
->onepart
== ONEPART_VALUE
)
8867 changed_values_stack
->safe_push (dv_as_value (var
->dv
));
8868 else if (var
->onepart
== ONEPART_DEXPR
)
8869 changed_values_stack
->safe_push (DECL_RTL_KNOWN_SET (dv_as_decl (var
->dv
)));
8874 /* Remove from changed_variables the entry whose DV corresponds to
8875 value or debug_expr VAL. */
8877 remove_value_from_changed_variables (rtx val
)
8879 decl_or_value dv
= dv_from_rtx (val
);
8880 variable_def
**slot
;
8883 slot
= changed_variables
->find_slot_with_hash (dv
, dv_htab_hash (dv
),
8886 var
->in_changed_variables
= false;
8887 changed_variables
->clear_slot (slot
);
8890 /* If VAL (a value or debug_expr) has backlinks to variables actively
8891 dependent on it in HTAB or in CHANGED_VARIABLES, mark them as
8892 changed, adding to CHANGED_VALUES_STACK any dependencies that may
8893 have dependencies of their own to notify. */
8896 notify_dependents_of_changed_value (rtx val
, variable_table_type
*htab
,
8897 vec
<rtx
, va_heap
> *changed_values_stack
)
8899 variable_def
**slot
;
8902 decl_or_value dv
= dv_from_rtx (val
);
8904 slot
= changed_variables
->find_slot_with_hash (dv
, dv_htab_hash (dv
),
8907 slot
= htab
->find_slot_with_hash (dv
, dv_htab_hash (dv
), NO_INSERT
);
8909 slot
= dropped_values
->find_slot_with_hash (dv
, dv_htab_hash (dv
),
8913 while ((led
= VAR_LOC_DEP_LST (var
)))
8915 decl_or_value ldv
= led
->dv
;
8918 /* Deactivate and remove the backlink, as it was “used up”. It
8919 makes no sense to attempt to notify the same entity again:
8920 either it will be recomputed and re-register an active
8921 dependency, or it will still have the changed mark. */
8923 led
->next
->pprev
= led
->pprev
;
8925 *led
->pprev
= led
->next
;
8929 if (dv_changed_p (ldv
))
8932 switch (dv_onepart_p (ldv
))
8936 set_dv_changed (ldv
, true);
8937 changed_values_stack
->safe_push (dv_as_rtx (ldv
));
8941 ivar
= htab
->find_with_hash (ldv
, dv_htab_hash (ldv
));
8942 gcc_checking_assert (!VAR_LOC_DEP_LST (ivar
));
8943 variable_was_changed (ivar
, NULL
);
8948 ivar
= htab
->find_with_hash (ldv
, dv_htab_hash (ldv
));
8951 int i
= ivar
->n_var_parts
;
8954 rtx loc
= ivar
->var_part
[i
].cur_loc
;
8956 if (loc
&& GET_CODE (loc
) == MEM
8957 && XEXP (loc
, 0) == val
)
8959 variable_was_changed (ivar
, NULL
);
8972 /* Take out of changed_variables any entries that don't refer to use
8973 variables. Back-propagate change notifications from values and
8974 debug_exprs to their active dependencies in HTAB or in
8975 CHANGED_VARIABLES. */
8978 process_changed_values (variable_table_type
*htab
)
8982 auto_vec
<rtx
, 20> changed_values_stack
;
8984 /* Move values from changed_variables to changed_values_stack. */
8986 ->traverse
<vec
<rtx
, va_heap
>*, var_track_values_to_stack
>
8987 (&changed_values_stack
);
8989 /* Back-propagate change notifications in values while popping
8990 them from the stack. */
8991 for (n
= i
= changed_values_stack
.length ();
8992 i
> 0; i
= changed_values_stack
.length ())
8994 val
= changed_values_stack
.pop ();
8995 notify_dependents_of_changed_value (val
, htab
, &changed_values_stack
);
8997 /* This condition will hold when visiting each of the entries
8998 originally in changed_variables. We can't remove them
8999 earlier because this could drop the backlinks before we got a
9000 chance to use them. */
9003 remove_value_from_changed_variables (val
);
9009 /* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
9010 CHANGED_VARIABLES and delete this chain. WHERE specifies whether
9011 the notes shall be emitted before of after instruction INSN. */
9014 emit_notes_for_changes (rtx_insn
*insn
, enum emit_note_where where
,
9017 emit_note_data data
;
9018 variable_table_type
*htab
= shared_hash_htab (vars
);
9020 if (!changed_variables
->elements ())
9023 if (MAY_HAVE_DEBUG_INSNS
)
9024 process_changed_values (htab
);
9031 ->traverse
<emit_note_data
*, emit_note_insn_var_location
> (&data
);
9034 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
9035 same variable in hash table DATA or is not there at all. */
9038 emit_notes_for_differences_1 (variable_def
**slot
, variable_table_type
*new_vars
)
9040 variable old_var
, new_var
;
9043 new_var
= new_vars
->find_with_hash (old_var
->dv
, dv_htab_hash (old_var
->dv
));
9047 /* Variable has disappeared. */
9048 variable empty_var
= NULL
;
9050 if (old_var
->onepart
== ONEPART_VALUE
9051 || old_var
->onepart
== ONEPART_DEXPR
)
9053 empty_var
= variable_from_dropped (old_var
->dv
, NO_INSERT
);
9056 gcc_checking_assert (!empty_var
->in_changed_variables
);
9057 if (!VAR_LOC_1PAUX (old_var
))
9059 VAR_LOC_1PAUX (old_var
) = VAR_LOC_1PAUX (empty_var
);
9060 VAR_LOC_1PAUX (empty_var
) = NULL
;
9063 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var
));
9069 empty_var
= onepart_pool (old_var
->onepart
).allocate ();
9070 empty_var
->dv
= old_var
->dv
;
9071 empty_var
->refcount
= 0;
9072 empty_var
->n_var_parts
= 0;
9073 empty_var
->onepart
= old_var
->onepart
;
9074 empty_var
->in_changed_variables
= false;
9077 if (empty_var
->onepart
)
9079 /* Propagate the auxiliary data to (ultimately)
9080 changed_variables. */
9081 empty_var
->var_part
[0].loc_chain
= NULL
;
9082 empty_var
->var_part
[0].cur_loc
= NULL
;
9083 VAR_LOC_1PAUX (empty_var
) = VAR_LOC_1PAUX (old_var
);
9084 VAR_LOC_1PAUX (old_var
) = NULL
;
9086 variable_was_changed (empty_var
, NULL
);
9087 /* Continue traversing the hash table. */
9090 /* Update cur_loc and one-part auxiliary data, before new_var goes
9091 through variable_was_changed. */
9092 if (old_var
!= new_var
&& new_var
->onepart
)
9094 gcc_checking_assert (VAR_LOC_1PAUX (new_var
) == NULL
);
9095 VAR_LOC_1PAUX (new_var
) = VAR_LOC_1PAUX (old_var
);
9096 VAR_LOC_1PAUX (old_var
) = NULL
;
9097 new_var
->var_part
[0].cur_loc
= old_var
->var_part
[0].cur_loc
;
9099 if (variable_different_p (old_var
, new_var
))
9100 variable_was_changed (new_var
, NULL
);
9102 /* Continue traversing the hash table. */
9106 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
9110 emit_notes_for_differences_2 (variable_def
**slot
, variable_table_type
*old_vars
)
9112 variable old_var
, new_var
;
9115 old_var
= old_vars
->find_with_hash (new_var
->dv
, dv_htab_hash (new_var
->dv
));
9119 for (i
= 0; i
< new_var
->n_var_parts
; i
++)
9120 new_var
->var_part
[i
].cur_loc
= NULL
;
9121 variable_was_changed (new_var
, NULL
);
9124 /* Continue traversing the hash table. */
9128 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
9132 emit_notes_for_differences (rtx_insn
*insn
, dataflow_set
*old_set
,
9133 dataflow_set
*new_set
)
9135 shared_hash_htab (old_set
->vars
)
9136 ->traverse
<variable_table_type
*, emit_notes_for_differences_1
>
9137 (shared_hash_htab (new_set
->vars
));
9138 shared_hash_htab (new_set
->vars
)
9139 ->traverse
<variable_table_type
*, emit_notes_for_differences_2
>
9140 (shared_hash_htab (old_set
->vars
));
9141 emit_notes_for_changes (insn
, EMIT_NOTE_BEFORE_INSN
, new_set
->vars
);
9144 /* Return the next insn after INSN that is not a NOTE_INSN_VAR_LOCATION. */
9147 next_non_note_insn_var_location (rtx_insn
*insn
)
9151 insn
= NEXT_INSN (insn
);
9154 || NOTE_KIND (insn
) != NOTE_INSN_VAR_LOCATION
)
9161 /* Emit the notes for changes of location parts in the basic block BB. */
9164 emit_notes_in_bb (basic_block bb
, dataflow_set
*set
)
9167 micro_operation
*mo
;
9169 dataflow_set_clear (set
);
9170 dataflow_set_copy (set
, &VTI (bb
)->in
);
9172 FOR_EACH_VEC_ELT (VTI (bb
)->mos
, i
, mo
)
9174 rtx_insn
*insn
= mo
->insn
;
9175 rtx_insn
*next_insn
= next_non_note_insn_var_location (insn
);
9180 dataflow_set_clear_at_call (set
);
9181 emit_notes_for_changes (insn
, EMIT_NOTE_AFTER_CALL_INSN
, set
->vars
);
9183 rtx arguments
= mo
->u
.loc
, *p
= &arguments
;
9187 XEXP (XEXP (*p
, 0), 1)
9188 = vt_expand_loc (XEXP (XEXP (*p
, 0), 1),
9189 shared_hash_htab (set
->vars
));
9190 /* If expansion is successful, keep it in the list. */
9191 if (XEXP (XEXP (*p
, 0), 1))
9193 /* Otherwise, if the following item is data_value for it,
9195 else if (XEXP (*p
, 1)
9196 && REG_P (XEXP (XEXP (*p
, 0), 0))
9197 && MEM_P (XEXP (XEXP (XEXP (*p
, 1), 0), 0))
9198 && REG_P (XEXP (XEXP (XEXP (XEXP (*p
, 1), 0), 0),
9200 && REGNO (XEXP (XEXP (*p
, 0), 0))
9201 == REGNO (XEXP (XEXP (XEXP (XEXP (*p
, 1), 0),
9203 *p
= XEXP (XEXP (*p
, 1), 1);
9204 /* Just drop this item. */
9208 note
= emit_note_after (NOTE_INSN_CALL_ARG_LOCATION
, insn
);
9209 NOTE_VAR_LOCATION (note
) = arguments
;
9215 rtx loc
= mo
->u
.loc
;
9218 var_reg_set (set
, loc
, VAR_INIT_STATUS_UNINITIALIZED
, NULL
);
9220 var_mem_set (set
, loc
, VAR_INIT_STATUS_UNINITIALIZED
, NULL
);
9222 emit_notes_for_changes (insn
, EMIT_NOTE_BEFORE_INSN
, set
->vars
);
9228 rtx loc
= mo
->u
.loc
;
9232 if (GET_CODE (loc
) == CONCAT
)
9234 val
= XEXP (loc
, 0);
9235 vloc
= XEXP (loc
, 1);
9243 var
= PAT_VAR_LOCATION_DECL (vloc
);
9245 clobber_variable_part (set
, NULL_RTX
,
9246 dv_from_decl (var
), 0, NULL_RTX
);
9249 if (VAL_NEEDS_RESOLUTION (loc
))
9250 val_resolve (set
, val
, PAT_VAR_LOCATION_LOC (vloc
), insn
);
9251 set_variable_part (set
, val
, dv_from_decl (var
), 0,
9252 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
,
9255 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc
)))
9256 set_variable_part (set
, PAT_VAR_LOCATION_LOC (vloc
),
9257 dv_from_decl (var
), 0,
9258 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
,
9261 emit_notes_for_changes (insn
, EMIT_NOTE_AFTER_INSN
, set
->vars
);
9267 rtx loc
= mo
->u
.loc
;
9268 rtx val
, vloc
, uloc
;
9270 vloc
= uloc
= XEXP (loc
, 1);
9271 val
= XEXP (loc
, 0);
9273 if (GET_CODE (val
) == CONCAT
)
9275 uloc
= XEXP (val
, 1);
9276 val
= XEXP (val
, 0);
9279 if (VAL_NEEDS_RESOLUTION (loc
))
9280 val_resolve (set
, val
, vloc
, insn
);
9282 val_store (set
, val
, uloc
, insn
, false);
9284 if (VAL_HOLDS_TRACK_EXPR (loc
))
9286 if (GET_CODE (uloc
) == REG
)
9287 var_reg_set (set
, uloc
, VAR_INIT_STATUS_UNINITIALIZED
,
9289 else if (GET_CODE (uloc
) == MEM
)
9290 var_mem_set (set
, uloc
, VAR_INIT_STATUS_UNINITIALIZED
,
9294 emit_notes_for_changes (insn
, EMIT_NOTE_BEFORE_INSN
, set
->vars
);
9300 rtx loc
= mo
->u
.loc
;
9301 rtx val
, vloc
, uloc
;
9305 uloc
= XEXP (vloc
, 1);
9306 val
= XEXP (vloc
, 0);
9309 if (GET_CODE (uloc
) == SET
)
9311 dstv
= SET_DEST (uloc
);
9312 srcv
= SET_SRC (uloc
);
9320 if (GET_CODE (val
) == CONCAT
)
9322 dstv
= vloc
= XEXP (val
, 1);
9323 val
= XEXP (val
, 0);
9326 if (GET_CODE (vloc
) == SET
)
9328 srcv
= SET_SRC (vloc
);
9330 gcc_assert (val
!= srcv
);
9331 gcc_assert (vloc
== uloc
|| VAL_NEEDS_RESOLUTION (loc
));
9333 dstv
= vloc
= SET_DEST (vloc
);
9335 if (VAL_NEEDS_RESOLUTION (loc
))
9336 val_resolve (set
, val
, srcv
, insn
);
9338 else if (VAL_NEEDS_RESOLUTION (loc
))
9340 gcc_assert (GET_CODE (uloc
) == SET
9341 && GET_CODE (SET_SRC (uloc
)) == REG
);
9342 val_resolve (set
, val
, SET_SRC (uloc
), insn
);
9345 if (VAL_HOLDS_TRACK_EXPR (loc
))
9347 if (VAL_EXPR_IS_CLOBBERED (loc
))
9350 var_reg_delete (set
, uloc
, true);
9351 else if (MEM_P (uloc
))
9353 gcc_assert (MEM_P (dstv
));
9354 gcc_assert (MEM_ATTRS (dstv
) == MEM_ATTRS (uloc
));
9355 var_mem_delete (set
, dstv
, true);
9360 bool copied_p
= VAL_EXPR_IS_COPIED (loc
);
9361 rtx src
= NULL
, dst
= uloc
;
9362 enum var_init_status status
= VAR_INIT_STATUS_INITIALIZED
;
9364 if (GET_CODE (uloc
) == SET
)
9366 src
= SET_SRC (uloc
);
9367 dst
= SET_DEST (uloc
);
9372 status
= find_src_status (set
, src
);
9374 src
= find_src_set_src (set
, src
);
9378 var_reg_delete_and_set (set
, dst
, !copied_p
,
9380 else if (MEM_P (dst
))
9382 gcc_assert (MEM_P (dstv
));
9383 gcc_assert (MEM_ATTRS (dstv
) == MEM_ATTRS (dst
));
9384 var_mem_delete_and_set (set
, dstv
, !copied_p
,
9389 else if (REG_P (uloc
))
9390 var_regno_delete (set
, REGNO (uloc
));
9391 else if (MEM_P (uloc
))
9393 gcc_checking_assert (GET_CODE (vloc
) == MEM
);
9394 gcc_checking_assert (vloc
== dstv
);
9396 clobber_overlapping_mems (set
, vloc
);
9399 val_store (set
, val
, dstv
, insn
, true);
9401 emit_notes_for_changes (next_insn
, EMIT_NOTE_BEFORE_INSN
,
9408 rtx loc
= mo
->u
.loc
;
9411 if (GET_CODE (loc
) == SET
)
9413 set_src
= SET_SRC (loc
);
9414 loc
= SET_DEST (loc
);
9418 var_reg_delete_and_set (set
, loc
, true, VAR_INIT_STATUS_INITIALIZED
,
9421 var_mem_delete_and_set (set
, loc
, true, VAR_INIT_STATUS_INITIALIZED
,
9424 emit_notes_for_changes (next_insn
, EMIT_NOTE_BEFORE_INSN
,
9431 rtx loc
= mo
->u
.loc
;
9432 enum var_init_status src_status
;
9435 if (GET_CODE (loc
) == SET
)
9437 set_src
= SET_SRC (loc
);
9438 loc
= SET_DEST (loc
);
9441 src_status
= find_src_status (set
, set_src
);
9442 set_src
= find_src_set_src (set
, set_src
);
9445 var_reg_delete_and_set (set
, loc
, false, src_status
, set_src
);
9447 var_mem_delete_and_set (set
, loc
, false, src_status
, set_src
);
9449 emit_notes_for_changes (next_insn
, EMIT_NOTE_BEFORE_INSN
,
9456 rtx loc
= mo
->u
.loc
;
9459 var_reg_delete (set
, loc
, false);
9461 var_mem_delete (set
, loc
, false);
9463 emit_notes_for_changes (insn
, EMIT_NOTE_AFTER_INSN
, set
->vars
);
9469 rtx loc
= mo
->u
.loc
;
9472 var_reg_delete (set
, loc
, true);
9474 var_mem_delete (set
, loc
, true);
9476 emit_notes_for_changes (next_insn
, EMIT_NOTE_BEFORE_INSN
,
9482 set
->stack_adjust
+= mo
->u
.adjust
;
9488 /* Emit notes for the whole function. */
9491 vt_emit_notes (void)
9496 gcc_assert (!changed_variables
->elements ());
9498 /* Free memory occupied by the out hash tables, as they aren't used
9500 FOR_EACH_BB_FN (bb
, cfun
)
9501 dataflow_set_clear (&VTI (bb
)->out
);
9503 /* Enable emitting notes by functions (mainly by set_variable_part and
9504 delete_variable_part). */
9507 if (MAY_HAVE_DEBUG_INSNS
)
9509 dropped_values
= new variable_table_type (cselib_get_next_uid () * 2);
9512 dataflow_set_init (&cur
);
9514 FOR_EACH_BB_FN (bb
, cfun
)
9516 /* Emit the notes for changes of variable locations between two
9517 subsequent basic blocks. */
9518 emit_notes_for_differences (BB_HEAD (bb
), &cur
, &VTI (bb
)->in
);
9520 if (MAY_HAVE_DEBUG_INSNS
)
9521 local_get_addr_cache
= new hash_map
<rtx
, rtx
>;
9523 /* Emit the notes for the changes in the basic block itself. */
9524 emit_notes_in_bb (bb
, &cur
);
9526 if (MAY_HAVE_DEBUG_INSNS
)
9527 delete local_get_addr_cache
;
9528 local_get_addr_cache
= NULL
;
9530 /* Free memory occupied by the in hash table, we won't need it
9532 dataflow_set_clear (&VTI (bb
)->in
);
9534 #ifdef ENABLE_CHECKING
9535 shared_hash_htab (cur
.vars
)
9536 ->traverse
<variable_table_type
*, emit_notes_for_differences_1
>
9537 (shared_hash_htab (empty_shared_hash
));
9539 dataflow_set_destroy (&cur
);
9541 if (MAY_HAVE_DEBUG_INSNS
)
9542 delete dropped_values
;
9543 dropped_values
= NULL
;
9548 /* If there is a declaration and offset associated with register/memory RTL
9549 assign declaration to *DECLP and offset to *OFFSETP, and return true. */
9552 vt_get_decl_and_offset (rtx rtl
, tree
*declp
, HOST_WIDE_INT
*offsetp
)
9556 if (REG_ATTRS (rtl
))
9558 *declp
= REG_EXPR (rtl
);
9559 *offsetp
= REG_OFFSET (rtl
);
9563 else if (GET_CODE (rtl
) == PARALLEL
)
9565 tree decl
= NULL_TREE
;
9566 HOST_WIDE_INT offset
= MAX_VAR_PARTS
;
9567 int len
= XVECLEN (rtl
, 0), i
;
9569 for (i
= 0; i
< len
; i
++)
9571 rtx reg
= XEXP (XVECEXP (rtl
, 0, i
), 0);
9572 if (!REG_P (reg
) || !REG_ATTRS (reg
))
9575 decl
= REG_EXPR (reg
);
9576 if (REG_EXPR (reg
) != decl
)
9578 if (REG_OFFSET (reg
) < offset
)
9579 offset
= REG_OFFSET (reg
);
9589 else if (MEM_P (rtl
))
9591 if (MEM_ATTRS (rtl
))
9593 *declp
= MEM_EXPR (rtl
);
9594 *offsetp
= INT_MEM_OFFSET (rtl
);
9601 /* Record the value for the ENTRY_VALUE of RTL as a global equivalence
9605 record_entry_value (cselib_val
*val
, rtx rtl
)
9607 rtx ev
= gen_rtx_ENTRY_VALUE (GET_MODE (rtl
));
9609 ENTRY_VALUE_EXP (ev
) = rtl
;
9611 cselib_add_permanent_equiv (val
, ev
, get_insns ());
9614 /* Insert function parameter PARM in IN and OUT sets of ENTRY_BLOCK. */
9617 vt_add_function_parameter (tree parm
)
9619 rtx decl_rtl
= DECL_RTL_IF_SET (parm
);
9620 rtx incoming
= DECL_INCOMING_RTL (parm
);
9623 HOST_WIDE_INT offset
;
9627 if (TREE_CODE (parm
) != PARM_DECL
)
9630 if (!decl_rtl
|| !incoming
)
9633 if (GET_MODE (decl_rtl
) == BLKmode
|| GET_MODE (incoming
) == BLKmode
)
9636 /* If there is a DRAP register or a pseudo in internal_arg_pointer,
9637 rewrite the incoming location of parameters passed on the stack
9638 into MEMs based on the argument pointer, so that incoming doesn't
9639 depend on a pseudo. */
9640 if (MEM_P (incoming
)
9641 && (XEXP (incoming
, 0) == crtl
->args
.internal_arg_pointer
9642 || (GET_CODE (XEXP (incoming
, 0)) == PLUS
9643 && XEXP (XEXP (incoming
, 0), 0)
9644 == crtl
->args
.internal_arg_pointer
9645 && CONST_INT_P (XEXP (XEXP (incoming
, 0), 1)))))
9647 HOST_WIDE_INT off
= -FIRST_PARM_OFFSET (current_function_decl
);
9648 if (GET_CODE (XEXP (incoming
, 0)) == PLUS
)
9649 off
+= INTVAL (XEXP (XEXP (incoming
, 0), 1));
9651 = replace_equiv_address_nv (incoming
,
9652 plus_constant (Pmode
,
9653 arg_pointer_rtx
, off
));
9656 #ifdef HAVE_window_save
9657 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
9658 If the target machine has an explicit window save instruction, the
9659 actual entry value is the corresponding OUTGOING_REGNO instead. */
9660 if (HAVE_window_save
&& !crtl
->uses_only_leaf_regs
)
9662 if (REG_P (incoming
)
9663 && HARD_REGISTER_P (incoming
)
9664 && OUTGOING_REGNO (REGNO (incoming
)) != REGNO (incoming
))
9667 p
.incoming
= incoming
;
9669 = gen_rtx_REG_offset (incoming
, GET_MODE (incoming
),
9670 OUTGOING_REGNO (REGNO (incoming
)), 0);
9671 p
.outgoing
= incoming
;
9672 vec_safe_push (windowed_parm_regs
, p
);
9674 else if (GET_CODE (incoming
) == PARALLEL
)
9677 = gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (XVECLEN (incoming
, 0)));
9680 for (i
= 0; i
< XVECLEN (incoming
, 0); i
++)
9682 rtx reg
= XEXP (XVECEXP (incoming
, 0, i
), 0);
9685 reg
= gen_rtx_REG_offset (reg
, GET_MODE (reg
),
9686 OUTGOING_REGNO (REGNO (reg
)), 0);
9688 XVECEXP (outgoing
, 0, i
)
9689 = gen_rtx_EXPR_LIST (VOIDmode
, reg
,
9690 XEXP (XVECEXP (incoming
, 0, i
), 1));
9691 vec_safe_push (windowed_parm_regs
, p
);
9694 incoming
= outgoing
;
9696 else if (MEM_P (incoming
)
9697 && REG_P (XEXP (incoming
, 0))
9698 && HARD_REGISTER_P (XEXP (incoming
, 0)))
9700 rtx reg
= XEXP (incoming
, 0);
9701 if (OUTGOING_REGNO (REGNO (reg
)) != REGNO (reg
))
9705 reg
= gen_raw_REG (GET_MODE (reg
), OUTGOING_REGNO (REGNO (reg
)));
9707 vec_safe_push (windowed_parm_regs
, p
);
9708 incoming
= replace_equiv_address_nv (incoming
, reg
);
9714 if (!vt_get_decl_and_offset (incoming
, &decl
, &offset
))
9716 if (MEM_P (incoming
))
9718 /* This means argument is passed by invisible reference. */
9724 if (!vt_get_decl_and_offset (decl_rtl
, &decl
, &offset
))
9726 offset
+= byte_lowpart_offset (GET_MODE (incoming
),
9727 GET_MODE (decl_rtl
));
9736 /* If that DECL_RTL wasn't a pseudo that got spilled to
9737 memory, bail out. Otherwise, the spill slot sharing code
9738 will force the memory to reference spill_slot_decl (%sfp),
9739 so we don't match above. That's ok, the pseudo must have
9740 referenced the entire parameter, so just reset OFFSET. */
9741 if (decl
!= get_spill_slot_decl (false))
9746 if (!track_loc_p (incoming
, parm
, offset
, false, &mode
, &offset
))
9749 out
= &VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun
))->out
;
9751 dv
= dv_from_decl (parm
);
9753 if (target_for_debug_bind (parm
)
9754 /* We can't deal with these right now, because this kind of
9755 variable is single-part. ??? We could handle parallels
9756 that describe multiple locations for the same single
9757 value, but ATM we don't. */
9758 && GET_CODE (incoming
) != PARALLEL
)
9763 /* ??? We shouldn't ever hit this, but it may happen because
9764 arguments passed by invisible reference aren't dealt with
9765 above: incoming-rtl will have Pmode rather than the
9766 expected mode for the type. */
9770 lowpart
= var_lowpart (mode
, incoming
);
9774 val
= cselib_lookup_from_insn (lowpart
, mode
, true,
9775 VOIDmode
, get_insns ());
9777 /* ??? Float-typed values in memory are not handled by
9781 preserve_value (val
);
9782 set_variable_part (out
, val
->val_rtx
, dv
, offset
,
9783 VAR_INIT_STATUS_INITIALIZED
, NULL
, INSERT
);
9784 dv
= dv_from_value (val
->val_rtx
);
9787 if (MEM_P (incoming
))
9789 val
= cselib_lookup_from_insn (XEXP (incoming
, 0), mode
, true,
9790 VOIDmode
, get_insns ());
9793 preserve_value (val
);
9794 incoming
= replace_equiv_address_nv (incoming
, val
->val_rtx
);
9799 if (REG_P (incoming
))
9801 incoming
= var_lowpart (mode
, incoming
);
9802 gcc_assert (REGNO (incoming
) < FIRST_PSEUDO_REGISTER
);
9803 attrs_list_insert (&out
->regs
[REGNO (incoming
)], dv
, offset
,
9805 set_variable_part (out
, incoming
, dv
, offset
,
9806 VAR_INIT_STATUS_INITIALIZED
, NULL
, INSERT
);
9807 if (dv_is_value_p (dv
))
9809 record_entry_value (CSELIB_VAL_PTR (dv_as_value (dv
)), incoming
);
9810 if (TREE_CODE (TREE_TYPE (parm
)) == REFERENCE_TYPE
9811 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (parm
))))
9813 machine_mode indmode
9814 = TYPE_MODE (TREE_TYPE (TREE_TYPE (parm
)));
9815 rtx mem
= gen_rtx_MEM (indmode
, incoming
);
9816 cselib_val
*val
= cselib_lookup_from_insn (mem
, indmode
, true,
9821 preserve_value (val
);
9822 record_entry_value (val
, mem
);
9823 set_variable_part (out
, mem
, dv_from_value (val
->val_rtx
), 0,
9824 VAR_INIT_STATUS_INITIALIZED
, NULL
, INSERT
);
9829 else if (GET_CODE (incoming
) == PARALLEL
&& !dv_onepart_p (dv
))
9833 for (i
= 0; i
< XVECLEN (incoming
, 0); i
++)
9835 rtx reg
= XEXP (XVECEXP (incoming
, 0, i
), 0);
9836 offset
= REG_OFFSET (reg
);
9837 gcc_assert (REGNO (reg
) < FIRST_PSEUDO_REGISTER
);
9838 attrs_list_insert (&out
->regs
[REGNO (reg
)], dv
, offset
, reg
);
9839 set_variable_part (out
, reg
, dv
, offset
,
9840 VAR_INIT_STATUS_INITIALIZED
, NULL
, INSERT
);
9843 else if (MEM_P (incoming
))
9845 incoming
= var_lowpart (mode
, incoming
);
9846 set_variable_part (out
, incoming
, dv
, offset
,
9847 VAR_INIT_STATUS_INITIALIZED
, NULL
, INSERT
);
9851 /* Insert function parameters to IN and OUT sets of ENTRY_BLOCK. */
9854 vt_add_function_parameters (void)
9858 for (parm
= DECL_ARGUMENTS (current_function_decl
);
9859 parm
; parm
= DECL_CHAIN (parm
))
9860 if (!POINTER_BOUNDS_P (parm
))
9861 vt_add_function_parameter (parm
);
9863 if (DECL_HAS_VALUE_EXPR_P (DECL_RESULT (current_function_decl
)))
9865 tree vexpr
= DECL_VALUE_EXPR (DECL_RESULT (current_function_decl
));
9867 if (TREE_CODE (vexpr
) == INDIRECT_REF
)
9868 vexpr
= TREE_OPERAND (vexpr
, 0);
9870 if (TREE_CODE (vexpr
) == PARM_DECL
9871 && DECL_ARTIFICIAL (vexpr
)
9872 && !DECL_IGNORED_P (vexpr
)
9873 && DECL_NAMELESS (vexpr
))
9874 vt_add_function_parameter (vexpr
);
9878 /* Initialize cfa_base_rtx, create a preserved VALUE for it and
9879 ensure it isn't flushed during cselib_reset_table.
9880 Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
9881 has been eliminated. */
9884 vt_init_cfa_base (void)
9888 #ifdef FRAME_POINTER_CFA_OFFSET
9889 cfa_base_rtx
= frame_pointer_rtx
;
9890 cfa_base_offset
= -FRAME_POINTER_CFA_OFFSET (current_function_decl
);
9892 cfa_base_rtx
= arg_pointer_rtx
;
9893 cfa_base_offset
= -ARG_POINTER_CFA_OFFSET (current_function_decl
);
9895 if (cfa_base_rtx
== hard_frame_pointer_rtx
9896 || !fixed_regs
[REGNO (cfa_base_rtx
)])
9898 cfa_base_rtx
= NULL_RTX
;
9901 if (!MAY_HAVE_DEBUG_INSNS
)
9904 /* Tell alias analysis that cfa_base_rtx should share
9905 find_base_term value with stack pointer or hard frame pointer. */
9906 if (!frame_pointer_needed
)
9907 vt_equate_reg_base_value (cfa_base_rtx
, stack_pointer_rtx
);
9908 else if (!crtl
->stack_realign_tried
)
9909 vt_equate_reg_base_value (cfa_base_rtx
, hard_frame_pointer_rtx
);
9911 val
= cselib_lookup_from_insn (cfa_base_rtx
, GET_MODE (cfa_base_rtx
), 1,
9912 VOIDmode
, get_insns ());
9913 preserve_value (val
);
9914 cselib_preserve_cfa_base_value (val
, REGNO (cfa_base_rtx
));
9917 /* Allocate and initialize the data structures for variable tracking
9918 and parse the RTL to get the micro operations. */
9921 vt_initialize (void)
9924 HOST_WIDE_INT fp_cfa_offset
= -1;
9926 alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def
));
9928 empty_shared_hash
= new shared_hash_def
;
9929 empty_shared_hash
->refcount
= 1;
9930 empty_shared_hash
->htab
= new variable_table_type (1);
9931 changed_variables
= new variable_table_type (10);
9933 /* Init the IN and OUT sets. */
9934 FOR_ALL_BB_FN (bb
, cfun
)
9936 VTI (bb
)->visited
= false;
9937 VTI (bb
)->flooded
= false;
9938 dataflow_set_init (&VTI (bb
)->in
);
9939 dataflow_set_init (&VTI (bb
)->out
);
9940 VTI (bb
)->permp
= NULL
;
9943 if (MAY_HAVE_DEBUG_INSNS
)
9945 cselib_init (CSELIB_RECORD_MEMORY
| CSELIB_PRESERVE_CONSTANTS
);
9946 scratch_regs
= BITMAP_ALLOC (NULL
);
9947 preserved_values
.create (256);
9948 global_get_addr_cache
= new hash_map
<rtx
, rtx
>;
9952 scratch_regs
= NULL
;
9953 global_get_addr_cache
= NULL
;
9956 if (MAY_HAVE_DEBUG_INSNS
)
9962 #ifdef FRAME_POINTER_CFA_OFFSET
9963 reg
= frame_pointer_rtx
;
9964 ofst
= FRAME_POINTER_CFA_OFFSET (current_function_decl
);
9966 reg
= arg_pointer_rtx
;
9967 ofst
= ARG_POINTER_CFA_OFFSET (current_function_decl
);
9970 ofst
-= INCOMING_FRAME_SP_OFFSET
;
9972 val
= cselib_lookup_from_insn (reg
, GET_MODE (reg
), 1,
9973 VOIDmode
, get_insns ());
9974 preserve_value (val
);
9975 if (reg
!= hard_frame_pointer_rtx
&& fixed_regs
[REGNO (reg
)])
9976 cselib_preserve_cfa_base_value (val
, REGNO (reg
));
9977 expr
= plus_constant (GET_MODE (stack_pointer_rtx
),
9978 stack_pointer_rtx
, -ofst
);
9979 cselib_add_permanent_equiv (val
, expr
, get_insns ());
9983 val
= cselib_lookup_from_insn (stack_pointer_rtx
,
9984 GET_MODE (stack_pointer_rtx
), 1,
9985 VOIDmode
, get_insns ());
9986 preserve_value (val
);
9987 expr
= plus_constant (GET_MODE (reg
), reg
, ofst
);
9988 cselib_add_permanent_equiv (val
, expr
, get_insns ());
9992 /* In order to factor out the adjustments made to the stack pointer or to
9993 the hard frame pointer and thus be able to use DW_OP_fbreg operations
9994 instead of individual location lists, we're going to rewrite MEMs based
9995 on them into MEMs based on the CFA by de-eliminating stack_pointer_rtx
9996 or hard_frame_pointer_rtx to the virtual CFA pointer frame_pointer_rtx
9997 resp. arg_pointer_rtx. We can do this either when there is no frame
9998 pointer in the function and stack adjustments are consistent for all
9999 basic blocks or when there is a frame pointer and no stack realignment.
10000 But we first have to check that frame_pointer_rtx resp. arg_pointer_rtx
10001 has been eliminated. */
10002 if (!frame_pointer_needed
)
10006 if (!vt_stack_adjustments ())
10009 #ifdef FRAME_POINTER_CFA_OFFSET
10010 reg
= frame_pointer_rtx
;
10012 reg
= arg_pointer_rtx
;
10014 elim
= eliminate_regs (reg
, VOIDmode
, NULL_RTX
);
10017 if (GET_CODE (elim
) == PLUS
)
10018 elim
= XEXP (elim
, 0);
10019 if (elim
== stack_pointer_rtx
)
10020 vt_init_cfa_base ();
10023 else if (!crtl
->stack_realign_tried
)
10027 #ifdef FRAME_POINTER_CFA_OFFSET
10028 reg
= frame_pointer_rtx
;
10029 fp_cfa_offset
= FRAME_POINTER_CFA_OFFSET (current_function_decl
);
10031 reg
= arg_pointer_rtx
;
10032 fp_cfa_offset
= ARG_POINTER_CFA_OFFSET (current_function_decl
);
10034 elim
= eliminate_regs (reg
, VOIDmode
, NULL_RTX
);
10037 if (GET_CODE (elim
) == PLUS
)
10039 fp_cfa_offset
-= INTVAL (XEXP (elim
, 1));
10040 elim
= XEXP (elim
, 0);
10042 if (elim
!= hard_frame_pointer_rtx
)
10043 fp_cfa_offset
= -1;
10046 fp_cfa_offset
= -1;
10049 /* If the stack is realigned and a DRAP register is used, we're going to
10050 rewrite MEMs based on it representing incoming locations of parameters
10051 passed on the stack into MEMs based on the argument pointer. Although
10052 we aren't going to rewrite other MEMs, we still need to initialize the
10053 virtual CFA pointer in order to ensure that the argument pointer will
10054 be seen as a constant throughout the function.
10056 ??? This doesn't work if FRAME_POINTER_CFA_OFFSET is defined. */
10057 else if (stack_realign_drap
)
10061 #ifdef FRAME_POINTER_CFA_OFFSET
10062 reg
= frame_pointer_rtx
;
10064 reg
= arg_pointer_rtx
;
10066 elim
= eliminate_regs (reg
, VOIDmode
, NULL_RTX
);
10069 if (GET_CODE (elim
) == PLUS
)
10070 elim
= XEXP (elim
, 0);
10071 if (elim
== hard_frame_pointer_rtx
)
10072 vt_init_cfa_base ();
10076 hard_frame_pointer_adjustment
= -1;
10078 vt_add_function_parameters ();
10080 FOR_EACH_BB_FN (bb
, cfun
)
10083 HOST_WIDE_INT pre
, post
= 0;
10084 basic_block first_bb
, last_bb
;
10086 if (MAY_HAVE_DEBUG_INSNS
)
10088 cselib_record_sets_hook
= add_with_sets
;
10089 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
10090 fprintf (dump_file
, "first value: %i\n",
10091 cselib_get_next_uid ());
10098 if (bb
->next_bb
== EXIT_BLOCK_PTR_FOR_FN (cfun
)
10099 || ! single_pred_p (bb
->next_bb
))
10101 e
= find_edge (bb
, bb
->next_bb
);
10102 if (! e
|| (e
->flags
& EDGE_FALLTHRU
) == 0)
10108 /* Add the micro-operations to the vector. */
10109 FOR_BB_BETWEEN (bb
, first_bb
, last_bb
->next_bb
, next_bb
)
10111 HOST_WIDE_INT offset
= VTI (bb
)->out
.stack_adjust
;
10112 VTI (bb
)->out
.stack_adjust
= VTI (bb
)->in
.stack_adjust
;
10113 for (insn
= BB_HEAD (bb
); insn
!= NEXT_INSN (BB_END (bb
));
10114 insn
= NEXT_INSN (insn
))
10118 if (!frame_pointer_needed
)
10120 insn_stack_adjust_offset_pre_post (insn
, &pre
, &post
);
10123 micro_operation mo
;
10124 mo
.type
= MO_ADJUST
;
10127 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
10128 log_op_type (PATTERN (insn
), bb
, insn
,
10129 MO_ADJUST
, dump_file
);
10130 VTI (bb
)->mos
.safe_push (mo
);
10131 VTI (bb
)->out
.stack_adjust
+= pre
;
10135 cselib_hook_called
= false;
10136 adjust_insn (bb
, insn
);
10137 if (MAY_HAVE_DEBUG_INSNS
)
10140 prepare_call_arguments (bb
, insn
);
10141 cselib_process_insn (insn
);
10142 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
10144 print_rtl_single (dump_file
, insn
);
10145 dump_cselib_table (dump_file
);
10148 if (!cselib_hook_called
)
10149 add_with_sets (insn
, 0, 0);
10150 cancel_changes (0);
10152 if (!frame_pointer_needed
&& post
)
10154 micro_operation mo
;
10155 mo
.type
= MO_ADJUST
;
10156 mo
.u
.adjust
= post
;
10158 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
10159 log_op_type (PATTERN (insn
), bb
, insn
,
10160 MO_ADJUST
, dump_file
);
10161 VTI (bb
)->mos
.safe_push (mo
);
10162 VTI (bb
)->out
.stack_adjust
+= post
;
10165 if (fp_cfa_offset
!= -1
10166 && hard_frame_pointer_adjustment
== -1
10167 && fp_setter_insn (insn
))
10169 vt_init_cfa_base ();
10170 hard_frame_pointer_adjustment
= fp_cfa_offset
;
10171 /* Disassociate sp from fp now. */
10172 if (MAY_HAVE_DEBUG_INSNS
)
10175 cselib_invalidate_rtx (stack_pointer_rtx
);
10176 v
= cselib_lookup (stack_pointer_rtx
, Pmode
, 1,
10178 if (v
&& !cselib_preserved_value_p (v
))
10180 cselib_set_value_sp_based (v
);
10181 preserve_value (v
);
10187 gcc_assert (offset
== VTI (bb
)->out
.stack_adjust
);
10192 if (MAY_HAVE_DEBUG_INSNS
)
10194 cselib_preserve_only_values ();
10195 cselib_reset_table (cselib_get_next_uid ());
10196 cselib_record_sets_hook
= NULL
;
10200 hard_frame_pointer_adjustment
= -1;
10201 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun
))->flooded
= true;
10202 cfa_base_rtx
= NULL_RTX
;
/* This is *not* reset after each function.  It gives each
   NOTE_INSN_DELETED_DEBUG_LABEL in the entire compilation
   a unique label number.  */

static int debug_label_num = 1;
10212 /* Get rid of all debug insns from the insn stream. */
10215 delete_debug_insns (void)
10218 rtx_insn
*insn
, *next
;
10220 if (!MAY_HAVE_DEBUG_INSNS
)
10223 FOR_EACH_BB_FN (bb
, cfun
)
10225 FOR_BB_INSNS_SAFE (bb
, insn
, next
)
10226 if (DEBUG_INSN_P (insn
))
10228 tree decl
= INSN_VAR_LOCATION_DECL (insn
);
10229 if (TREE_CODE (decl
) == LABEL_DECL
10230 && DECL_NAME (decl
)
10231 && !DECL_RTL_SET_P (decl
))
10233 PUT_CODE (insn
, NOTE
);
10234 NOTE_KIND (insn
) = NOTE_INSN_DELETED_DEBUG_LABEL
;
10235 NOTE_DELETED_LABEL_NAME (insn
)
10236 = IDENTIFIER_POINTER (DECL_NAME (decl
));
10237 SET_DECL_RTL (decl
, insn
);
10238 CODE_LABEL_NUMBER (insn
) = debug_label_num
++;
10241 delete_insn (insn
);
10246 /* Run a fast, BB-local only version of var tracking, to take care of
10247 information that we don't do global analysis on, such that not all
10248 information is lost. If SKIPPED holds, we're skipping the global
10249 pass entirely, so we should try to use information it would have
10250 handled as well.. */
10253 vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED
)
10255 /* ??? Just skip it all for now. */
10256 delete_debug_insns ();
10259 /* Free the data structures needed for variable tracking. */
10266 FOR_EACH_BB_FN (bb
, cfun
)
10268 VTI (bb
)->mos
.release ();
10271 FOR_ALL_BB_FN (bb
, cfun
)
10273 dataflow_set_destroy (&VTI (bb
)->in
);
10274 dataflow_set_destroy (&VTI (bb
)->out
);
10275 if (VTI (bb
)->permp
)
10277 dataflow_set_destroy (VTI (bb
)->permp
);
10278 XDELETE (VTI (bb
)->permp
);
10281 free_aux_for_blocks ();
10282 delete empty_shared_hash
->htab
;
10283 empty_shared_hash
->htab
= NULL
;
10284 delete changed_variables
;
10285 changed_variables
= NULL
;
10286 attrs_def::pool
.release ();
10287 var_pool
.release ();
10288 location_chain_def::pool
.release ();
10289 shared_hash_def::pool
.release ();
10291 if (MAY_HAVE_DEBUG_INSNS
)
10293 if (global_get_addr_cache
)
10294 delete global_get_addr_cache
;
10295 global_get_addr_cache
= NULL
;
10296 loc_exp_dep::pool
.release ();
10297 valvar_pool
.release ();
10298 preserved_values
.release ();
10300 BITMAP_FREE (scratch_regs
);
10301 scratch_regs
= NULL
;
10304 #ifdef HAVE_window_save
10305 vec_free (windowed_parm_regs
);
10309 XDELETEVEC (vui_vec
);
10314 /* The entry point to variable tracking pass. */
10316 static inline unsigned int
10317 variable_tracking_main_1 (void)
10321 if (flag_var_tracking_assignments
< 0
10322 /* Var-tracking right now assumes the IR doesn't contain
10323 any pseudos at this point. */
10324 || targetm
.no_register_allocation
)
10326 delete_debug_insns ();
10330 if (n_basic_blocks_for_fn (cfun
) > 500 &&
10331 n_edges_for_fn (cfun
) / n_basic_blocks_for_fn (cfun
) >= 20)
10333 vt_debug_insns_local (true);
10337 mark_dfs_back_edges ();
10338 if (!vt_initialize ())
10341 vt_debug_insns_local (true);
10345 success
= vt_find_locations ();
10347 if (!success
&& flag_var_tracking_assignments
> 0)
10351 delete_debug_insns ();
10353 /* This is later restored by our caller. */
10354 flag_var_tracking_assignments
= 0;
10356 success
= vt_initialize ();
10357 gcc_assert (success
);
10359 success
= vt_find_locations ();
10365 vt_debug_insns_local (false);
10369 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
10371 dump_dataflow_sets ();
10372 dump_reg_info (dump_file
);
10373 dump_flow_info (dump_file
, dump_flags
);
10376 timevar_push (TV_VAR_TRACKING_EMIT
);
10378 timevar_pop (TV_VAR_TRACKING_EMIT
);
10381 vt_debug_insns_local (false);
10386 variable_tracking_main (void)
10389 int save
= flag_var_tracking_assignments
;
10391 ret
= variable_tracking_main_1 ();
10393 flag_var_tracking_assignments
= save
;
10400 const pass_data pass_data_variable_tracking
=
10402 RTL_PASS
, /* type */
10403 "vartrack", /* name */
10404 OPTGROUP_NONE
, /* optinfo_flags */
10405 TV_VAR_TRACKING
, /* tv_id */
10406 0, /* properties_required */
10407 0, /* properties_provided */
10408 0, /* properties_destroyed */
10409 0, /* todo_flags_start */
10410 0, /* todo_flags_finish */
10413 class pass_variable_tracking
: public rtl_opt_pass
10416 pass_variable_tracking (gcc::context
*ctxt
)
10417 : rtl_opt_pass (pass_data_variable_tracking
, ctxt
)
10420 /* opt_pass methods: */
10421 virtual bool gate (function
*)
10423 return (flag_var_tracking
&& !targetm
.delay_vartrack
);
10426 virtual unsigned int execute (function
*)
10428 return variable_tracking_main ();
10431 }; // class pass_variable_tracking
10433 } // anon namespace
10436 make_pass_variable_tracking (gcc::context
*ctxt
)
10438 return new pass_variable_tracking (ctxt
);