1 /* Variable tracking routines for the GNU compiler.
2 Copyright (C) 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010, 2011, 2012
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* This file contains the variable tracking pass. It computes where
22 variables are located (which registers or where in memory) at each position
23 in the instruction stream and emits notes describing the locations.
24 Debug information (DWARF2 location lists) is finally generated from
25 these notes.
26 With this debug information, it is possible to show variables
27 even when debugging optimized code.
28
29 How does the variable tracking pass work?
30
31 First, it scans RTL code for uses, stores and clobbers (register/memory
32 references in instructions), for call insns and for stack adjustments
33 separately for each basic block and saves them to an array of micro
34 operations.
35 The micro operations of one instruction are ordered so that
36 pre-modifying stack adjustment < use < use with no var < call insn <
37 clobber < set < post-modifying stack adjustment
38
39 Then, a forward dataflow analysis is performed to find out how locations
40 of variables change through code and to propagate the variable locations
41 along control flow graph.
42 The IN set for basic block BB is computed as the union of the OUT sets of
43 BB's predecessors; the OUT set for BB is copied from the IN set for BB and
44 then changed according to the micro operations in BB.
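
   For illustration (a hand-made example, not taken from an actual dump):
   if block B3 has predecessors B1 and B2, each pass of the analysis
   computes

     IN (B3)  = OUT (B1) union OUT (B2)
     OUT (B3) = result of applying B3's micro operations to IN (B3)

   and the iteration stops once no OUT set changes anymore.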
45
46 The IN and OUT sets for basic blocks consist of a current stack adjustment
47 (used for adjusting offsets of variables addressed using the stack pointer),
48 the table of structures describing the locations of parts of a variable
49 and a linked list for each physical register.
50 The linked list is a list of variable parts stored in the register,
51 i.e. it is a list of triplets (reg, decl, offset) where decl is
52 REG_EXPR (reg) and offset is REG_OFFSET (reg). The linked list is used for
53 effectively deleting the appropriate variable parts when we set or clobber the
54 register.
55
56 There may be more than one variable part in a register. The linked lists
57 should be pretty short, so a list is a good data structure here.
58 For example, in the following code the register allocator may assign the same
59 register to variables A and B, and both of them are stored in the same
60 register in CODE:
61
62 if (cond)
63 set A;
64 else
65 set B;
66 CODE;
67 if (cond)
68 use A;
69 else
70 use B;
71
72 Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
73 are emitted at appropriate positions in the RTL code. Each such note describes
74 the location of one variable at the point in the instruction stream where the
75 note is. There is no need to emit a note for each variable before each
76 instruction; we only emit these notes where the location of a variable changes
77 (this means that we also emit notes for changes between the OUT set of the
78 previous block and the IN set of the current block).
79
80 The notes consist of two parts:
81 1. the declaration (from REG_EXPR or MEM_EXPR)
82 2. the location of a variable - it is either a simple register/memory
83 reference (for simple variables, for example int),
84 or a parallel of register/memory references (for large variables
85 which consist of several parts, for example long long).
86
87 */
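
/* As a hand-written illustration (not dumped from a real compilation),
   the note for an int variable x kept in a register might look like

     (note (var_location x (reg:SI 0)))

   while a long long y split between two registers could be described by
   a parallel that pairs each piece with its offset within the variable:

     (var_location y (parallel [(expr_list (reg:SI 0) (const_int 0))
                                (expr_list (reg:SI 1) (const_int 4))]))

   The operands here are made up; see emit_note_insn_var_location below
   for how the real notes are built.  */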
88
89 #include "config.h"
90 #include "system.h"
91 #include "coretypes.h"
92 #include "tm.h"
93 #include "rtl.h"
94 #include "tree.h"
95 #include "tm_p.h"
96 #include "hard-reg-set.h"
97 #include "basic-block.h"
98 #include "flags.h"
99 #include "output.h"
100 #include "insn-config.h"
101 #include "reload.h"
102 #include "sbitmap.h"
103 #include "alloc-pool.h"
104 #include "fibheap.h"
105 #include "hashtab.h"
106 #include "regs.h"
107 #include "expr.h"
108 #include "timevar.h"
109 #include "tree-pass.h"
110 #include "tree-flow.h"
111 #include "cselib.h"
112 #include "target.h"
113 #include "params.h"
114 #include "diagnostic.h"
115 #include "tree-pretty-print.h"
116 #include "pointer-set.h"
117 #include "recog.h"
118 #include "tm_p.h"
119
120 /* var-tracking.c assumes that tree code with the same value as VALUE rtx code
121 has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
122 Currently the value is the same as IDENTIFIER_NODE, which has such
123 a property. If this compile time assertion ever fails, make sure that
124 the new tree code that equals (int) VALUE has the same property. */
125 extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1];
126
127 /* Type of micro operation. */
128 enum micro_operation_type
129 {
130 MO_USE, /* Use location (REG or MEM). */
131 MO_USE_NO_VAR, /* Use location which is not associated with a variable
132 or the variable is not trackable. */
133 MO_VAL_USE, /* Use location which is associated with a value. */
134 MO_VAL_LOC, /* Use location which appears in a debug insn. */
135 MO_VAL_SET, /* Set location associated with a value. */
136 MO_SET, /* Set location. */
137 MO_COPY, /* Copy the same portion of a variable from one
138 location to another. */
139 MO_CLOBBER, /* Clobber location. */
140 MO_CALL, /* Call insn. */
141 MO_ADJUST /* Adjust stack pointer. */
142
143 };
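
/* An illustrative, hand-constructed example of the ordering described
   at the top of the file: for a push instruction

     (set (mem:SI (pre_dec (reg sp))) (reg A))

   the recorded micro operations would be a pre-modifying MO_ADJUST,
   then an MO_USE (or MO_USE_NO_VAR) for register A, and then an MO_SET
   for the stack slot.  */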
144
145 static const char * const ATTRIBUTE_UNUSED
146 micro_operation_type_name[] = {
147 "MO_USE",
148 "MO_USE_NO_VAR",
149 "MO_VAL_USE",
150 "MO_VAL_LOC",
151 "MO_VAL_SET",
152 "MO_SET",
153 "MO_COPY",
154 "MO_CLOBBER",
155 "MO_CALL",
156 "MO_ADJUST"
157 };
158
159 /* Where shall the note be emitted? BEFORE or AFTER the instruction.
160 Notes emitted as AFTER_CALL are to take effect during the call,
161 rather than after the call. */
162 enum emit_note_where
163 {
164 EMIT_NOTE_BEFORE_INSN,
165 EMIT_NOTE_AFTER_INSN,
166 EMIT_NOTE_AFTER_CALL_INSN
167 };
168
169 /* Structure holding information about micro operation. */
170 typedef struct micro_operation_def
171 {
172 /* Type of micro operation. */
173 enum micro_operation_type type;
174
175 /* The instruction which the micro operation is in, for MO_USE,
176 MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
177 instruction or note in the original flow (before any var-tracking
178 notes are inserted, to simplify emission of notes), for MO_SET
179 and MO_CLOBBER. */
180 rtx insn;
181
182 union {
183 /* Location. For MO_SET and MO_COPY, this is the SET that
184 performs the assignment, if known, otherwise it is the target
185 of the assignment. For MO_VAL_USE and MO_VAL_SET, it is a
186 CONCAT of the VALUE and the LOC associated with it. For
187 MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
188 associated with it. */
189 rtx loc;
190
191 /* Stack adjustment. */
192 HOST_WIDE_INT adjust;
193 } u;
194 } micro_operation;
195
196 DEF_VEC_O(micro_operation);
197 DEF_VEC_ALLOC_O(micro_operation,heap);
198
199 /* A declaration of a variable, or an RTL value being handled like a
200 declaration. */
201 typedef void *decl_or_value;
202
203 /* Structure for passing some other parameters to function
204 emit_note_insn_var_location. */
205 typedef struct emit_note_data_def
206 {
207 /* The instruction which the note will be emitted before/after. */
208 rtx insn;
209
210 /* Where the note will be emitted (before or after the insn). */
211 enum emit_note_where where;
212
213 /* The variables and values active at this point. */
214 htab_t vars;
215 } emit_note_data;
216
217 /* Description of location of a part of a variable. The content of a physical
218 register is described by a chain of these structures.
219 The chains are pretty short (usually 1 or 2 elements) and thus
220 a chain is the best data structure here. */
221 typedef struct attrs_def
222 {
223 /* Pointer to next member of the list. */
224 struct attrs_def *next;
225
226 /* The rtx of register. */
227 rtx loc;
228
229 /* The declaration corresponding to LOC. */
230 decl_or_value dv;
231
232 /* Offset from start of DECL. */
233 HOST_WIDE_INT offset;
234 } *attrs;
235
236 /* Structure holding a refcounted hash table. If refcount > 1,
237 it must first be unshared before being modified. */
238 typedef struct shared_hash_def
239 {
240 /* Reference count. */
241 int refcount;
242
243 /* Actual hash table. */
244 htab_t htab;
245 } *shared_hash;
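
/* A sketch of the copy-on-write discipline (illustrative only; the
   helpers used here are defined further below):

     shared_hash h2 = shared_hash_copy (h1);            refcount is now 2
     shared_hash_find_slot_unshare (&h2, dv, INSERT);   about to modify

   The second call first replaces h2 by a private copy with refcount 1,
   leaving h1 and its hash table untouched.  */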
246
247 /* Structure holding the IN or OUT set for a basic block. */
248 typedef struct dataflow_set_def
249 {
250 /* Adjustment of stack offset. */
251 HOST_WIDE_INT stack_adjust;
252
253 /* Attributes for registers (lists of attrs). */
254 attrs regs[FIRST_PSEUDO_REGISTER];
255
256 /* Variable locations. */
257 shared_hash vars;
258
259 /* Vars that are being traversed. */
260 shared_hash traversed_vars;
261 } dataflow_set;
262
263 /* The structure (one for each basic block) containing the information
264 needed for variable tracking. */
265 typedef struct variable_tracking_info_def
266 {
267 /* The vector of micro operations. */
268 VEC(micro_operation, heap) *mos;
269
270 /* The IN and OUT set for dataflow analysis. */
271 dataflow_set in;
272 dataflow_set out;
273
274 /* The permanent-in dataflow set for this block. This is used to
275 hold values for which we had to compute entry values. ??? This
276 should probably be dynamically allocated, to avoid using more
277 memory in non-debug builds. */
278 dataflow_set *permp;
279
280 /* Has the block been visited in DFS? */
281 bool visited;
282
283 /* Has the block been flooded in VTA? */
284 bool flooded;
285
286 } *variable_tracking_info;
287
288 /* Structure for chaining the locations. */
289 typedef struct location_chain_def
290 {
291 /* Next element in the chain. */
292 struct location_chain_def *next;
293
294 /* The location (REG, MEM or VALUE). */
295 rtx loc;
296
297 /* The "value" stored in this location. */
298 rtx set_src;
299
300 /* Initialized? */
301 enum var_init_status init;
302 } *location_chain;
303
304 /* A vector of loc_exp_dep holds the active dependencies of a one-part
305 DV on VALUEs, i.e., the VALUEs expanded so as to form the current
306 location of DV. Each entry is also part of VALUE's linked list of
307 backlinks back to DV. */
308 typedef struct loc_exp_dep_s
309 {
310 /* The dependent DV. */
311 decl_or_value dv;
312 /* The dependency VALUE or DECL_DEBUG. */
313 rtx value;
314 /* The next entry in VALUE's backlinks list. */
315 struct loc_exp_dep_s *next;
316 /* A pointer to the pointer to this entry (head or prev's next) in
317 the doubly-linked list. */
318 struct loc_exp_dep_s **pprev;
319 } loc_exp_dep;
320
321 DEF_VEC_O (loc_exp_dep);
322
323 /* This data structure is allocated for one-part variables at the time
324 of emitting notes. */
325 struct onepart_aux
326 {
327 /* Doubly-linked list of dependent DVs. These are DVs whose cur_loc
328 computation used the expansion of this variable, and that ought
329 to be notified should this variable change. If the DV's cur_loc
330 expanded to NULL, all components of the loc list are regarded as
331 active, so that any changes in them give us a chance to get a
332 location. Otherwise, only components of the loc that expanded to
333 non-NULL are regarded as active dependencies. */
334 loc_exp_dep *backlinks;
335 /* This holds the LOC that was expanded into cur_loc. We need only
336 mark a one-part variable as changed if the FROM loc is removed,
337 or if it has no known location and a loc is added, or if it gets
338 a change notification from any of its active dependencies. */
339 rtx from;
340 /* The depth of the cur_loc expression. */
341 int depth;
342 /* Dependencies actively used when expanding FROM into cur_loc. */
343 VEC (loc_exp_dep, none) deps;
344 };
345
346 /* Structure describing one part of a variable. */
347 typedef struct variable_part_def
348 {
349 /* Chain of locations of the part. */
350 location_chain loc_chain;
351
352 /* Location which was last emitted to location list. */
353 rtx cur_loc;
354
355 union variable_aux
356 {
357 /* The offset in the variable, if !var->onepart. */
358 HOST_WIDE_INT offset;
359
360 /* Pointer to auxiliary data, if var->onepart and emit_notes. */
361 struct onepart_aux *onepaux;
362 } aux;
363 } variable_part;
364
365 /* Maximum number of location parts. */
366 #define MAX_VAR_PARTS 16
367
368 /* Enumeration type used to discriminate various types of one-part
369 variables. */
370 typedef enum onepart_enum
371 {
372 /* Not a one-part variable. */
373 NOT_ONEPART = 0,
374 /* A one-part DECL that is not a DEBUG_EXPR_DECL. */
375 ONEPART_VDECL = 1,
376 /* A DEBUG_EXPR_DECL. */
377 ONEPART_DEXPR = 2,
378 /* A VALUE. */
379 ONEPART_VALUE = 3
380 } onepart_enum_t;
381
382 /* Structure describing where the variable is located. */
383 typedef struct variable_def
384 {
385 /* The declaration of the variable, or an RTL value being handled
386 like a declaration. */
387 decl_or_value dv;
388
389 /* Reference count. */
390 int refcount;
391
392 /* Number of variable parts. */
393 char n_var_parts;
394
395 /* What type of DV this is, according to enum onepart_enum. */
396 ENUM_BITFIELD (onepart_enum) onepart : CHAR_BIT;
397
398 /* True if this variable_def struct is currently in the
399 changed_variables hash table. */
400 bool in_changed_variables;
401
402 /* The variable parts. */
403 variable_part var_part[1];
404 } *variable;
405 typedef const struct variable_def *const_variable;
406
407 /* Pointer to the BB's information specific to variable tracking pass. */
408 #define VTI(BB) ((variable_tracking_info) (BB)->aux)
409
410 /* Macro to access MEM_OFFSET as an HOST_WIDE_INT. Evaluates MEM twice. */
411 #define INT_MEM_OFFSET(mem) (MEM_OFFSET_KNOWN_P (mem) ? MEM_OFFSET (mem) : 0)
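
/* Because MEM is evaluated twice, the argument must be free of side
   effects; e.g. (a deliberately bad, hypothetical use)
   INT_MEM_OFFSET (*mem_ptr++) would advance MEM_PTR twice.  */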
412
413 #if ENABLE_CHECKING && (GCC_VERSION >= 2007)
414
415 /* Access VAR's Ith part's offset, checking that it's not a one-part
416 variable. */
417 #define VAR_PART_OFFSET(var, i) __extension__ \
418 (*({ variable const __v = (var); \
419 gcc_checking_assert (!__v->onepart); \
420 &__v->var_part[(i)].aux.offset; }))
421
422 /* Access VAR's one-part auxiliary data, checking that it is a
423 one-part variable. */
424 #define VAR_LOC_1PAUX(var) __extension__ \
425 (*({ variable const __v = (var); \
426 gcc_checking_assert (__v->onepart); \
427 &__v->var_part[0].aux.onepaux; }))
428
429 #else
430 #define VAR_PART_OFFSET(var, i) ((var)->var_part[(i)].aux.offset)
431 #define VAR_LOC_1PAUX(var) ((var)->var_part[0].aux.onepaux)
432 #endif
433
434 /* These are accessor macros for the one-part auxiliary data. When
435 convenient for users, they're guarded by tests that the data was
436 allocated. */
437 #define VAR_LOC_DEP_LST(var) (VAR_LOC_1PAUX (var) \
438 ? VAR_LOC_1PAUX (var)->backlinks \
439 : NULL)
440 #define VAR_LOC_DEP_LSTP(var) (VAR_LOC_1PAUX (var) \
441 ? &VAR_LOC_1PAUX (var)->backlinks \
442 : NULL)
443 #define VAR_LOC_FROM(var) (VAR_LOC_1PAUX (var)->from)
444 #define VAR_LOC_DEPTH(var) (VAR_LOC_1PAUX (var)->depth)
445 #define VAR_LOC_DEP_VEC(var) (VAR_LOC_1PAUX (var) \
446 ? &VAR_LOC_1PAUX (var)->deps \
447 : NULL)
448
449 /* Alloc pool for struct attrs_def. */
450 static alloc_pool attrs_pool;
451
452 /* Alloc pool for struct variable_def with MAX_VAR_PARTS entries. */
453 static alloc_pool var_pool;
454
455 /* Alloc pool for struct variable_def with a single var_part entry. */
456 static alloc_pool valvar_pool;
457
458 /* Alloc pool for struct location_chain_def. */
459 static alloc_pool loc_chain_pool;
460
461 /* Alloc pool for struct shared_hash_def. */
462 static alloc_pool shared_hash_pool;
463
465 /* Changed variables; notes will be emitted for them. */
465 static htab_t changed_variables;
466
467 /* Shall notes be emitted? */
468 static bool emit_notes;
469
470 /* Values whose dynamic location lists have gone empty, but whose
471 cselib location lists are still usable. Use this to hold the
472 current location, the backlinks, etc, during emit_notes. */
473 static htab_t dropped_values;
474
475 /* Empty shared hashtable. */
476 static shared_hash empty_shared_hash;
477
478 /* Scratch register bitmap used by cselib_expand_value_rtx. */
479 static bitmap scratch_regs = NULL;
480
481 #ifdef HAVE_window_save
482 typedef struct GTY(()) parm_reg {
483 rtx outgoing;
484 rtx incoming;
485 } parm_reg_t;
486
487 DEF_VEC_O(parm_reg_t);
488 DEF_VEC_ALLOC_O(parm_reg_t, gc);
489
490 /* Vector of windowed parameter registers, if any. */
491 static VEC(parm_reg_t, gc) *windowed_parm_regs = NULL;
492 #endif
493
494 /* Variable used to tell whether cselib_process_insn called our hook. */
495 static bool cselib_hook_called;
496
497 /* Local function prototypes. */
498 static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
499 HOST_WIDE_INT *);
500 static void insn_stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
501 HOST_WIDE_INT *);
502 static bool vt_stack_adjustments (void);
503 static hashval_t variable_htab_hash (const void *);
504 static int variable_htab_eq (const void *, const void *);
505 static void variable_htab_free (void *);
506
507 static void init_attrs_list_set (attrs *);
508 static void attrs_list_clear (attrs *);
509 static attrs attrs_list_member (attrs, decl_or_value, HOST_WIDE_INT);
510 static void attrs_list_insert (attrs *, decl_or_value, HOST_WIDE_INT, rtx);
511 static void attrs_list_copy (attrs *, attrs);
512 static void attrs_list_union (attrs *, attrs);
513
514 static void **unshare_variable (dataflow_set *set, void **slot, variable var,
515 enum var_init_status);
516 static void vars_copy (htab_t, htab_t);
517 static tree var_debug_decl (tree);
518 static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
519 static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
520 enum var_init_status, rtx);
521 static void var_reg_delete (dataflow_set *, rtx, bool);
522 static void var_regno_delete (dataflow_set *, int);
523 static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
524 static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
525 enum var_init_status, rtx);
526 static void var_mem_delete (dataflow_set *, rtx, bool);
527
528 static void dataflow_set_init (dataflow_set *);
529 static void dataflow_set_clear (dataflow_set *);
530 static void dataflow_set_copy (dataflow_set *, dataflow_set *);
531 static int variable_union_info_cmp_pos (const void *, const void *);
532 static void dataflow_set_union (dataflow_set *, dataflow_set *);
533 static location_chain find_loc_in_1pdv (rtx, variable, htab_t);
534 static bool canon_value_cmp (rtx, rtx);
535 static int loc_cmp (rtx, rtx);
536 static bool variable_part_different_p (variable_part *, variable_part *);
537 static bool onepart_variable_different_p (variable, variable);
538 static bool variable_different_p (variable, variable);
539 static bool dataflow_set_different (dataflow_set *, dataflow_set *);
540 static void dataflow_set_destroy (dataflow_set *);
541
542 static bool contains_symbol_ref (rtx);
543 static bool track_expr_p (tree, bool);
544 static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
545 static int add_uses (rtx *, void *);
546 static void add_uses_1 (rtx *, void *);
547 static void add_stores (rtx, const_rtx, void *);
548 static bool compute_bb_dataflow (basic_block);
549 static bool vt_find_locations (void);
550
551 static void dump_attrs_list (attrs);
552 static int dump_var_slot (void **, void *);
553 static void dump_var (variable);
554 static void dump_vars (htab_t);
555 static void dump_dataflow_set (dataflow_set *);
556 static void dump_dataflow_sets (void);
557
558 static void set_dv_changed (decl_or_value, bool);
559 static void variable_was_changed (variable, dataflow_set *);
560 static void **set_slot_part (dataflow_set *, rtx, void **,
561 decl_or_value, HOST_WIDE_INT,
562 enum var_init_status, rtx);
563 static void set_variable_part (dataflow_set *, rtx,
564 decl_or_value, HOST_WIDE_INT,
565 enum var_init_status, rtx, enum insert_option);
566 static void **clobber_slot_part (dataflow_set *, rtx,
567 void **, HOST_WIDE_INT, rtx);
568 static void clobber_variable_part (dataflow_set *, rtx,
569 decl_or_value, HOST_WIDE_INT, rtx);
570 static void **delete_slot_part (dataflow_set *, rtx, void **, HOST_WIDE_INT);
571 static void delete_variable_part (dataflow_set *, rtx,
572 decl_or_value, HOST_WIDE_INT);
573 static int emit_note_insn_var_location (void **, void *);
574 static void emit_notes_for_changes (rtx, enum emit_note_where, shared_hash);
575 static int emit_notes_for_differences_1 (void **, void *);
576 static int emit_notes_for_differences_2 (void **, void *);
577 static void emit_notes_for_differences (rtx, dataflow_set *, dataflow_set *);
578 static void emit_notes_in_bb (basic_block, dataflow_set *);
579 static void vt_emit_notes (void);
580
581 static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
582 static void vt_add_function_parameters (void);
583 static bool vt_initialize (void);
584 static void vt_finalize (void);
585
586 /* Given a SET, calculate the amount of stack adjustment it contains,
587 PRE- and POST-modifying the stack pointer.
588 This function is similar to stack_adjust_offset. */
589
590 static void
591 stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
592 HOST_WIDE_INT *post)
593 {
594 rtx src = SET_SRC (pattern);
595 rtx dest = SET_DEST (pattern);
596 enum rtx_code code;
597
598 if (dest == stack_pointer_rtx)
599 {
600 /* (set (reg sp) (plus (reg sp) (const_int))) */
601 code = GET_CODE (src);
602 if (! (code == PLUS || code == MINUS)
603 || XEXP (src, 0) != stack_pointer_rtx
604 || !CONST_INT_P (XEXP (src, 1)))
605 return;
606
607 if (code == MINUS)
608 *post += INTVAL (XEXP (src, 1));
609 else
610 *post -= INTVAL (XEXP (src, 1));
611 }
612 else if (MEM_P (dest))
613 {
614 /* (set (mem (pre_dec (reg sp))) (foo)) */
615 src = XEXP (dest, 0);
616 code = GET_CODE (src);
617
618 switch (code)
619 {
620 case PRE_MODIFY:
621 case POST_MODIFY:
622 if (XEXP (src, 0) == stack_pointer_rtx)
623 {
624 rtx val = XEXP (XEXP (src, 1), 1);
625 /* We handle only adjustments by constant amount. */
626 gcc_assert (GET_CODE (XEXP (src, 1)) == PLUS &&
627 CONST_INT_P (val));
628
629 if (code == PRE_MODIFY)
630 *pre -= INTVAL (val);
631 else
632 *post -= INTVAL (val);
633 break;
634 }
635 return;
636
637 case PRE_DEC:
638 if (XEXP (src, 0) == stack_pointer_rtx)
639 {
640 *pre += GET_MODE_SIZE (GET_MODE (dest));
641 break;
642 }
643 return;
644
645 case POST_DEC:
646 if (XEXP (src, 0) == stack_pointer_rtx)
647 {
648 *post += GET_MODE_SIZE (GET_MODE (dest));
649 break;
650 }
651 return;
652
653 case PRE_INC:
654 if (XEXP (src, 0) == stack_pointer_rtx)
655 {
656 *pre -= GET_MODE_SIZE (GET_MODE (dest));
657 break;
658 }
659 return;
660
661 case POST_INC:
662 if (XEXP (src, 0) == stack_pointer_rtx)
663 {
664 *post -= GET_MODE_SIZE (GET_MODE (dest));
665 break;
666 }
667 return;
668
669 default:
670 return;
671 }
672 }
673 }
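
/* Two hand-checked examples of the above (not taken from a test run):
   for

     (set (reg sp) (plus (reg sp) (const_int -16)))

   *post is increased by 16, and for a 4-byte push

     (set (mem:SI (pre_dec (reg sp))) (reg A))

   *pre is increased by 4, i.e. GET_MODE_SIZE of the stored mode.  */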
674
675 /* Given an INSN, calculate the amount of stack adjustment it contains,
676 PRE- and POST-modifying the stack pointer. */
677
678 static void
679 insn_stack_adjust_offset_pre_post (rtx insn, HOST_WIDE_INT *pre,
680 HOST_WIDE_INT *post)
681 {
682 rtx pattern;
683
684 *pre = 0;
685 *post = 0;
686
687 pattern = PATTERN (insn);
688 if (RTX_FRAME_RELATED_P (insn))
689 {
690 rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
691 if (expr)
692 pattern = XEXP (expr, 0);
693 }
694
695 if (GET_CODE (pattern) == SET)
696 stack_adjust_offset_pre_post (pattern, pre, post);
697 else if (GET_CODE (pattern) == PARALLEL
698 || GET_CODE (pattern) == SEQUENCE)
699 {
700 int i;
701
702 /* There may be stack adjustments inside compound insns. Search
703 for them. */
704 for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
705 if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
706 stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
707 }
708 }
709
710 /* Compute stack adjustments for all blocks by traversing the DFS tree.
711 Return true when the adjustments on all incoming edges are consistent.
712 Heavily borrowed from pre_and_rev_post_order_compute. */
713
714 static bool
715 vt_stack_adjustments (void)
716 {
717 edge_iterator *stack;
718 int sp;
719
720 /* Initialize entry block. */
721 VTI (ENTRY_BLOCK_PTR)->visited = true;
722 VTI (ENTRY_BLOCK_PTR)->in.stack_adjust = INCOMING_FRAME_SP_OFFSET;
723 VTI (ENTRY_BLOCK_PTR)->out.stack_adjust = INCOMING_FRAME_SP_OFFSET;
724
725 /* Allocate stack for back-tracking up CFG. */
726 stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
727 sp = 0;
728
729 /* Push the first edge on to the stack. */
730 stack[sp++] = ei_start (ENTRY_BLOCK_PTR->succs);
731
732 while (sp)
733 {
734 edge_iterator ei;
735 basic_block src;
736 basic_block dest;
737
738 /* Look at the edge on the top of the stack. */
739 ei = stack[sp - 1];
740 src = ei_edge (ei)->src;
741 dest = ei_edge (ei)->dest;
742
743 /* Check if the edge destination has been visited yet. */
744 if (!VTI (dest)->visited)
745 {
746 rtx insn;
747 HOST_WIDE_INT pre, post, offset;
748 VTI (dest)->visited = true;
749 VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust;
750
751 if (dest != EXIT_BLOCK_PTR)
752 for (insn = BB_HEAD (dest);
753 insn != NEXT_INSN (BB_END (dest));
754 insn = NEXT_INSN (insn))
755 if (INSN_P (insn))
756 {
757 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
758 offset += pre + post;
759 }
760
761 VTI (dest)->out.stack_adjust = offset;
762
763 if (EDGE_COUNT (dest->succs) > 0)
764 /* Since the DEST node has been visited for the first
765 time, check its successors. */
766 stack[sp++] = ei_start (dest->succs);
767 }
768 else
769 {
770 /* Check whether the adjustments on the edges are the same. */
771 if (VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
772 {
773 free (stack);
774 return false;
775 }
776
777 if (! ei_one_before_end_p (ei))
778 /* Go to the next edge. */
779 ei_next (&stack[sp - 1]);
780 else
781 /* Return to previous level if there are no more edges. */
782 sp--;
783 }
784 }
785
786 free (stack);
787 return true;
788 }
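
/* For instance (a made-up CFG): in a diamond where one arm leaves the
   stack pointer 8 bytes lower than the other, the join block sees two
   different out.stack_adjust values on its incoming edges, the
   comparison above fails, and the function returns false rather than
   guessing.  */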
789
790 /* arg_pointer_rtx or frame_pointer_rtx, if stack_pointer_rtx or
791 hard_frame_pointer_rtx is being mapped to it, and the offset for it. */
792 static rtx cfa_base_rtx;
793 static HOST_WIDE_INT cfa_base_offset;
794
795 /* Compute a CFA-based value for an ADJUSTMENT made to stack_pointer_rtx
796 or hard_frame_pointer_rtx. */
797
798 static inline rtx
799 compute_cfa_pointer (HOST_WIDE_INT adjustment)
800 {
801 return plus_constant (cfa_base_rtx, adjustment + cfa_base_offset);
802 }
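
/* With made-up numbers: if cfa_base_rtx is the frame pointer and
   cfa_base_offset is -16, compute_cfa_pointer (8) yields
   (plus (reg fp) (const_int -8)).  */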
803
804 /* Adjustment for hard_frame_pointer_rtx to cfa base reg,
805 or -1 if the replacement shouldn't be done. */
806 static HOST_WIDE_INT hard_frame_pointer_adjustment = -1;
807
808 /* Data for adjust_mems callback. */
809
810 struct adjust_mem_data
811 {
812 bool store;
813 enum machine_mode mem_mode;
814 HOST_WIDE_INT stack_adjust;
815 rtx side_effects;
816 };
817
818 /* Helper for adjust_mems. Return 1 if *loc is unsuitable for
819 transformation of wider-mode arithmetic to a narrower mode,
820 -1 if it is suitable and subexpressions shouldn't be
821 traversed and 0 if it is suitable and subexpressions should
822 be traversed. Called through for_each_rtx. */
823
824 static int
825 use_narrower_mode_test (rtx *loc, void *data)
826 {
827 rtx subreg = (rtx) data;
828
829 if (CONSTANT_P (*loc))
830 return -1;
831 switch (GET_CODE (*loc))
832 {
833 case REG:
834 if (cselib_lookup (*loc, GET_MODE (SUBREG_REG (subreg)), 0, VOIDmode))
835 return 1;
836 if (!validate_subreg (GET_MODE (subreg), GET_MODE (*loc),
837 *loc, subreg_lowpart_offset (GET_MODE (subreg),
838 GET_MODE (*loc))))
839 return 1;
840 return -1;
841 case PLUS:
842 case MINUS:
843 case MULT:
844 return 0;
845 case ASHIFT:
846 if (for_each_rtx (&XEXP (*loc, 0), use_narrower_mode_test, data))
847 return 1;
848 else
849 return -1;
850 default:
851 return 1;
852 }
853 }
854
855 /* Transform X into narrower mode MODE from wider mode WMODE. */
856
857 static rtx
858 use_narrower_mode (rtx x, enum machine_mode mode, enum machine_mode wmode)
859 {
860 rtx op0, op1;
861 if (CONSTANT_P (x))
862 return lowpart_subreg (mode, x, wmode);
863 switch (GET_CODE (x))
864 {
865 case REG:
866 return lowpart_subreg (mode, x, wmode);
867 case PLUS:
868 case MINUS:
869 case MULT:
870 op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
871 op1 = use_narrower_mode (XEXP (x, 1), mode, wmode);
872 return simplify_gen_binary (GET_CODE (x), mode, op0, op1);
873 case ASHIFT:
874 op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
875 return simplify_gen_binary (ASHIFT, mode, op0, XEXP (x, 1));
876 default:
877 gcc_unreachable ();
878 }
879 }
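
/* A hand-worked example: narrowing (plus:DI (reg:DI A) (const_int 1))
   from DImode to SImode recurses into both operands and yields

     (plus:SI (subreg:SI (reg:DI A) 0) (const_int 1))

   (subreg byte 0 assuming little endian); the subreg is pushed down
   into the operands instead of wrapping the whole sum.  */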
880
881 /* Helper function for adjusting used MEMs. */
882
883 static rtx
884 adjust_mems (rtx loc, const_rtx old_rtx, void *data)
885 {
886 struct adjust_mem_data *amd = (struct adjust_mem_data *) data;
887 rtx mem, addr = loc, tem;
888 enum machine_mode mem_mode_save;
889 bool store_save;
890 switch (GET_CODE (loc))
891 {
892 case REG:
893 /* Don't do any sp or fp replacements outside of MEM addresses
894 on the LHS. */
895 if (amd->mem_mode == VOIDmode && amd->store)
896 return loc;
897 if (loc == stack_pointer_rtx
898 && !frame_pointer_needed
899 && cfa_base_rtx)
900 return compute_cfa_pointer (amd->stack_adjust);
901 else if (loc == hard_frame_pointer_rtx
902 && frame_pointer_needed
903 && hard_frame_pointer_adjustment != -1
904 && cfa_base_rtx)
905 return compute_cfa_pointer (hard_frame_pointer_adjustment);
906 gcc_checking_assert (loc != virtual_incoming_args_rtx);
907 return loc;
908 case MEM:
909 mem = loc;
910 if (!amd->store)
911 {
912 mem = targetm.delegitimize_address (mem);
913 if (mem != loc && !MEM_P (mem))
914 return simplify_replace_fn_rtx (mem, old_rtx, adjust_mems, data);
915 }
916
917 addr = XEXP (mem, 0);
918 mem_mode_save = amd->mem_mode;
919 amd->mem_mode = GET_MODE (mem);
920 store_save = amd->store;
921 amd->store = false;
922 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
923 amd->store = store_save;
924 amd->mem_mode = mem_mode_save;
925 if (mem == loc)
926 addr = targetm.delegitimize_address (addr);
927 if (addr != XEXP (mem, 0))
928 mem = replace_equiv_address_nv (mem, addr);
929 if (!amd->store)
930 mem = avoid_constant_pool_reference (mem);
931 return mem;
932 case PRE_INC:
933 case PRE_DEC:
934 addr = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
935 GEN_INT (GET_CODE (loc) == PRE_INC
936 ? GET_MODE_SIZE (amd->mem_mode)
937 : -GET_MODE_SIZE (amd->mem_mode)));
938 case POST_INC:
939 case POST_DEC:
940 if (addr == loc)
941 addr = XEXP (loc, 0);
942 gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode);
943 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
944 tem = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
945 GEN_INT ((GET_CODE (loc) == PRE_INC
946 || GET_CODE (loc) == POST_INC)
947 ? GET_MODE_SIZE (amd->mem_mode)
948 : -GET_MODE_SIZE (amd->mem_mode)));
949 amd->side_effects = alloc_EXPR_LIST (0,
950 gen_rtx_SET (VOIDmode,
951 XEXP (loc, 0),
952 tem),
953 amd->side_effects);
954 return addr;
955 case PRE_MODIFY:
956 addr = XEXP (loc, 1);
957 case POST_MODIFY:
958 if (addr == loc)
959 addr = XEXP (loc, 0);
960 gcc_assert (amd->mem_mode != VOIDmode);
961 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
962 amd->side_effects = alloc_EXPR_LIST (0,
963 gen_rtx_SET (VOIDmode,
964 XEXP (loc, 0),
965 XEXP (loc, 1)),
966 amd->side_effects);
967 return addr;
968 case SUBREG:
969 /* First try without delegitimization of whole MEMs and
970 avoid_constant_pool_reference, which is more likely to succeed. */
971 store_save = amd->store;
972 amd->store = true;
973 addr = simplify_replace_fn_rtx (SUBREG_REG (loc), old_rtx, adjust_mems,
974 data);
975 amd->store = store_save;
976 mem = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
977 if (mem == SUBREG_REG (loc))
978 {
979 tem = loc;
980 goto finish_subreg;
981 }
982 tem = simplify_gen_subreg (GET_MODE (loc), mem,
983 GET_MODE (SUBREG_REG (loc)),
984 SUBREG_BYTE (loc));
985 if (tem)
986 goto finish_subreg;
987 tem = simplify_gen_subreg (GET_MODE (loc), addr,
988 GET_MODE (SUBREG_REG (loc)),
989 SUBREG_BYTE (loc));
990 if (tem == NULL_RTX)
991 tem = gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc));
992 finish_subreg:
993 if (MAY_HAVE_DEBUG_INSNS
994 && GET_CODE (tem) == SUBREG
995 && (GET_CODE (SUBREG_REG (tem)) == PLUS
996 || GET_CODE (SUBREG_REG (tem)) == MINUS
997 || GET_CODE (SUBREG_REG (tem)) == MULT
998 || GET_CODE (SUBREG_REG (tem)) == ASHIFT)
999 && GET_MODE_CLASS (GET_MODE (tem)) == MODE_INT
1000 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_INT
1001 && GET_MODE_SIZE (GET_MODE (tem))
1002 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (tem)))
1003 && subreg_lowpart_p (tem)
1004 && !for_each_rtx (&SUBREG_REG (tem), use_narrower_mode_test, tem))
1005 return use_narrower_mode (SUBREG_REG (tem), GET_MODE (tem),
1006 GET_MODE (SUBREG_REG (tem)));
1007 return tem;
1008 case ASM_OPERANDS:
1009 /* Don't do any replacements in the second and following
1010 ASM_OPERANDS of an inline asm with multiple sets.
1011 ASM_OPERANDS_INPUT_VEC, ASM_OPERANDS_INPUT_CONSTRAINT_VEC
1012 and ASM_OPERANDS_LABEL_VEC need to be equal between
1013 all the ASM_OPERANDs in the insn and adjust_insn will
1014 fix this up. */
1015 if (ASM_OPERANDS_OUTPUT_IDX (loc) != 0)
1016 return loc;
1017 break;
1018 default:
1019 break;
1020 }
1021 return NULL_RTX;
1022 }
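
/* As a hand-written illustration: when this callback meets
   (mem:SI (post_inc (reg sp))), the address is rewritten to plain
   (reg sp) and a side effect

     (set (reg sp) (plus (reg sp) (const_int 4)))

   is queued on amd->side_effects, which adjust_insn later appends to
   the instruction pattern as an additional set.  */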
1023
1024 /* Helper function for replacement of uses. */
1025
1026 static void
1027 adjust_mem_uses (rtx *x, void *data)
1028 {
1029 rtx new_x = simplify_replace_fn_rtx (*x, NULL_RTX, adjust_mems, data);
1030 if (new_x != *x)
1031 validate_change (NULL_RTX, x, new_x, true);
1032 }
1033
1034 /* Helper function for replacement of stores. */
1035
1036 static void
1037 adjust_mem_stores (rtx loc, const_rtx expr, void *data)
1038 {
1039 if (MEM_P (loc))
1040 {
1041 rtx new_dest = simplify_replace_fn_rtx (SET_DEST (expr), NULL_RTX,
1042 adjust_mems, data);
1043 if (new_dest != SET_DEST (expr))
1044 {
1045 rtx xexpr = CONST_CAST_RTX (expr);
1046 validate_change (NULL_RTX, &SET_DEST (xexpr), new_dest, true);
1047 }
1048 }
1049 }
1050
1051 /* Simplify INSN. Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
1052 replace them with their value in the insn and add the side-effects
1053 as other sets to the insn. */
1054
1055 static void
1056 adjust_insn (basic_block bb, rtx insn)
1057 {
1058 struct adjust_mem_data amd;
1059 rtx set;
1060
1061 #ifdef HAVE_window_save
1062 /* If the target machine has an explicit window save instruction, the
1063 transformation OUTGOING_REGNO -> INCOMING_REGNO is done there. */
1064 if (RTX_FRAME_RELATED_P (insn)
1065 && find_reg_note (insn, REG_CFA_WINDOW_SAVE, NULL_RTX))
1066 {
1067 unsigned int i, nregs = VEC_length(parm_reg_t, windowed_parm_regs);
1068 rtx rtl = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nregs * 2));
1069 parm_reg_t *p;
1070
1071 FOR_EACH_VEC_ELT (parm_reg_t, windowed_parm_regs, i, p)
1072 {
1073 XVECEXP (rtl, 0, i * 2)
1074 = gen_rtx_SET (VOIDmode, p->incoming, p->outgoing);
1075 /* Do not clobber the attached DECL, but only the REG. */
1076 XVECEXP (rtl, 0, i * 2 + 1)
1077 = gen_rtx_CLOBBER (GET_MODE (p->outgoing),
1078 gen_raw_REG (GET_MODE (p->outgoing),
1079 REGNO (p->outgoing)));
1080 }
1081
1082 validate_change (NULL_RTX, &PATTERN (insn), rtl, true);
1083 return;
1084 }
1085 #endif
1086
1087 amd.mem_mode = VOIDmode;
1088 amd.stack_adjust = -VTI (bb)->out.stack_adjust;
1089 amd.side_effects = NULL_RTX;
1090
1091 amd.store = true;
1092 note_stores (PATTERN (insn), adjust_mem_stores, &amd);
1093
1094 amd.store = false;
1095 if (GET_CODE (PATTERN (insn)) == PARALLEL
1096 && asm_noperands (PATTERN (insn)) > 0
1097 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1098 {
1099 rtx body, set0;
1100 int i;
1101
1102 /* An inline asm with multiple sets is a tiny bit more complicated,
1103 because the 3 vectors in ASM_OPERANDS need to be shared between
1104 all the ASM_OPERANDS in the instruction. adjust_mems will
1105 not touch ASM_OPERANDS other than the first one; the asm_noperands
1106 test above needs to be called before that (otherwise it would fail),
1107 and afterwards this code fixes it up. */
1108 note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
1109 body = PATTERN (insn);
1110 set0 = XVECEXP (body, 0, 0);
1111 gcc_checking_assert (GET_CODE (set0) == SET
1112 && GET_CODE (SET_SRC (set0)) == ASM_OPERANDS
1113 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set0)) == 0);
1114 for (i = 1; i < XVECLEN (body, 0); i++)
1115 if (GET_CODE (XVECEXP (body, 0, i)) != SET)
1116 break;
1117 else
1118 {
1119 set = XVECEXP (body, 0, i);
1120 gcc_checking_assert (GET_CODE (SET_SRC (set)) == ASM_OPERANDS
1121 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set))
1122 == i);
1123 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (set))
1124 != ASM_OPERANDS_INPUT_VEC (SET_SRC (set0))
1125 || ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set))
1126 != ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0))
1127 || ASM_OPERANDS_LABEL_VEC (SET_SRC (set))
1128 != ASM_OPERANDS_LABEL_VEC (SET_SRC (set0)))
1129 {
1130 rtx newsrc = shallow_copy_rtx (SET_SRC (set));
1131 ASM_OPERANDS_INPUT_VEC (newsrc)
1132 = ASM_OPERANDS_INPUT_VEC (SET_SRC (set0));
1133 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (newsrc)
1134 = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0));
1135 ASM_OPERANDS_LABEL_VEC (newsrc)
1136 = ASM_OPERANDS_LABEL_VEC (SET_SRC (set0));
1137 validate_change (NULL_RTX, &SET_SRC (set), newsrc, true);
1138 }
1139 }
1140 }
1141 else
1142 note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
1143
1144 /* For read-only MEMs containing some constant, prefer those
1145 constants. */
1146 set = single_set (insn);
1147 if (set && MEM_P (SET_SRC (set)) && MEM_READONLY_P (SET_SRC (set)))
1148 {
1149 rtx note = find_reg_equal_equiv_note (insn);
1150
1151 if (note && CONSTANT_P (XEXP (note, 0)))
1152 validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true);
1153 }
1154
1155 if (amd.side_effects)
1156 {
1157 rtx *pat, new_pat, s;
1158 int i, oldn, newn;
1159
1160 pat = &PATTERN (insn);
1161 if (GET_CODE (*pat) == COND_EXEC)
1162 pat = &COND_EXEC_CODE (*pat);
1163 if (GET_CODE (*pat) == PARALLEL)
1164 oldn = XVECLEN (*pat, 0);
1165 else
1166 oldn = 1;
1167 for (s = amd.side_effects, newn = 0; s; newn++)
1168 s = XEXP (s, 1);
1169 new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn));
1170 if (GET_CODE (*pat) == PARALLEL)
1171 for (i = 0; i < oldn; i++)
1172 XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i);
1173 else
1174 XVECEXP (new_pat, 0, 0) = *pat;
1175 for (s = amd.side_effects, i = oldn; i < oldn + newn; i++, s = XEXP (s, 1))
1176 XVECEXP (new_pat, 0, i) = XEXP (s, 0);
1177 free_EXPR_LIST_list (&amd.side_effects);
1178 validate_change (NULL_RTX, pat, new_pat, true);
1179 }
1180 }
1181
1182 /* Return true if a decl_or_value DV is a DECL or NULL. */
1183 static inline bool
1184 dv_is_decl_p (decl_or_value dv)
1185 {
1186 return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE;
1187 }
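
/* A simplified restatement of the tagging trick used above:

     decl_or_value dv = ...;
     if ((int) TREE_CODE ((tree) dv) == (int) VALUE)
       the pointer is really an rtx VALUE;
     else
       the pointer is a tree declaration (or NULL);

   This is only safe because of the check_value_val assertion near the
   top of the file.  */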
1188
1189 /* Return true if a decl_or_value is a VALUE rtl. */
1190 static inline bool
1191 dv_is_value_p (decl_or_value dv)
1192 {
1193 return dv && !dv_is_decl_p (dv);
1194 }
1195
1196 /* Return the decl in the decl_or_value. */
1197 static inline tree
1198 dv_as_decl (decl_or_value dv)
1199 {
1200 gcc_checking_assert (dv_is_decl_p (dv));
1201 return (tree) dv;
1202 }
1203
1204 /* Return the value in the decl_or_value. */
1205 static inline rtx
1206 dv_as_value (decl_or_value dv)
1207 {
1208 gcc_checking_assert (dv_is_value_p (dv));
1209 return (rtx)dv;
1210 }
1211
1212 /* Return the DEBUG_EXPR of a DEBUG_EXPR_DECL or the VALUE in DV. */
1213 static inline rtx
1214 dv_as_rtx (decl_or_value dv)
1215 {
1216 tree decl;
1217
1218 if (dv_is_value_p (dv))
1219 return dv_as_value (dv);
1220
1221 decl = dv_as_decl (dv);
1222
1223 gcc_checking_assert (TREE_CODE (decl) == DEBUG_EXPR_DECL);
1224 return DECL_RTL_KNOWN_SET (decl);
1225 }
1226
1227 /* Return the opaque pointer in the decl_or_value. */
1228 static inline void *
1229 dv_as_opaque (decl_or_value dv)
1230 {
1231 return dv;
1232 }
1233
1234 /* Return nonzero if a decl_or_value must not have more than one
1235 variable part. The returned value discriminates among various
1236 kinds of one-part DVs according to enum onepart_enum. */
1237 static inline onepart_enum_t
1238 dv_onepart_p (decl_or_value dv)
1239 {
1240 tree decl;
1241
1242 if (!MAY_HAVE_DEBUG_INSNS)
1243 return NOT_ONEPART;
1244
1245 if (dv_is_value_p (dv))
1246 return ONEPART_VALUE;
1247
1248 decl = dv_as_decl (dv);
1249
1250 if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
1251 return ONEPART_DEXPR;
1252
1253 if (target_for_debug_bind (decl) != NULL_TREE)
1254 return ONEPART_VDECL;
1255
1256 return NOT_ONEPART;
1257 }
1258
1259 /* Return the variable pool to be used for a dv of type ONEPART. */
1260 static inline alloc_pool
1261 onepart_pool (onepart_enum_t onepart)
1262 {
1263 return onepart ? valvar_pool : var_pool;
1264 }
1265
1266 /* Build a decl_or_value out of a decl. */
1267 static inline decl_or_value
1268 dv_from_decl (tree decl)
1269 {
1270 decl_or_value dv;
1271 dv = decl;
1272 gcc_checking_assert (dv_is_decl_p (dv));
1273 return dv;
1274 }
1275
1276 /* Build a decl_or_value out of a value. */
1277 static inline decl_or_value
1278 dv_from_value (rtx value)
1279 {
1280 decl_or_value dv;
1281 dv = value;
1282 gcc_checking_assert (dv_is_value_p (dv));
1283 return dv;
1284 }
1285
1286 /* Return a value or the decl of a debug_expr as a decl_or_value. */
1287 static inline decl_or_value
1288 dv_from_rtx (rtx x)
1289 {
1290 decl_or_value dv;
1291
1292 switch (GET_CODE (x))
1293 {
1294 case DEBUG_EXPR:
1295 dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x));
1296 gcc_checking_assert (DECL_RTL_KNOWN_SET (DEBUG_EXPR_TREE_DECL (x)) == x);
1297 break;
1298
1299 case VALUE:
1300 dv = dv_from_value (x);
1301 break;
1302
1303 default:
1304 gcc_unreachable ();
1305 }
1306
1307 return dv;
1308 }
1309
1310 extern void debug_dv (decl_or_value dv);
1311
1312 DEBUG_FUNCTION void
1313 debug_dv (decl_or_value dv)
1314 {
1315 if (dv_is_value_p (dv))
1316 debug_rtx (dv_as_value (dv));
1317 else
1318 debug_generic_stmt (dv_as_decl (dv));
1319 }
1320
1321 typedef unsigned int dvuid;
1322
1323 /* Return the uid of DV. */
1324
1325 static inline dvuid
1326 dv_uid (decl_or_value dv)
1327 {
1328 if (dv_is_value_p (dv))
1329 return CSELIB_VAL_PTR (dv_as_value (dv))->uid;
1330 else
1331 return DECL_UID (dv_as_decl (dv));
1332 }
1333
1334 /* Compute the hash from the uid. */
1335
1336 static inline hashval_t
1337 dv_uid2hash (dvuid uid)
1338 {
1339 return uid;
1340 }
1341
1342 /* The hash function for a decl_or_value, used by the variable hash tables. */
1343
1344 static inline hashval_t
1345 dv_htab_hash (decl_or_value dv)
1346 {
1347 return dv_uid2hash (dv_uid (dv));
1348 }
1349
1350 /* The hash function for variable_htab, computes the hash value
1351 from the declaration of variable X. */
1352
1353 static hashval_t
1354 variable_htab_hash (const void *x)
1355 {
1356 const_variable const v = (const_variable) x;
1357
1358 return dv_htab_hash (v->dv);
1359 }
1360
1361 /* Compare the declaration of variable X with declaration Y. */
1362
1363 static int
1364 variable_htab_eq (const void *x, const void *y)
1365 {
1366 const_variable const v = (const_variable) x;
1367 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
1368
1369 return (dv_as_opaque (v->dv) == dv_as_opaque (dv));
1370 }
1371
1372 static void loc_exp_dep_clear (variable var);
1373
1374 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
1375
1376 static void
1377 variable_htab_free (void *elem)
1378 {
1379 int i;
1380 variable var = (variable) elem;
1381 location_chain node, next;
1382
1383 gcc_checking_assert (var->refcount > 0);
1384
1385 var->refcount--;
1386 if (var->refcount > 0)
1387 return;
1388
1389 for (i = 0; i < var->n_var_parts; i++)
1390 {
1391 for (node = var->var_part[i].loc_chain; node; node = next)
1392 {
1393 next = node->next;
1394 pool_free (loc_chain_pool, node);
1395 }
1396 var->var_part[i].loc_chain = NULL;
1397 }
1398 if (var->onepart && VAR_LOC_1PAUX (var))
1399 {
1400 loc_exp_dep_clear (var);
1401 if (VAR_LOC_DEP_LST (var))
1402 VAR_LOC_DEP_LST (var)->pprev = NULL;
1403 XDELETE (VAR_LOC_1PAUX (var));
1404 /* These may be reused across functions, so reset
1405 e.g. NO_LOC_P. */
1406 if (var->onepart == ONEPART_DEXPR)
1407 set_dv_changed (var->dv, true);
1408 }
1409 pool_free (onepart_pool (var->onepart), var);
1410 }
1411
1412 /* Initialize the set (array) SET of attrs to empty lists. */
1413
1414 static void
1415 init_attrs_list_set (attrs *set)
1416 {
1417 int i;
1418
1419 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1420 set[i] = NULL;
1421 }
1422
1423 /* Make the list *LISTP empty. */
1424
1425 static void
1426 attrs_list_clear (attrs *listp)
1427 {
1428 attrs list, next;
1429
1430 for (list = *listp; list; list = next)
1431 {
1432 next = list->next;
1433 pool_free (attrs_pool, list);
1434 }
1435 *listp = NULL;
1436 }
1437
1438 /* Return the element of LIST whose DV and OFFSET match, or NULL if none does. */
1439
1440 static attrs
1441 attrs_list_member (attrs list, decl_or_value dv, HOST_WIDE_INT offset)
1442 {
1443 for (; list; list = list->next)
1444 if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
1445 return list;
1446 return NULL;
1447 }
1448
1449 /* Insert the triplet DV, OFFSET, LOC into the list *LISTP. */
1450
1451 static void
1452 attrs_list_insert (attrs *listp, decl_or_value dv,
1453 HOST_WIDE_INT offset, rtx loc)
1454 {
1455 attrs list;
1456
1457 list = (attrs) pool_alloc (attrs_pool);
1458 list->loc = loc;
1459 list->dv = dv;
1460 list->offset = offset;
1461 list->next = *listp;
1462 *listp = list;
1463 }
1464
1465 /* Copy all nodes from SRC and create a list *DSTP of the copies. */
1466
1467 static void
1468 attrs_list_copy (attrs *dstp, attrs src)
1469 {
1470 attrs n;
1471
1472 attrs_list_clear (dstp);
1473 for (; src; src = src->next)
1474 {
1475 n = (attrs) pool_alloc (attrs_pool);
1476 n->loc = src->loc;
1477 n->dv = src->dv;
1478 n->offset = src->offset;
1479 n->next = *dstp;
1480 *dstp = n;
1481 }
1482 }
1483
1484 /* Add all nodes from SRC which are not in *DSTP to *DSTP. */
1485
1486 static void
1487 attrs_list_union (attrs *dstp, attrs src)
1488 {
1489 for (; src; src = src->next)
1490 {
1491 if (!attrs_list_member (*dstp, src->dv, src->offset))
1492 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1493 }
1494 }
1495
1496 /* Combine nodes that are not onepart nodes from SRC and SRC2 into
1497 *DSTP. */
1498
1499 static void
1500 attrs_list_mpdv_union (attrs *dstp, attrs src, attrs src2)
1501 {
1502 gcc_assert (!*dstp);
1503 for (; src; src = src->next)
1504 {
1505 if (!dv_onepart_p (src->dv))
1506 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1507 }
1508 for (src = src2; src; src = src->next)
1509 {
1510 if (!dv_onepart_p (src->dv)
1511 && !attrs_list_member (*dstp, src->dv, src->offset))
1512 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1513 }
1514 }
1515
1516 /* Shared hashtable support. */
1517
1518 /* Return true if VARS is shared. */
1519
1520 static inline bool
1521 shared_hash_shared (shared_hash vars)
1522 {
1523 return vars->refcount > 1;
1524 }
1525
1526 /* Return the hash table for VARS. */
1527
1528 static inline htab_t
1529 shared_hash_htab (shared_hash vars)
1530 {
1531 return vars->htab;
1532 }
1533
1534 /* Return true if VAR is shared, possibly because VARS is shared. */
1535
1536 static inline bool
1537 shared_var_p (variable var, shared_hash vars)
1538 {
1539 /* Don't count an entry in the changed_variables table as a duplicate. */
1540 return ((var->refcount > 1 + (int) var->in_changed_variables)
1541 || shared_hash_shared (vars));
1542 }
1543
1544 /* Copy variables into a new hash table. */
1545
1546 static shared_hash
1547 shared_hash_unshare (shared_hash vars)
1548 {
1549 shared_hash new_vars = (shared_hash) pool_alloc (shared_hash_pool);
1550 gcc_assert (vars->refcount > 1);
1551 new_vars->refcount = 1;
1552 new_vars->htab
1553 = htab_create (htab_elements (vars->htab) + 3, variable_htab_hash,
1554 variable_htab_eq, variable_htab_free);
1555 vars_copy (new_vars->htab, vars->htab);
1556 vars->refcount--;
1557 return new_vars;
1558 }
1559
1560 /* Increment reference counter on VARS and return it. */
1561
1562 static inline shared_hash
1563 shared_hash_copy (shared_hash vars)
1564 {
1565 vars->refcount++;
1566 return vars;
1567 }
1568
1569 /* Decrement reference counter and destroy hash table if not shared
1570 anymore. */
1571
1572 static void
1573 shared_hash_destroy (shared_hash vars)
1574 {
1575 gcc_checking_assert (vars->refcount > 0);
1576 if (--vars->refcount == 0)
1577 {
1578 htab_delete (vars->htab);
1579 pool_free (shared_hash_pool, vars);
1580 }
1581 }
1582
1583 /* Unshare *PVARS if shared and return slot for DV. If INS is
1584 INSERT, insert it if not already present. */
1585
1586 static inline void **
1587 shared_hash_find_slot_unshare_1 (shared_hash *pvars, decl_or_value dv,
1588 hashval_t dvhash, enum insert_option ins)
1589 {
1590 if (shared_hash_shared (*pvars))
1591 *pvars = shared_hash_unshare (*pvars);
1592 return htab_find_slot_with_hash (shared_hash_htab (*pvars), dv, dvhash, ins);
1593 }
1594
1595 static inline void **
1596 shared_hash_find_slot_unshare (shared_hash *pvars, decl_or_value dv,
1597 enum insert_option ins)
1598 {
1599 return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
1600 }
1601
1602 /* Return slot for DV, if it is already present in the hash table.
1603 If it is not present, insert it only if VARS is not shared; otherwise
1604 return NULL. */
1605
1606 static inline void **
1607 shared_hash_find_slot_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1608 {
1609 return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
1610 shared_hash_shared (vars)
1611 ? NO_INSERT : INSERT);
1612 }
1613
1614 static inline void **
1615 shared_hash_find_slot (shared_hash vars, decl_or_value dv)
1616 {
1617 return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
1618 }
1619
1620 /* Return slot for DV only if it is already present in the hash table. */
1621
1622 static inline void **
1623 shared_hash_find_slot_noinsert_1 (shared_hash vars, decl_or_value dv,
1624 hashval_t dvhash)
1625 {
1626 return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
1627 NO_INSERT);
1628 }
1629
1630 static inline void **
1631 shared_hash_find_slot_noinsert (shared_hash vars, decl_or_value dv)
1632 {
1633 return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
1634 }
1635
1636 /* Return variable for DV or NULL if not already present in the hash
1637 table. */
1638
1639 static inline variable
1640 shared_hash_find_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1641 {
1642 return (variable) htab_find_with_hash (shared_hash_htab (vars), dv, dvhash);
1643 }
1644
1645 static inline variable
1646 shared_hash_find (shared_hash vars, decl_or_value dv)
1647 {
1648 return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
1649 }
1650
1651 /* Return true if TVAL is better than CVAL as a canonical value. We
1652 choose lowest-numbered VALUEs, using the RTX address as a
1653 tie-breaker. The idea is to arrange them into a star topology,
1654 such that all of them are at most one step away from the canonical
1655 value, and the canonical value has backlinks to all of them, in
1656 addition to all the actual locations. We don't enforce this
1657 topology throughout the entire dataflow analysis, though.
1658 */
1659
1660 static inline bool
1661 canon_value_cmp (rtx tval, rtx cval)
1662 {
1663 return !cval
1664 || CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
1665 }
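
/* E.g. with made-up uids: for TVAL with uid 3 and CVAL with uid 7,
   canon_value_cmp returns true, so the lower-numbered (earlier-created)
   VALUE wins as the canonical one.  */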
1666
1667 static bool dst_can_be_shared;
1668
1669 /* Return a copy of a variable VAR and insert it into dataflow set SET. */
1670
1671 static void **
1672 unshare_variable (dataflow_set *set, void **slot, variable var,
1673 enum var_init_status initialized)
1674 {
1675 variable new_var;
1676 int i;
1677
1678 new_var = (variable) pool_alloc (onepart_pool (var->onepart));
1679 new_var->dv = var->dv;
1680 new_var->refcount = 1;
1681 var->refcount--;
1682 new_var->n_var_parts = var->n_var_parts;
1683 new_var->onepart = var->onepart;
1684 new_var->in_changed_variables = false;
1685
1686 if (! flag_var_tracking_uninit)
1687 initialized = VAR_INIT_STATUS_INITIALIZED;
1688
1689 for (i = 0; i < var->n_var_parts; i++)
1690 {
1691 location_chain node;
1692 location_chain *nextp;
1693
1694 if (i == 0 && var->onepart)
1695 {
1696 /* One-part auxiliary data is only used while emitting
1697 notes, so propagate it to the new variable in the active
1698 dataflow set. If we're not emitting notes, this will be
1699 a no-op. */
1700 gcc_checking_assert (!VAR_LOC_1PAUX (var) || emit_notes);
1701 VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (var);
1702 VAR_LOC_1PAUX (var) = NULL;
1703 }
1704 else
1705 VAR_PART_OFFSET (new_var, i) = VAR_PART_OFFSET (var, i);
1706 nextp = &new_var->var_part[i].loc_chain;
1707 for (node = var->var_part[i].loc_chain; node; node = node->next)
1708 {
1709 location_chain new_lc;
1710
1711 new_lc = (location_chain) pool_alloc (loc_chain_pool);
1712 new_lc->next = NULL;
1713 if (node->init > initialized)
1714 new_lc->init = node->init;
1715 else
1716 new_lc->init = initialized;
1717 if (node->set_src && !(MEM_P (node->set_src)))
1718 new_lc->set_src = node->set_src;
1719 else
1720 new_lc->set_src = NULL;
1721 new_lc->loc = node->loc;
1722
1723 *nextp = new_lc;
1724 nextp = &new_lc->next;
1725 }
1726
1727 new_var->var_part[i].cur_loc = var->var_part[i].cur_loc;
1728 }
1729
1730 dst_can_be_shared = false;
1731 if (shared_hash_shared (set->vars))
1732 slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
1733 else if (set->traversed_vars && set->vars != set->traversed_vars)
1734 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
1735 *slot = new_var;
1736 if (var->in_changed_variables)
1737 {
1738 void **cslot
1739 = htab_find_slot_with_hash (changed_variables, var->dv,
1740 dv_htab_hash (var->dv), NO_INSERT);
1741 gcc_assert (*cslot == (void *) var);
1742 var->in_changed_variables = false;
1743 variable_htab_free (var);
1744 *cslot = new_var;
1745 new_var->in_changed_variables = true;
1746 }
1747 return slot;
1748 }
1749
1750 /* Copy all variables from hash table SRC to hash table DST. */
1751
1752 static void
1753 vars_copy (htab_t dst, htab_t src)
1754 {
1755 htab_iterator hi;
1756 variable var;
1757
1758 FOR_EACH_HTAB_ELEMENT (src, var, variable, hi)
1759 {
1760 void **dstp;
1761 var->refcount++;
1762 dstp = htab_find_slot_with_hash (dst, var->dv,
1763 dv_htab_hash (var->dv),
1764 INSERT);
1765 *dstp = var;
1766 }
1767 }
1768
1769 /* Map a decl to its main debug decl. */
1770
1771 static inline tree
1772 var_debug_decl (tree decl)
1773 {
1774 if (decl && DECL_P (decl)
1775 && DECL_DEBUG_EXPR_IS_FROM (decl))
1776 {
1777 tree debugdecl = DECL_DEBUG_EXPR (decl);
1778 if (debugdecl && DECL_P (debugdecl))
1779 decl = debugdecl;
1780 }
1781
1782 return decl;
1783 }
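
/* For example (names are hypothetical): if optimization created a
   temporary D.1234 for user variable x, with DECL_DEBUG_EXPR_IS_FROM
   set and DECL_DEBUG_EXPR pointing at x, var_debug_decl on D.1234
   returns the decl for x, so locations end up attributed to the
   user-visible variable.  */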
1784
1785 /* Set the register LOC to contain DV, OFFSET. */
1786
1787 static void
1788 var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1789 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1790 enum insert_option iopt)
1791 {
1792 attrs node;
1793 bool decl_p = dv_is_decl_p (dv);
1794
1795 if (decl_p)
1796 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1797
1798 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1799 if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
1800 && node->offset == offset)
1801 break;
1802 if (!node)
1803 attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
1804 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1805 }
1806
1807 /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
1808
1809 static void
1810 var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1811 rtx set_src)
1812 {
1813 tree decl = REG_EXPR (loc);
1814 HOST_WIDE_INT offset = REG_OFFSET (loc);
1815
1816 var_reg_decl_set (set, loc, initialized,
1817 dv_from_decl (decl), offset, set_src, INSERT);
1818 }
1819
1820 static enum var_init_status
1821 get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
1822 {
1823 variable var;
1824 int i;
1825 enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;
1826
1827 if (! flag_var_tracking_uninit)
1828 return VAR_INIT_STATUS_INITIALIZED;
1829
1830 var = shared_hash_find (set->vars, dv);
1831 if (var)
1832 {
1833 for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
1834 {
1835 location_chain nextp;
1836 for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
1837 if (rtx_equal_p (nextp->loc, loc))
1838 {
1839 ret_val = nextp->init;
1840 break;
1841 }
1842 }
1843 }
1844
1845 return ret_val;
1846 }
1847
1848 /* Delete current content of register LOC in dataflow set SET and set
1849 the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
1850 MODIFY is true, any other live copies of the same variable part are
1851 also deleted from the dataflow set, otherwise the variable part is
1852 assumed to be copied from another location holding the same
1853 part. */
1854
1855 static void
1856 var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1857 enum var_init_status initialized, rtx set_src)
1858 {
1859 tree decl = REG_EXPR (loc);
1860 HOST_WIDE_INT offset = REG_OFFSET (loc);
1861 attrs node, next;
1862 attrs *nextp;
1863
1864 decl = var_debug_decl (decl);
1865
1866 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1867 initialized = get_init_value (set, loc, dv_from_decl (decl));
1868
1869 nextp = &set->regs[REGNO (loc)];
1870 for (node = *nextp; node; node = next)
1871 {
1872 next = node->next;
1873 if (dv_as_opaque (node->dv) != decl || node->offset != offset)
1874 {
1875 delete_variable_part (set, node->loc, node->dv, node->offset);
1876 pool_free (attrs_pool, node);
1877 *nextp = next;
1878 }
1879 else
1880 {
1881 node->loc = loc;
1882 nextp = &node->next;
1883 }
1884 }
1885 if (modify)
1886 clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
1887 var_reg_set (set, loc, initialized, set_src);
1888 }
1889
1890 /* Delete the association of register LOC in dataflow set SET with any
1891 variables that aren't onepart. If CLOBBER is true, also delete any
1892 other live copies of the same variable part, and delete the
1893 association with onepart dvs too. */
1894
1895 static void
1896 var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
1897 {
1898 attrs *nextp = &set->regs[REGNO (loc)];
1899 attrs node, next;
1900
1901 if (clobber)
1902 {
1903 tree decl = REG_EXPR (loc);
1904 HOST_WIDE_INT offset = REG_OFFSET (loc);
1905
1906 decl = var_debug_decl (decl);
1907
1908 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
1909 }
1910
1911 for (node = *nextp; node; node = next)
1912 {
1913 next = node->next;
1914 if (clobber || !dv_onepart_p (node->dv))
1915 {
1916 delete_variable_part (set, node->loc, node->dv, node->offset);
1917 pool_free (attrs_pool, node);
1918 *nextp = next;
1919 }
1920 else
1921 nextp = &node->next;
1922 }
1923 }
1924
1925 /* Delete content of register with number REGNO in dataflow set SET. */
1926
1927 static void
1928 var_regno_delete (dataflow_set *set, int regno)
1929 {
1930 attrs *reg = &set->regs[regno];
1931 attrs node, next;
1932
1933 for (node = *reg; node; node = next)
1934 {
1935 next = node->next;
1936 delete_variable_part (set, node->loc, node->dv, node->offset);
1937 pool_free (attrs_pool, node);
1938 }
1939 *reg = NULL;
1940 }
1941
1942 /* Set the location of DV, OFFSET as the MEM LOC. */
1943
1944 static void
1945 var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1946 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1947 enum insert_option iopt)
1948 {
1949 if (dv_is_decl_p (dv))
1950 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1951
1952 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1953 }
1954
1955 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
1956 SET to LOC.
1957 Adjust the address first if it is stack pointer based. */
1958
1959 static void
1960 var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1961 rtx set_src)
1962 {
1963 tree decl = MEM_EXPR (loc);
1964 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1965
1966 var_mem_decl_set (set, loc, initialized,
1967 dv_from_decl (decl), offset, set_src, INSERT);
1968 }
1969
1970 /* Delete and set the location part of variable MEM_EXPR (LOC) in
1971 dataflow set SET to LOC. If MODIFY is true, any other live copies
1972 of the same variable part are also deleted from the dataflow set,
1973 otherwise the variable part is assumed to be copied from another
1974 location holding the same part.
1975 Adjust the address first if it is stack pointer based. */
1976
1977 static void
1978 var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1979 enum var_init_status initialized, rtx set_src)
1980 {
1981 tree decl = MEM_EXPR (loc);
1982 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1983
1984 decl = var_debug_decl (decl);
1985
1986 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1987 initialized = get_init_value (set, loc, dv_from_decl (decl));
1988
1989 if (modify)
1990 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
1991 var_mem_set (set, loc, initialized, set_src);
1992 }
1993
1994 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
1995 true, also delete any other live copies of the same variable part.
1996 Adjust the address first if it is stack pointer based. */
1997
1998 static void
1999 var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
2000 {
2001 tree decl = MEM_EXPR (loc);
2002 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2003
2004 decl = var_debug_decl (decl);
2005 if (clobber)
2006 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
2007 delete_variable_part (set, loc, dv_from_decl (decl), offset);
2008 }
2009
2010 /* Return true if LOC should not be expanded for location expressions,
2011 or used in them. */
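/* These codes either lack a run-time value of their own (PC,
   SCRATCH, CC0) or are not self-contained expressions (ASM_INPUT,
   ASM_OPERANDS), so presumably no debug location expression could
   describe them.  */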
2012
2013 static inline bool
2014 unsuitable_loc (rtx loc)
2015 {
2016 switch (GET_CODE (loc))
2017 {
2018 case PC:
2019 case SCRATCH:
2020 case CC0:
2021 case ASM_INPUT:
2022 case ASM_OPERANDS:
2023 return true;
2024
2025 default:
2026 return false;
2027 }
2028 }
2029
2030 /* Bind VAL to LOC in SET. If MODIFIED, detach LOC from any values
2031 bound to it. */
2032
2033 static inline void
2034 val_bind (dataflow_set *set, rtx val, rtx loc, bool modified)
2035 {
2036 if (REG_P (loc))
2037 {
2038 if (modified)
2039 var_regno_delete (set, REGNO (loc));
2040 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2041 dv_from_value (val), 0, NULL_RTX, INSERT);
2042 }
2043 else if (MEM_P (loc))
2044 {
2045 struct elt_loc_list *l = CSELIB_VAL_PTR (val)->locs;
2046
2047 if (l && GET_CODE (l->loc) == VALUE)
2048 l = canonical_cselib_val (CSELIB_VAL_PTR (l->loc))->locs;
2049
2050 /* If this MEM is a global constant, we don't need it in the
2051 dynamic tables. ??? We should test this before emitting the
2052 micro-op in the first place. */
2053 while (l)
2054 if (GET_CODE (l->loc) == MEM && XEXP (l->loc, 0) == XEXP (loc, 0))
2055 break;
2056 else
2057 l = l->next;
2058
2059 if (!l)
2060 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2061 dv_from_value (val), 0, NULL_RTX, INSERT);
2062 }
2063 else
2064 {
2065 /* Other kinds of equivalences are necessarily static, at least
2066 so long as we do not perform substitutions while merging
2067 expressions. */
2068 gcc_unreachable ();
2069 set_variable_part (set, loc, dv_from_value (val), 0,
2070 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2071 }
2072 }
2073
2074 /* Bind a value to a location it was just stored in. If MODIFIED
2075 holds, assume the location was modified, detaching it from any
2076 values bound to it. */
2077
2078 static void
2079 val_store (dataflow_set *set, rtx val, rtx loc, rtx insn, bool modified)
2080 {
2081 cselib_val *v = CSELIB_VAL_PTR (val);
2082
2083 gcc_assert (cselib_preserved_value_p (v));
2084
2085 if (dump_file)
2086 {
2087 fprintf (dump_file, "%i: ", insn ? INSN_UID (insn) : 0);
2088 print_inline_rtx (dump_file, loc, 0);
2089 fprintf (dump_file, " evaluates to ");
2090 print_inline_rtx (dump_file, val, 0);
2091 if (v->locs)
2092 {
2093 struct elt_loc_list *l;
2094 for (l = v->locs; l; l = l->next)
2095 {
2096 fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
2097 print_inline_rtx (dump_file, l->loc, 0);
2098 }
2099 }
2100 fprintf (dump_file, "\n");
2101 }
2102
2103 gcc_checking_assert (!unsuitable_loc (loc));
2104
2105 val_bind (set, val, loc, modified);
2106 }
2107
2108 /* Reset this node, detaching all its equivalences and redirecting
2109 them to the remaining canonical value, if any. */
2110
2111 static void
2112 val_reset (dataflow_set *set, decl_or_value dv)
2113 {
2114 variable var = shared_hash_find (set->vars, dv);
2115 location_chain node;
2116 rtx cval;
2117
2118 if (!var || !var->n_var_parts)
2119 return;
2120
2121 gcc_assert (var->n_var_parts == 1);
2122
2123 cval = NULL;
2124 for (node = var->var_part[0].loc_chain; node; node = node->next)
2125 if (GET_CODE (node->loc) == VALUE
2126 && canon_value_cmp (node->loc, cval))
2127 cval = node->loc;
2128
2129 for (node = var->var_part[0].loc_chain; node; node = node->next)
2130 if (GET_CODE (node->loc) == VALUE && cval != node->loc)
2131 {
2132 /* Redirect the equivalence link to the new canonical
2133 value, or simply remove it if it would point at
2134 itself. */
2135 if (cval)
2136 set_variable_part (set, cval, dv_from_value (node->loc),
2137 0, node->init, node->set_src, NO_INSERT);
2138 delete_variable_part (set, dv_as_value (dv),
2139 dv_from_value (node->loc), 0);
2140 }
2141
2142 if (cval)
2143 {
2144 decl_or_value cdv = dv_from_value (cval);
2145
2146 /* Keep the remaining values connected, accumulating links
2147 in the canonical value. */
2148 for (node = var->var_part[0].loc_chain; node; node = node->next)
2149 {
2150 if (node->loc == cval)
2151 continue;
2152 else if (GET_CODE (node->loc) == REG)
2153 var_reg_decl_set (set, node->loc, node->init, cdv, 0,
2154 node->set_src, NO_INSERT);
2155 else if (GET_CODE (node->loc) == MEM)
2156 var_mem_decl_set (set, node->loc, node->init, cdv, 0,
2157 node->set_src, NO_INSERT);
2158 else
2159 set_variable_part (set, node->loc, cdv, 0,
2160 node->init, node->set_src, NO_INSERT);
2161 }
2162 }
2163
2164 /* We remove this last, to make sure that the canonical value is not
2165 removed to the point of requiring reinsertion. */
2166 if (cval)
2167 delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);
2168
2169 clobber_variable_part (set, NULL, dv, 0, NULL);
2170 }
2171
2172 /* Find the values in a given location and map VAL to another
2173 value, if it is unique, or else add the location as one holding
2174 VAL. */
2175
2176 static void
2177 val_resolve (dataflow_set *set, rtx val, rtx loc, rtx insn)
2178 {
2179 decl_or_value dv = dv_from_value (val);
2180
2181 if (dump_file && (dump_flags & TDF_DETAILS))
2182 {
2183 if (insn)
2184 fprintf (dump_file, "%i: ", INSN_UID (insn));
2185 else
2186 fprintf (dump_file, "head: ");
2187 print_inline_rtx (dump_file, val, 0);
2188 fputs (" is at ", dump_file);
2189 print_inline_rtx (dump_file, loc, 0);
2190 fputc ('\n', dump_file);
2191 }
2192
2193 val_reset (set, dv);
2194
2195 gcc_checking_assert (!unsuitable_loc (loc));
2196
2197 if (REG_P (loc))
2198 {
2199 attrs node, found = NULL;
2200
2201 for (node = set->regs[REGNO (loc)]; node; node = node->next)
2202 if (dv_is_value_p (node->dv)
2203 && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
2204 {
2205 found = node;
2206
2207 /* Map incoming equivalences. ??? Wouldn't it be nice if
2208 we just started sharing the location lists? Maybe a
2209 circular list ending at the value itself or some
2210 such. */
2211 set_variable_part (set, dv_as_value (node->dv),
2212 dv_from_value (val), node->offset,
2213 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2214 set_variable_part (set, val, node->dv, node->offset,
2215 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2216 }
2217
2218 /* If we didn't find any equivalence, we need to remember that
2219 this value is held in the named register. */
2220 if (found)
2221 return;
2222 }
2223 /* ??? Attempt to find and merge equivalent MEMs or other
2224 expressions too. */
2225
2226 val_bind (set, val, loc, false);
2227 }
2228
2229 /* Initialize dataflow set SET to be empty. */
2231
2232 static void
2233 dataflow_set_init (dataflow_set *set)
2234 {
2235 init_attrs_list_set (set->regs);
2236 set->vars = shared_hash_copy (empty_shared_hash);
2237 set->stack_adjust = 0;
2238 set->traversed_vars = NULL;
2239 }
2240
2241 /* Delete the contents of dataflow set SET. */
2242
2243 static void
2244 dataflow_set_clear (dataflow_set *set)
2245 {
2246 int i;
2247
2248 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2249 attrs_list_clear (&set->regs[i]);
2250
2251 shared_hash_destroy (set->vars);
2252 set->vars = shared_hash_copy (empty_shared_hash);
2253 }
2254
2255 /* Copy the contents of dataflow set SRC to DST. */
2256
2257 static void
2258 dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
2259 {
2260 int i;
2261
2262 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2263 attrs_list_copy (&dst->regs[i], src->regs[i]);
2264
2265 shared_hash_destroy (dst->vars);
2266 dst->vars = shared_hash_copy (src->vars);
2267 dst->stack_adjust = src->stack_adjust;
2268 }
2269
2270 /* Information for merging lists of locations for a given offset
2271 of a variable. */
2272 struct variable_union_info
2273 {
2274 /* Node of the location chain. */
2275 location_chain lc;
2276
2277 /* The sum of positions in the input chains. */
2278 int pos;
2279
2280 /* The position in the chain of DST dataflow set. */
2281 int pos_dst;
2282 };
2283
2284 /* Buffer for location list sorting and its allocated size. */
2285 static struct variable_union_info *vui_vec;
2286 static int vui_allocated;
2287
2288 /* Compare function for qsort; order the structures by POS, breaking ties by POS_DST. */
2289
2290 static int
2291 variable_union_info_cmp_pos (const void *n1, const void *n2)
2292 {
2293 const struct variable_union_info *const i1 =
2294 (const struct variable_union_info *) n1;
2295 const struct variable_union_info *const i2 =
2296 (const struct variable_union_info *) n2;
2297
2298 if (i1->pos != i2->pos)
2299 return i1->pos - i2->pos;
2300
2301 return (i1->pos_dst - i2->pos_dst);
2302 }
2303
2304 /* Compute the union of the location parts of variable SRC and of the
2305 same variable from dataflow set SET. Compute the "sorted" union of
2306 the location chains for common offsets, i.e. the locations of a
2307 variable part are sorted by a priority which is the sum of their
2308 positions in the two chains (if a location is in only one chain, its
2309 position in the other is defined to be larger than the chain lengths).
2310 When we are updating the location parts, the newest location is at the
2311 beginning of the chain, so the "sorted" union described above keeps
2312 the newest locations at the beginning. */
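/* For example (an illustration, not from the sources): unioning the
   SRC chain [A, B] with the DST chain [B, C] gives B priority
   0 + 1 = 1, while A and C, each present in only one chain, get
   penalty positions of at least src_l + dst_l, so the resulting
   chain is [B, A, C].  */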
2313
2314 static int
2315 variable_union (variable src, dataflow_set *set)
2316 {
2317 variable dst;
2318 void **dstp;
2319 int i, j, k;
2320
2321 dstp = shared_hash_find_slot (set->vars, src->dv);
2322 if (!dstp || !*dstp)
2323 {
2324 src->refcount++;
2325
2326 dst_can_be_shared = false;
2327 if (!dstp)
2328 dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);
2329
2330 *dstp = src;
2331
2332 /* Continue traversing the hash table. */
2333 return 1;
2334 }
2335 else
2336 dst = (variable) *dstp;
2337
2338 gcc_assert (src->n_var_parts);
2339 gcc_checking_assert (src->onepart == dst->onepart);
2340
2341 /* We can combine one-part variables very efficiently, because their
2342 entries are in canonical order. */
2343 if (src->onepart)
2344 {
2345 location_chain *nodep, dnode, snode;
2346
2347 gcc_assert (src->n_var_parts == 1
2348 && dst->n_var_parts == 1);
2349
2350 snode = src->var_part[0].loc_chain;
2351 gcc_assert (snode);
2352
2353 restart_onepart_unshared:
2354 nodep = &dst->var_part[0].loc_chain;
2355 dnode = *nodep;
2356 gcc_assert (dnode);
2357
2358 while (snode)
2359 {
2360 int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;
2361
2362 if (r > 0)
2363 {
2364 location_chain nnode;
2365
2366 if (shared_var_p (dst, set->vars))
2367 {
2368 dstp = unshare_variable (set, dstp, dst,
2369 VAR_INIT_STATUS_INITIALIZED);
2370 dst = (variable)*dstp;
2371 goto restart_onepart_unshared;
2372 }
2373
2374 *nodep = nnode = (location_chain) pool_alloc (loc_chain_pool);
2375 nnode->loc = snode->loc;
2376 nnode->init = snode->init;
2377 if (!snode->set_src || MEM_P (snode->set_src))
2378 nnode->set_src = NULL;
2379 else
2380 nnode->set_src = snode->set_src;
2381 nnode->next = dnode;
2382 dnode = nnode;
2383 }
2384 else if (r == 0)
2385 gcc_checking_assert (rtx_equal_p (dnode->loc, snode->loc));
2386
2387 if (r >= 0)
2388 snode = snode->next;
2389
2390 nodep = &dnode->next;
2391 dnode = *nodep;
2392 }
2393
2394 return 1;
2395 }
2396
2397 gcc_checking_assert (!src->onepart);
2398
2399 /* Count the number of location parts, result is K. */
2400 for (i = 0, j = 0, k = 0;
2401 i < src->n_var_parts && j < dst->n_var_parts; k++)
2402 {
2403 if (VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
2404 {
2405 i++;
2406 j++;
2407 }
2408 else if (VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
2409 i++;
2410 else
2411 j++;
2412 }
2413 k += src->n_var_parts - i;
2414 k += dst->n_var_parts - j;
2415
2416 /* We track only variables whose size is <= MAX_VAR_PARTS bytes,
2417 so there are at most MAX_VAR_PARTS different offsets. */
2418 gcc_checking_assert (dst->onepart ? k == 1 : k <= MAX_VAR_PARTS);
2419
2420 if (dst->n_var_parts != k && shared_var_p (dst, set->vars))
2421 {
2422 dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
2423 dst = (variable)*dstp;
2424 }
2425
2426 i = src->n_var_parts - 1;
2427 j = dst->n_var_parts - 1;
2428 dst->n_var_parts = k;
2429
2430 for (k--; k >= 0; k--)
2431 {
2432 location_chain node, node2;
2433
2434 if (i >= 0 && j >= 0
2435 && VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
2436 {
2437 /* Compute the "sorted" union of the chains: the locations which
2438 are in both chains go first, sorted by the sum of their
2439 positions in the two chains. */
2440 int dst_l, src_l;
2441 int ii, jj, n;
2442 struct variable_union_info *vui;
2443
2444 /* If DST is shared, compare the location chains.
2445 If they are different, we will most likely modify the chain in
2446 DST, so make a copy of DST first. */
2447 if (shared_var_p (dst, set->vars))
2448 {
2449 for (node = src->var_part[i].loc_chain,
2450 node2 = dst->var_part[j].loc_chain; node && node2;
2451 node = node->next, node2 = node2->next)
2452 {
2453 if (!((REG_P (node2->loc)
2454 && REG_P (node->loc)
2455 && REGNO (node2->loc) == REGNO (node->loc))
2456 || rtx_equal_p (node2->loc, node->loc)))
2457 {
2458 if (node2->init < node->init)
2459 node2->init = node->init;
2460 break;
2461 }
2462 }
2463 if (node || node2)
2464 {
2465 dstp = unshare_variable (set, dstp, dst,
2466 VAR_INIT_STATUS_UNKNOWN);
2467 dst = (variable)*dstp;
2468 }
2469 }
2470
2471 src_l = 0;
2472 for (node = src->var_part[i].loc_chain; node; node = node->next)
2473 src_l++;
2474 dst_l = 0;
2475 for (node = dst->var_part[j].loc_chain; node; node = node->next)
2476 dst_l++;
2477
2478 if (dst_l == 1)
2479 {
2480 /* The most common case is also much simpler: no qsort is needed. */
2481 location_chain dstnode = dst->var_part[j].loc_chain;
2482 dst->var_part[k].loc_chain = dstnode;
2483 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET(dst, j);
2484 node2 = dstnode;
2485 for (node = src->var_part[i].loc_chain; node; node = node->next)
2486 if (!((REG_P (dstnode->loc)
2487 && REG_P (node->loc)
2488 && REGNO (dstnode->loc) == REGNO (node->loc))
2489 || rtx_equal_p (dstnode->loc, node->loc)))
2490 {
2491 location_chain new_node;
2492
2493 /* Copy the location from SRC. */
2494 new_node = (location_chain) pool_alloc (loc_chain_pool);
2495 new_node->loc = node->loc;
2496 new_node->init = node->init;
2497 if (!node->set_src || MEM_P (node->set_src))
2498 new_node->set_src = NULL;
2499 else
2500 new_node->set_src = node->set_src;
2501 node2->next = new_node;
2502 node2 = new_node;
2503 }
2504 node2->next = NULL;
2505 }
2506 else
2507 {
2508 if (src_l + dst_l > vui_allocated)
2509 {
2510 vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
2511 vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
2512 vui_allocated);
2513 }
2514 vui = vui_vec;
2515
2516 /* Fill in the locations from DST. */
2517 for (node = dst->var_part[j].loc_chain, jj = 0; node;
2518 node = node->next, jj++)
2519 {
2520 vui[jj].lc = node;
2521 vui[jj].pos_dst = jj;
2522
2523 /* Start from a position larger than any sum of two valid positions. */
2524 vui[jj].pos = jj + src_l + dst_l;
2525 }
2526
2527 /* Fill in the locations from SRC. */
2528 n = dst_l;
2529 for (node = src->var_part[i].loc_chain, ii = 0; node;
2530 node = node->next, ii++)
2531 {
2532 /* Find location from NODE. */
2533 for (jj = 0; jj < dst_l; jj++)
2534 {
2535 if ((REG_P (vui[jj].lc->loc)
2536 && REG_P (node->loc)
2537 && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
2538 || rtx_equal_p (vui[jj].lc->loc, node->loc))
2539 {
2540 vui[jj].pos = jj + ii;
2541 break;
2542 }
2543 }
2544 if (jj >= dst_l) /* The location has not been found. */
2545 {
2546 location_chain new_node;
2547
2548 /* Copy the location from SRC. */
2549 new_node = (location_chain) pool_alloc (loc_chain_pool);
2550 new_node->loc = node->loc;
2551 new_node->init = node->init;
2552 if (!node->set_src || MEM_P (node->set_src))
2553 new_node->set_src = NULL;
2554 else
2555 new_node->set_src = node->set_src;
2556 vui[n].lc = new_node;
2557 vui[n].pos_dst = src_l + dst_l;
2558 vui[n].pos = ii + src_l + dst_l;
2559 n++;
2560 }
2561 }
2562
2563 if (dst_l == 2)
2564 {
2565 /* A special but still very common case. For dst_l == 2,
2566 all entries dst_l ... n-1 are already sorted, since for
2567 i >= dst_l, vui[i].pos == i + src_l + dst_l. */
2568 if (vui[0].pos > vui[1].pos)
2569 {
2570 /* Order should be 1, 0, 2... */
2571 dst->var_part[k].loc_chain = vui[1].lc;
2572 vui[1].lc->next = vui[0].lc;
2573 if (n >= 3)
2574 {
2575 vui[0].lc->next = vui[2].lc;
2576 vui[n - 1].lc->next = NULL;
2577 }
2578 else
2579 vui[0].lc->next = NULL;
2580 ii = 3;
2581 }
2582 else
2583 {
2584 dst->var_part[k].loc_chain = vui[0].lc;
2585 if (n >= 3 && vui[2].pos < vui[1].pos)
2586 {
2587 /* Order should be 0, 2, 1, 3... */
2588 vui[0].lc->next = vui[2].lc;
2589 vui[2].lc->next = vui[1].lc;
2590 if (n >= 4)
2591 {
2592 vui[1].lc->next = vui[3].lc;
2593 vui[n - 1].lc->next = NULL;
2594 }
2595 else
2596 vui[1].lc->next = NULL;
2597 ii = 4;
2598 }
2599 else
2600 {
2601 /* Order should be 0, 1, 2... */
2602 ii = 1;
2603 vui[n - 1].lc->next = NULL;
2604 }
2605 }
2606 for (; ii < n; ii++)
2607 vui[ii - 1].lc->next = vui[ii].lc;
2608 }
2609 else
2610 {
2611 qsort (vui, n, sizeof (struct variable_union_info),
2612 variable_union_info_cmp_pos);
2613
2614 /* Reconnect the nodes in sorted order. */
2615 for (ii = 1; ii < n; ii++)
2616 vui[ii - 1].lc->next = vui[ii].lc;
2617 vui[n - 1].lc->next = NULL;
2618 dst->var_part[k].loc_chain = vui[0].lc;
2619 }
2620
2621 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
2622 }
2623 i--;
2624 j--;
2625 }
2626 else if ((i >= 0 && j >= 0
2627 && VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
2628 || i < 0)
2629 {
2630 dst->var_part[k] = dst->var_part[j];
2631 j--;
2632 }
2633 else if ((i >= 0 && j >= 0
2634 && VAR_PART_OFFSET (src, i) > VAR_PART_OFFSET (dst, j))
2635 || j < 0)
2636 {
2637 location_chain *nextp;
2638
2639 /* Copy the chain from SRC. */
2640 nextp = &dst->var_part[k].loc_chain;
2641 for (node = src->var_part[i].loc_chain; node; node = node->next)
2642 {
2643 location_chain new_lc;
2644
2645 new_lc = (location_chain) pool_alloc (loc_chain_pool);
2646 new_lc->next = NULL;
2647 new_lc->init = node->init;
2648 if (!node->set_src || MEM_P (node->set_src))
2649 new_lc->set_src = NULL;
2650 else
2651 new_lc->set_src = node->set_src;
2652 new_lc->loc = node->loc;
2653
2654 *nextp = new_lc;
2655 nextp = &new_lc->next;
2656 }
2657
2658 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (src, i);
2659 i--;
2660 }
2661 dst->var_part[k].cur_loc = NULL;
2662 }
2663
2664 if (flag_var_tracking_uninit)
2665 for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
2666 {
2667 location_chain node, node2;
2668 for (node = src->var_part[i].loc_chain; node; node = node->next)
2669 for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
2670 if (rtx_equal_p (node->loc, node2->loc))
2671 {
2672 if (node->init > node2->init)
2673 node2->init = node->init;
2674 }
2675 }
2676
2677 /* Continue traversing the hash table. */
2678 return 1;
2679 }
2680
2681 /* Compute union of dataflow sets SRC and DST and store it to DST. */
2682
2683 static void
2684 dataflow_set_union (dataflow_set *dst, dataflow_set *src)
2685 {
2686 int i;
2687
2688 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2689 attrs_list_union (&dst->regs[i], src->regs[i]);
2690
2691 if (dst->vars == empty_shared_hash)
2692 {
2693 shared_hash_destroy (dst->vars);
2694 dst->vars = shared_hash_copy (src->vars);
2695 }
2696 else
2697 {
2698 htab_iterator hi;
2699 variable var;
2700
2701 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (src->vars), var, variable, hi)
2702 variable_union (var, dst);
2703 }
2704 }
2705
2706 /* Whether the value is currently being expanded. */
2707 #define VALUE_RECURSED_INTO(x) \
2708 (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
2709
2710 /* Whether no expansion was found, recorded to save useless lookups.
2711 It must only be set when VALUE_CHANGED is clear. */
2712 #define NO_LOC_P(x) \
2713 (RTL_FLAG_CHECK2 ("NO_LOC_P", (x), VALUE, DEBUG_EXPR)->return_val)
2714
2715 /* Whether cur_loc in the value needs to be (re)computed. */
2716 #define VALUE_CHANGED(x) \
2717 (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
2718 /* Whether cur_loc in the decl needs to be (re)computed. */
2719 #define DECL_CHANGED(x) TREE_VISITED (x)
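/* The macros above reuse flag bits that have no other meaning on
   these nodes: the used, return_val and frame_related RTL flags on
   VALUEs (and DEBUG_EXPRs), and TREE_VISITED on decls, serving as
   scratch marks private to this pass.  */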
2720
2721 /* Record (if NEWV) that DV needs to have its cur_loc recomputed. For
2722 user DECLs, this means they're in changed_variables. Values and
2723 debug exprs may be left with this flag set if no user variable
2724 requires them to be evaluated. */
2725
2726 static inline void
2727 set_dv_changed (decl_or_value dv, bool newv)
2728 {
2729 switch (dv_onepart_p (dv))
2730 {
2731 case ONEPART_VALUE:
2732 if (newv)
2733 NO_LOC_P (dv_as_value (dv)) = false;
2734 VALUE_CHANGED (dv_as_value (dv)) = newv;
2735 break;
2736
2737 case ONEPART_DEXPR:
2738 if (newv)
2739 NO_LOC_P (DECL_RTL_KNOWN_SET (dv_as_decl (dv))) = false;
2740 /* Fall through... */
2741
2742 default:
2743 DECL_CHANGED (dv_as_decl (dv)) = newv;
2744 break;
2745 }
2746 }
2747
2748 /* Return true if DV needs to have its cur_loc recomputed. */
2749
2750 static inline bool
2751 dv_changed_p (decl_or_value dv)
2752 {
2753 return (dv_is_value_p (dv)
2754 ? VALUE_CHANGED (dv_as_value (dv))
2755 : DECL_CHANGED (dv_as_decl (dv)));
2756 }
2757
2758 /* Return a location list node whose loc is rtx_equal to LOC, in the
2759 location list of a one-part variable or value VAR, or in that of
2760 any values recursively mentioned in the location lists. VARS must
2761 be in star-canonical form. */
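/* E.g. when VAR's only remaining location is a canonical VALUE, the
   lookup recurses into that value's own location list, so a REG or
   MEM reachable only through the equivalence is still found.  */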
2762
2763 static location_chain
2764 find_loc_in_1pdv (rtx loc, variable var, htab_t vars)
2765 {
2766 location_chain node;
2767 enum rtx_code loc_code;
2768
2769 if (!var)
2770 return NULL;
2771
2772 gcc_checking_assert (var->onepart);
2773
2774 if (!var->n_var_parts)
2775 return NULL;
2776
2777 gcc_checking_assert (loc != dv_as_opaque (var->dv));
2778
2779 loc_code = GET_CODE (loc);
2780 for (node = var->var_part[0].loc_chain; node; node = node->next)
2781 {
2782 decl_or_value dv;
2783 variable rvar;
2784
2785 if (GET_CODE (node->loc) != loc_code)
2786 {
2787 if (GET_CODE (node->loc) != VALUE)
2788 continue;
2789 }
2790 else if (loc == node->loc)
2791 return node;
2792 else if (loc_code != VALUE)
2793 {
2794 if (rtx_equal_p (loc, node->loc))
2795 return node;
2796 continue;
2797 }
2798
2799 /* Since we're in star-canonical form, we don't need to visit
2800 non-canonical nodes: one-part variables and non-canonical
2801 values would only point back to the canonical node. */
2802 if (dv_is_value_p (var->dv)
2803 && !canon_value_cmp (node->loc, dv_as_value (var->dv)))
2804 {
2805 /* Skip all subsequent VALUEs. */
2806 while (node->next && GET_CODE (node->next->loc) == VALUE)
2807 {
2808 node = node->next;
2809 gcc_checking_assert (!canon_value_cmp (node->loc,
2810 dv_as_value (var->dv)));
2811 if (loc == node->loc)
2812 return node;
2813 }
2814 continue;
2815 }
2816
2817 gcc_checking_assert (node == var->var_part[0].loc_chain);
2818 gcc_checking_assert (!node->next);
2819
2820 dv = dv_from_value (node->loc);
2821 rvar = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
2822 return find_loc_in_1pdv (loc, rvar, vars);
2823 }
2824
2825 /* ??? Gotta look in cselib_val locations too. */
2826
2827 return NULL;
2828 }
2829
2830 /* Hash table iteration argument passed to variable_merge. */
2831 struct dfset_merge
2832 {
2833 /* The set in which the merge is to be inserted. */
2834 dataflow_set *dst;
2835 /* The set that we're iterating in. */
2836 dataflow_set *cur;
2837 /* The set that may contain the other dv we are to merge with. */
2838 dataflow_set *src;
2839 /* Number of onepart dvs in src. */
2840 int src_onepart_cnt;
2841 };
2842
2843 /* Insert LOC in *NODEP, if it's not there yet. The list must be in
2844 loc_cmp order, and it is maintained as such. */
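/* The list is kept sorted, so a single linear scan below either
   weakens the init status of an already-present node or splices a
   new node in at the first position loc_cmp allows.  */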
2845
2846 static void
2847 insert_into_intersection (location_chain *nodep, rtx loc,
2848 enum var_init_status status)
2849 {
2850 location_chain node;
2851 int r;
2852
2853 for (node = *nodep; node; nodep = &node->next, node = *nodep)
2854 if ((r = loc_cmp (node->loc, loc)) == 0)
2855 {
2856 node->init = MIN (node->init, status);
2857 return;
2858 }
2859 else if (r > 0)
2860 break;
2861
2862 node = (location_chain) pool_alloc (loc_chain_pool);
2863
2864 node->loc = loc;
2865 node->set_src = NULL;
2866 node->init = status;
2867 node->next = *nodep;
2868 *nodep = node;
2869 }
2870
2871 /* Insert in DEST the intersection of the locations present in both
2872 S1NODE and S2VAR, directly or indirectly. S1NODE is from a
2873 variable in DSM->cur, whereas S2VAR is from DSM->src. DEST points
2874 into (or will be installed into) a variable in DSM->dst. */
2875
2876 static void
2877 intersect_loc_chains (rtx val, location_chain *dest, struct dfset_merge *dsm,
2878 location_chain s1node, variable s2var)
2879 {
2880 dataflow_set *s1set = dsm->cur;
2881 dataflow_set *s2set = dsm->src;
2882 location_chain found;
2883
2884 if (s2var)
2885 {
2886 location_chain s2node;
2887
2888 gcc_checking_assert (s2var->onepart);
2889
2890 if (s2var->n_var_parts)
2891 {
2892 s2node = s2var->var_part[0].loc_chain;
2893
2894 for (; s1node && s2node;
2895 s1node = s1node->next, s2node = s2node->next)
2896 if (s1node->loc != s2node->loc)
2897 break;
2898 else if (s1node->loc == val)
2899 continue;
2900 else
2901 insert_into_intersection (dest, s1node->loc,
2902 MIN (s1node->init, s2node->init));
2903 }
2904 }
2905
2906 for (; s1node; s1node = s1node->next)
2907 {
2908 if (s1node->loc == val)
2909 continue;
2910
2911 if ((found = find_loc_in_1pdv (s1node->loc, s2var,
2912 shared_hash_htab (s2set->vars))))
2913 {
2914 insert_into_intersection (dest, s1node->loc,
2915 MIN (s1node->init, found->init));
2916 continue;
2917 }
2918
2919 if (GET_CODE (s1node->loc) == VALUE
2920 && !VALUE_RECURSED_INTO (s1node->loc))
2921 {
2922 decl_or_value dv = dv_from_value (s1node->loc);
2923 variable svar = shared_hash_find (s1set->vars, dv);
2924 if (svar)
2925 {
2926 if (svar->n_var_parts == 1)
2927 {
2928 VALUE_RECURSED_INTO (s1node->loc) = true;
2929 intersect_loc_chains (val, dest, dsm,
2930 svar->var_part[0].loc_chain,
2931 s2var);
2932 VALUE_RECURSED_INTO (s1node->loc) = false;
2933 }
2934 }
2935 }
2936
2937 /* ??? Gotta look in cselib_val locations too. */
2938
2939 /* ??? if the location is equivalent to any location in src,
2940 searched recursively
2941
2942 add to dst the values needed to represent the equivalence
2943
2944 telling whether location S is equivalent to another dv's
2945 location list:
2946
2947 for each location D in the list
2948
2949 if S and D satisfy rtx_equal_p, then it is present
2950
2951 else if D is a value, recurse without cycles
2952
2953 else if S and D have the same CODE and MODE
2954
2955 for each operand oS and the corresponding oD
2956
2957 if oS and oD are not equivalent, then S and D are not equivalent
2958
2959 else if they are RTX vectors
2960
2961 if any vector oS element is not equivalent to its respective oD,
2962 then S and D are not equivalent
2963
2964 */
2965
2966
2967 }
2968 }
2969
2970 /* Return -1 if X should be before Y in a location list for a 1-part
2971 variable, 1 if Y should be before X, and 0 if they're equivalent
2972 and should not appear in the list. */
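/* The resulting total order puts REGs first (ordered by REGNO),
   then MEMs (by address), then VALUEs (by canon_value_cmp), then
   the remaining codes (by code, then by operands), with
   ENTRY_VALUEs sorting after everything else.  */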
2973
2974 static int
2975 loc_cmp (rtx x, rtx y)
2976 {
2977 int i, j, r;
2978 RTX_CODE code = GET_CODE (x);
2979 const char *fmt;
2980
2981 if (x == y)
2982 return 0;
2983
2984 if (REG_P (x))
2985 {
2986 if (!REG_P (y))
2987 return -1;
2988 gcc_assert (GET_MODE (x) == GET_MODE (y));
2989 if (REGNO (x) == REGNO (y))
2990 return 0;
2991 else if (REGNO (x) < REGNO (y))
2992 return -1;
2993 else
2994 return 1;
2995 }
2996
2997 if (REG_P (y))
2998 return 1;
2999
3000 if (MEM_P (x))
3001 {
3002 if (!MEM_P (y))
3003 return -1;
3004 gcc_assert (GET_MODE (x) == GET_MODE (y));
3005 return loc_cmp (XEXP (x, 0), XEXP (y, 0));
3006 }
3007
3008 if (MEM_P (y))
3009 return 1;
3010
3011 if (GET_CODE (x) == VALUE)
3012 {
3013 if (GET_CODE (y) != VALUE)
3014 return -1;
3015 /* Don't assert that the modes are the same; that is true only
3016 when not recursing. (subreg:QI (value:SI 1:1) 0)
3017 and (subreg:QI (value:DI 2:2) 0) can be compared
3018 even though the inner modes are different. */
3019 if (canon_value_cmp (x, y))
3020 return -1;
3021 else
3022 return 1;
3023 }
3024
3025 if (GET_CODE (y) == VALUE)
3026 return 1;
3027
3028 /* Entry value is the least preferable kind of expression. */
3029 if (GET_CODE (x) == ENTRY_VALUE)
3030 {
3031 if (GET_CODE (y) != ENTRY_VALUE)
3032 return 1;
3033 gcc_assert (GET_MODE (x) == GET_MODE (y));
3034 return loc_cmp (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));
3035 }
3036
3037 if (GET_CODE (y) == ENTRY_VALUE)
3038 return -1;
3039
3040 if (GET_CODE (x) == GET_CODE (y))
3041 /* Compare operands below. */;
3042 else if (GET_CODE (x) < GET_CODE (y))
3043 return -1;
3044 else
3045 return 1;
3046
3047 gcc_assert (GET_MODE (x) == GET_MODE (y));
3048
3049 if (GET_CODE (x) == DEBUG_EXPR)
3050 {
3051 if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3052 < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)))
3053 return -1;
3054 gcc_checking_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3055 > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)));
3056 return 1;
3057 }
3058
3059 fmt = GET_RTX_FORMAT (code);
3060 for (i = 0; i < GET_RTX_LENGTH (code); i++)
3061 switch (fmt[i])
3062 {
3063 case 'w':
3064 if (XWINT (x, i) == XWINT (y, i))
3065 break;
3066 else if (XWINT (x, i) < XWINT (y, i))
3067 return -1;
3068 else
3069 return 1;
3070
3071 case 'n':
3072 case 'i':
3073 if (XINT (x, i) == XINT (y, i))
3074 break;
3075 else if (XINT (x, i) < XINT (y, i))
3076 return -1;
3077 else
3078 return 1;
3079
3080 case 'V':
3081 case 'E':
3082 /* Compare the vector length first. */
3083 if (XVECLEN (x, i) == XVECLEN (y, i))
3084 /* Compare the vector's elements. */;
3085 else if (XVECLEN (x, i) < XVECLEN (y, i))
3086 return -1;
3087 else
3088 return 1;
3089
3090 for (j = 0; j < XVECLEN (x, i); j++)
3091 if ((r = loc_cmp (XVECEXP (x, i, j),
3092 XVECEXP (y, i, j))))
3093 return r;
3094 break;
3095
3096 case 'e':
3097 if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
3098 return r;
3099 break;
3100
3101 case 'S':
3102 case 's':
3103 if (XSTR (x, i) == XSTR (y, i))
3104 break;
3105 if (!XSTR (x, i))
3106 return -1;
3107 if (!XSTR (y, i))
3108 return 1;
3109 if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
3110 break;
3111 else if (r < 0)
3112 return -1;
3113 else
3114 return 1;
3115
3116 case 'u':
3117 /* These are just backpointers, so they don't matter. */
3118 break;
3119
3120 case '0':
3121 case 't':
3122 break;
3123
3124 /* It is believed that rtx's at this level will never
3125 contain anything but integers and other rtx's,
3126 except within LABEL_REFs and SYMBOL_REFs. */
3127 default:
3128 gcc_unreachable ();
3129 }
3130
3131 return 0;
3132 }
3133
3134 #if ENABLE_CHECKING
3135 /* Check the order of entries in one-part variables. */
3136
3137 static int
3138 canonicalize_loc_order_check (void **slot, void *data ATTRIBUTE_UNUSED)
3139 {
3140 variable var = (variable) *slot;
3141 location_chain node, next;
3142
3143 #ifdef ENABLE_RTL_CHECKING
3144 int i;
3145 for (i = 0; i < var->n_var_parts; i++)
3146 gcc_assert (var->var_part[i].cur_loc == NULL);
3147 gcc_assert (!var->in_changed_variables);
3148 #endif
3149
3150 if (!var->onepart)
3151 return 1;
3152
3153 gcc_assert (var->n_var_parts == 1);
3154 node = var->var_part[0].loc_chain;
3155 gcc_assert (node);
3156
3157 while ((next = node->next))
3158 {
3159 gcc_assert (loc_cmp (node->loc, next->loc) < 0);
3160 node = next;
3161 }
3162
3163 return 1;
3164 }
3165 #endif
3166
3167 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
3168 more likely to be chosen as canonical for an equivalence set.
3169 Ensure less likely values can reach more likely neighbors, making
3170 the connections bidirectional. */
3171
3172 static int
3173 canonicalize_values_mark (void **slot, void *data)
3174 {
3175 dataflow_set *set = (dataflow_set *)data;
3176 variable var = (variable) *slot;
3177 decl_or_value dv = var->dv;
3178 rtx val;
3179 location_chain node;
3180
3181 if (!dv_is_value_p (dv))
3182 return 1;
3183
3184 gcc_checking_assert (var->n_var_parts == 1);
3185
3186 val = dv_as_value (dv);
3187
3188 for (node = var->var_part[0].loc_chain; node; node = node->next)
3189 if (GET_CODE (node->loc) == VALUE)
3190 {
3191 if (canon_value_cmp (node->loc, val))
3192 VALUE_RECURSED_INTO (val) = true;
3193 else
3194 {
3195 decl_or_value odv = dv_from_value (node->loc);
3196 void **oslot = shared_hash_find_slot_noinsert (set->vars, odv);
3197
3198 set_slot_part (set, val, oslot, odv, 0,
3199 node->init, NULL_RTX);
3200
3201 VALUE_RECURSED_INTO (node->loc) = true;
3202 }
3203 }
3204
3205 return 1;
3206 }
3207
3208 /* Remove redundant entries from equivalence lists in onepart
3209 variables, canonicalizing equivalence sets into star shapes. */
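/* For example (illustration only): if V3's list names V2 and V2's
   list names V1, where V1 is preferred by canon_value_cmp, the
   equivalences are rewritten so that V2 and V3 both name V1
   directly and V1 collects the reverse links, forming a star with
   V1 at the center.  */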
3210
3211 static int
3212 canonicalize_values_star (void **slot, void *data)
3213 {
3214 dataflow_set *set = (dataflow_set *)data;
3215 variable var = (variable) *slot;
3216 decl_or_value dv = var->dv;
3217 location_chain node;
3218 decl_or_value cdv;
3219 rtx val, cval;
3220 void **cslot;
3221 bool has_value;
3222 bool has_marks;
3223
3224 if (!var->onepart)
3225 return 1;
3226
3227 gcc_checking_assert (var->n_var_parts == 1);
3228
3229 if (dv_is_value_p (dv))
3230 {
3231 cval = dv_as_value (dv);
3232 if (!VALUE_RECURSED_INTO (cval))
3233 return 1;
3234 VALUE_RECURSED_INTO (cval) = false;
3235 }
3236 else
3237 cval = NULL_RTX;
3238
3239 restart:
3240 val = cval;
3241 has_value = false;
3242 has_marks = false;
3243
3244 gcc_assert (var->n_var_parts == 1);
3245
3246 for (node = var->var_part[0].loc_chain; node; node = node->next)
3247 if (GET_CODE (node->loc) == VALUE)
3248 {
3249 has_value = true;
3250 if (VALUE_RECURSED_INTO (node->loc))
3251 has_marks = true;
3252 if (canon_value_cmp (node->loc, cval))
3253 cval = node->loc;
3254 }
3255
3256 if (!has_value)
3257 return 1;
3258
3259 if (cval == val)
3260 {
3261 if (!has_marks || dv_is_decl_p (dv))
3262 return 1;
3263
3264 /* Keep it marked so that we revisit it, either after visiting a
3265 child node, or after visiting a new parent that might be
3266 found out. */
3267 VALUE_RECURSED_INTO (val) = true;
3268
3269 for (node = var->var_part[0].loc_chain; node; node = node->next)
3270 if (GET_CODE (node->loc) == VALUE
3271 && VALUE_RECURSED_INTO (node->loc))
3272 {
3273 cval = node->loc;
3274 restart_with_cval:
3275 VALUE_RECURSED_INTO (cval) = false;
3276 dv = dv_from_value (cval);
3277 slot = shared_hash_find_slot_noinsert (set->vars, dv);
3278 if (!slot)
3279 {
3280 gcc_assert (dv_is_decl_p (var->dv));
3281 /* The canonical value was reset and dropped.
3282 Remove it. */
3283 clobber_variable_part (set, NULL, var->dv, 0, NULL);
3284 return 1;
3285 }
3286 var = (variable)*slot;
3287 gcc_assert (dv_is_value_p (var->dv));
3288 if (var->n_var_parts == 0)
3289 return 1;
3290 gcc_assert (var->n_var_parts == 1);
3291 goto restart;
3292 }
3293
3294 VALUE_RECURSED_INTO (val) = false;
3295
3296 return 1;
3297 }
3298
3299 /* Push values to the canonical one. */
3300 cdv = dv_from_value (cval);
3301 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3302
3303 for (node = var->var_part[0].loc_chain; node; node = node->next)
3304 if (node->loc != cval)
3305 {
3306 cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
3307 node->init, NULL_RTX);
3308 if (GET_CODE (node->loc) == VALUE)
3309 {
3310 decl_or_value ndv = dv_from_value (node->loc);
3311
3312 set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
3313 NO_INSERT);
3314
3315 if (canon_value_cmp (node->loc, val))
3316 {
3317 /* If it could have been a local minimum, it's not any more,
3318 since it's now neighbor to cval, so it may have to push
3319 to it. Conversely, if it wouldn't have prevailed over
3320 val, then whatever mark it has is fine: if it was to
3321 push, it will now push to a more canonical node, but if
3322 it wasn't, then it has already pushed any values it might
3323 have to. */
3324 VALUE_RECURSED_INTO (node->loc) = true;
3325 /* Make sure we visit node->loc by ensuring that cval is
3326 visited too. */
3327 VALUE_RECURSED_INTO (cval) = true;
3328 }
3329 else if (!VALUE_RECURSED_INTO (node->loc))
3330 /* If we have no need to "recurse" into this node, it's
3331 already "canonicalized", so drop the link to the old
3332 parent. */
3333 clobber_variable_part (set, cval, ndv, 0, NULL);
3334 }
3335 else if (GET_CODE (node->loc) == REG)
3336 {
3337 attrs list = set->regs[REGNO (node->loc)], *listp;
3338
3339 /* Change an existing attribute referring to dv so that it
3340 refers to cdv, removing any duplicate this might
3341 introduce, and checking that no previous duplicates
3342 existed, all in a single pass. */
3343
3344 while (list)
3345 {
3346 if (list->offset == 0
3347 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3348 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3349 break;
3350
3351 list = list->next;
3352 }
3353
3354 gcc_assert (list);
3355 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3356 {
3357 list->dv = cdv;
3358 for (listp = &list->next; (list = *listp); listp = &list->next)
3359 {
3360 if (list->offset)
3361 continue;
3362
3363 if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3364 {
3365 *listp = list->next;
3366 pool_free (attrs_pool, list);
3367 list = *listp;
3368 break;
3369 }
3370
3371 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
3372 }
3373 }
3374 else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3375 {
3376 for (listp = &list->next; (list = *listp); listp = &list->next)
3377 {
3378 if (list->offset)
3379 continue;
3380
3381 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3382 {
3383 *listp = list->next;
3384 pool_free (attrs_pool, list);
3385 list = *listp;
3386 break;
3387 }
3388
3389 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
3390 }
3391 }
3392 else
3393 gcc_unreachable ();
3394
3395 #if ENABLE_CHECKING
3396 while (list)
3397 {
3398 if (list->offset == 0
3399 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3400 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3401 gcc_unreachable ();
3402
3403 list = list->next;
3404 }
3405 #endif
3406 }
3407 }
3408
3409 if (val)
3410 set_slot_part (set, val, cslot, cdv, 0,
3411 VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
3412
3413 slot = clobber_slot_part (set, cval, slot, 0, NULL);
3414
3415 /* Variable may have been unshared. */
3416 var = (variable)*slot;
3417 gcc_checking_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
3418 && var->var_part[0].loc_chain->next == NULL);
3419
3420 if (VALUE_RECURSED_INTO (cval))
3421 goto restart_with_cval;
3422
3423 return 1;
3424 }
3425
3426 /* Bind one-part variables to the canonical value in an equivalence
3427 set. Not doing this causes dataflow convergence failure in rare
3428 circumstances, see PR42873. Unfortunately we can't do this
3429 efficiently as part of canonicalize_values_star, since we may not
3430 have determined or even seen the canonical value of a set when we
3431 get to a variable that references another member of the set. */
3432
3433 static int
3434 canonicalize_vars_star (void **slot, void *data)
3435 {
3436 dataflow_set *set = (dataflow_set *)data;
3437 variable var = (variable) *slot;
3438 decl_or_value dv = var->dv;
3439 location_chain node;
3440 rtx cval;
3441 decl_or_value cdv;
3442 void **cslot;
3443 variable cvar;
3444 location_chain cnode;
3445
3446 if (!var->onepart || var->onepart == ONEPART_VALUE)
3447 return 1;
3448
3449 gcc_assert (var->n_var_parts == 1);
3450
3451 node = var->var_part[0].loc_chain;
3452
3453 if (GET_CODE (node->loc) != VALUE)
3454 return 1;
3455
3456 gcc_assert (!node->next);
3457 cval = node->loc;
3458
3459 /* Push values to the canonical one. */
3460 cdv = dv_from_value (cval);
3461 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3462 if (!cslot)
3463 return 1;
3464 cvar = (variable)*cslot;
3465 gcc_assert (cvar->n_var_parts == 1);
3466
3467 cnode = cvar->var_part[0].loc_chain;
3468
3469 /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
3470 that are not "more canonical" than it. */
3471 if (GET_CODE (cnode->loc) != VALUE
3472 || !canon_value_cmp (cnode->loc, cval))
3473 return 1;
3474
3475 /* CVAL was found to be non-canonical. Change the variable to point
3476 to the canonical VALUE. */
3477 gcc_assert (!cnode->next);
3478 cval = cnode->loc;
3479
3480 slot = set_slot_part (set, cval, slot, dv, 0,
3481 node->init, node->set_src);
3482 clobber_slot_part (set, cval, slot, 0, node->set_src);
3483
3484 return 1;
3485 }
3486
3487 /* Combine the variable or value S1VAR (in DSM->cur) with the
3488 corresponding entry in DSM->src. Multi-part variables are combined
3489 with variable_union, whereas onepart dvs are combined with
3490 intersection. */
3491
3492 static int
3493 variable_merge_over_cur (variable s1var, struct dfset_merge *dsm)
3494 {
3495 dataflow_set *dst = dsm->dst;
3496 void **dstslot;
3497 variable s2var, dvar = NULL;
3498 decl_or_value dv = s1var->dv;
3499 onepart_enum_t onepart = s1var->onepart;
3500 rtx val;
3501 hashval_t dvhash;
3502 location_chain node, *nodep;
3503
3504 /* If the incoming onepart variable has an empty location list, then
3505 the intersection will be just as empty. For other variables,
3506 it's always union. */
3507 gcc_checking_assert (s1var->n_var_parts
3508 && s1var->var_part[0].loc_chain);
3509
3510 if (!onepart)
3511 return variable_union (s1var, dst);
3512
3513 gcc_checking_assert (s1var->n_var_parts == 1);
3514
3515 dvhash = dv_htab_hash (dv);
3516 if (dv_is_value_p (dv))
3517 val = dv_as_value (dv);
3518 else
3519 val = NULL;
3520
3521 s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
3522 if (!s2var)
3523 {
3524 dst_can_be_shared = false;
3525 return 1;
3526 }
3527
3528 dsm->src_onepart_cnt--;
3529 gcc_assert (s2var->var_part[0].loc_chain
3530 && s2var->onepart == onepart
3531 && s2var->n_var_parts == 1);
3532
3533 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3534 if (dstslot)
3535 {
3536 dvar = (variable)*dstslot;
3537 gcc_assert (dvar->refcount == 1
3538 && dvar->onepart == onepart
3539 && dvar->n_var_parts == 1);
3540 nodep = &dvar->var_part[0].loc_chain;
3541 }
3542 else
3543 {
3544 nodep = &node;
3545 node = NULL;
3546 }
3547
3548 if (!dstslot && !onepart_variable_different_p (s1var, s2var))
3549 {
3550 dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
3551 dvhash, INSERT);
3552 *dstslot = dvar = s2var;
3553 dvar->refcount++;
3554 }
3555 else
3556 {
3557 dst_can_be_shared = false;
3558
3559 intersect_loc_chains (val, nodep, dsm,
3560 s1var->var_part[0].loc_chain, s2var);
3561
3562 if (!dstslot)
3563 {
3564 if (node)
3565 {
3566 dvar = (variable) pool_alloc (onepart_pool (onepart));
3567 dvar->dv = dv;
3568 dvar->refcount = 1;
3569 dvar->n_var_parts = 1;
3570 dvar->onepart = onepart;
3571 dvar->in_changed_variables = false;
3572 dvar->var_part[0].loc_chain = node;
3573 dvar->var_part[0].cur_loc = NULL;
3574 if (onepart)
3575 VAR_LOC_1PAUX (dvar) = NULL;
3576 else
3577 VAR_PART_OFFSET (dvar, 0) = 0;
3578
3579 dstslot
3580 = shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
3581 INSERT);
3582 gcc_assert (!*dstslot);
3583 *dstslot = dvar;
3584 }
3585 else
3586 return 1;
3587 }
3588 }
3589
3590 nodep = &dvar->var_part[0].loc_chain;
3591 while ((node = *nodep))
3592 {
3593 location_chain *nextp = &node->next;
3594
3595 if (GET_CODE (node->loc) == REG)
3596 {
3597 attrs list;
3598
3599 for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
3600 if (GET_MODE (node->loc) == GET_MODE (list->loc)
3601 && dv_is_value_p (list->dv))
3602 break;
3603
3604 if (!list)
3605 attrs_list_insert (&dst->regs[REGNO (node->loc)],
3606 dv, 0, node->loc);
3607 /* If this value became canonical for another value that had
3608 this register, we want to leave it alone. */
3609 else if (dv_as_value (list->dv) != val)
3610 {
3611 dstslot = set_slot_part (dst, dv_as_value (list->dv),
3612 dstslot, dv, 0,
3613 node->init, NULL_RTX);
3614 dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
3615
3616 /* Since nextp points into the removed node, we can't
3617 use it. The pointer to the next node moved to nodep.
3618 However, if the variable we're walking is unshared
3619 during our walk, we'll keep walking the location list
3620 of the previously-shared variable, in which case the
3621 node won't have been removed, and we'll want to skip
3622 it. That's why we test *nodep here. */
3623 if (*nodep != node)
3624 nextp = nodep;
3625 }
3626 }
3627 else
3628 /* Canonicalization puts registers first, so we don't have to
3629 walk it all. */
3630 break;
3631 nodep = nextp;
3632 }
3633
3634 if (dvar != (variable)*dstslot)
3635 dvar = (variable)*dstslot;
3636 nodep = &dvar->var_part[0].loc_chain;
3637
3638 if (val)
3639 {
3640 /* Mark all referenced nodes for canonicalization, and make sure
3641 we have mutual equivalence links. */
3642 VALUE_RECURSED_INTO (val) = true;
3643 for (node = *nodep; node; node = node->next)
3644 if (GET_CODE (node->loc) == VALUE)
3645 {
3646 VALUE_RECURSED_INTO (node->loc) = true;
3647 set_variable_part (dst, val, dv_from_value (node->loc), 0,
3648 node->init, NULL, INSERT);
3649 }
3650
3651 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3652 gcc_assert (*dstslot == dvar);
3653 canonicalize_values_star (dstslot, dst);
3654 gcc_checking_assert (dstslot
3655 == shared_hash_find_slot_noinsert_1 (dst->vars,
3656 dv, dvhash));
3657 dvar = (variable)*dstslot;
3658 }
3659 else
3660 {
3661 bool has_value = false, has_other = false;
3662
3663 /* If we have one value and anything else, we're going to
3664 canonicalize this, so make sure all values have an entry in
3665 the table and are marked for canonicalization. */
3666 for (node = *nodep; node; node = node->next)
3667 {
3668 if (GET_CODE (node->loc) == VALUE)
3669 {
3670 /* If this was marked during register canonicalization,
3671 we know we have to canonicalize values. */
3672 if (has_value)
3673 has_other = true;
3674 has_value = true;
3675 if (has_other)
3676 break;
3677 }
3678 else
3679 {
3680 has_other = true;
3681 if (has_value)
3682 break;
3683 }
3684 }
3685
3686 if (has_value && has_other)
3687 {
3688 for (node = *nodep; node; node = node->next)
3689 {
3690 if (GET_CODE (node->loc) == VALUE)
3691 {
3692 decl_or_value dv = dv_from_value (node->loc);
3693 void **slot = NULL;
3694
3695 if (shared_hash_shared (dst->vars))
3696 slot = shared_hash_find_slot_noinsert (dst->vars, dv);
3697 if (!slot)
3698 slot = shared_hash_find_slot_unshare (&dst->vars, dv,
3699 INSERT);
3700 if (!*slot)
3701 {
3702 variable var = (variable) pool_alloc (onepart_pool
3703 (ONEPART_VALUE));
3704 var->dv = dv;
3705 var->refcount = 1;
3706 var->n_var_parts = 1;
3707 var->onepart = ONEPART_VALUE;
3708 var->in_changed_variables = false;
3709 var->var_part[0].loc_chain = NULL;
3710 var->var_part[0].cur_loc = NULL;
3711 VAR_LOC_1PAUX (var) = NULL;
3712 *slot = var;
3713 }
3714
3715 VALUE_RECURSED_INTO (node->loc) = true;
3716 }
3717 }
3718
3719 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3720 gcc_assert (*dstslot == dvar);
3721 canonicalize_values_star (dstslot, dst);
3722 gcc_checking_assert (dstslot
3723 == shared_hash_find_slot_noinsert_1 (dst->vars,
3724 dv, dvhash));
3725 dvar = (variable)*dstslot;
3726 }
3727 }
3728
3729 if (!onepart_variable_different_p (dvar, s2var))
3730 {
3731 variable_htab_free (dvar);
3732 *dstslot = dvar = s2var;
3733 dvar->refcount++;
3734 }
3735 else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
3736 {
3737 variable_htab_free (dvar);
3738 *dstslot = dvar = s1var;
3739 dvar->refcount++;
3740 dst_can_be_shared = false;
3741 }
3742 else
3743 dst_can_be_shared = false;
3744
3745 return 1;
3746 }
3747
3748 /* Copy S2VAR (in DSM->src) to DSM->dst if the variable is a
3749 multi-part variable. Unions of multi-part variables and
3750 intersections of one-part ones will be handled in
3751 variable_merge_over_cur(). */
3752
3753 static int
3754 variable_merge_over_src (variable s2var, struct dfset_merge *dsm)
3755 {
3756 dataflow_set *dst = dsm->dst;
3757 decl_or_value dv = s2var->dv;
3758
3759 if (!s2var->onepart)
3760 {
3761 void **dstp = shared_hash_find_slot (dst->vars, dv);
3762 *dstp = s2var;
3763 s2var->refcount++;
3764 return 1;
3765 }
3766
3767 dsm->src_onepart_cnt++;
3768 return 1;
3769 }
3770
3771 /* Combine dataflow set information from SRC2 into DST. */
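/* Multi-part variables are unioned into DST, whereas one-part
   variables and values are intersected; see variable_merge_over_cur
   and variable_merge_over_src above.  */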
3773
3774 static void
3775 dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
3776 {
3777 dataflow_set cur = *dst;
3778 dataflow_set *src1 = &cur;
3779 struct dfset_merge dsm;
3780 int i;
3781 size_t src1_elems, src2_elems;
3782 htab_iterator hi;
3783 variable var;
3784
3785 src1_elems = htab_elements (shared_hash_htab (src1->vars));
3786 src2_elems = htab_elements (shared_hash_htab (src2->vars));
3787 dataflow_set_init (dst);
3788 dst->stack_adjust = cur.stack_adjust;
3789 shared_hash_destroy (dst->vars);
3790 dst->vars = (shared_hash) pool_alloc (shared_hash_pool);
3791 dst->vars->refcount = 1;
3792 dst->vars->htab
3793 = htab_create (MAX (src1_elems, src2_elems), variable_htab_hash,
3794 variable_htab_eq, variable_htab_free);
3795
3796 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3797 attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]);
3798
3799 dsm.dst = dst;
3800 dsm.src = src2;
3801 dsm.cur = src1;
3802 dsm.src_onepart_cnt = 0;
3803
3804 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (dsm.src->vars), var, variable, hi)
3805 variable_merge_over_src (var, &dsm);
3806 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (dsm.cur->vars), var, variable, hi)
3807 variable_merge_over_cur (var, &dsm);
3808
3809 if (dsm.src_onepart_cnt)
3810 dst_can_be_shared = false;
3811
3812 dataflow_set_destroy (src1);
3813 }
3814
3815 /* Mark and canonicalize equivalences among VALUEs bound to each register. */
3816
3817 static void
3818 dataflow_set_equiv_regs (dataflow_set *set)
3819 {
3820 int i;
3821 attrs list, *listp;
3822
3823 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3824 {
3825 rtx canon[NUM_MACHINE_MODES];
3826
3827 /* If the list is empty or has a single entry, there is nothing
3828 to canonicalize. */
3829 if (set->regs[i] == NULL || set->regs[i]->next == NULL)
3830 continue;
3831
3832 memset (canon, 0, sizeof (canon));
3833
3834 for (list = set->regs[i]; list; list = list->next)
3835 if (list->offset == 0 && dv_is_value_p (list->dv))
3836 {
3837 rtx val = dv_as_value (list->dv);
3838 rtx *cvalp = &canon[(int)GET_MODE (val)];
3839 rtx cval = *cvalp;
3840
3841 if (canon_value_cmp (val, cval))
3842 *cvalp = val;
3843 }
3844
3845 for (list = set->regs[i]; list; list = list->next)
3846 if (list->offset == 0 && dv_onepart_p (list->dv))
3847 {
3848 rtx cval = canon[(int)GET_MODE (list->loc)];
3849
3850 if (!cval)
3851 continue;
3852
3853 if (dv_is_value_p (list->dv))
3854 {
3855 rtx val = dv_as_value (list->dv);
3856
3857 if (val == cval)
3858 continue;
3859
3860 VALUE_RECURSED_INTO (val) = true;
3861 set_variable_part (set, val, dv_from_value (cval), 0,
3862 VAR_INIT_STATUS_INITIALIZED,
3863 NULL, NO_INSERT);
3864 }
3865
3866 VALUE_RECURSED_INTO (cval) = true;
3867 set_variable_part (set, cval, list->dv, 0,
3868 VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
3869 }
3870
3871 for (listp = &set->regs[i]; (list = *listp);
3872 listp = list ? &list->next : listp)
3873 if (list->offset == 0 && dv_onepart_p (list->dv))
3874 {
3875 rtx cval = canon[(int)GET_MODE (list->loc)];
3876 void **slot;
3877
3878 if (!cval)
3879 continue;
3880
3881 if (dv_is_value_p (list->dv))
3882 {
3883 rtx val = dv_as_value (list->dv);
3884 if (!VALUE_RECURSED_INTO (val))
3885 continue;
3886 }
3887
3888 slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
3889 canonicalize_values_star (slot, set);
3890 if (*listp != list)
3891 list = NULL;
3892 }
3893 }
3894 }
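
/* As an illustration, if register 1 lists SImode VALUEs V1 and V2 at
   offset 0 and canon_value_cmp prefers, say, V1, then V1 becomes the
   canonical SImode value for that register: V2 and any one-part DECL
   listed there each gain a location naming V1, and the touched
   entries are re-canonicalized by canonicalize_values_star.  */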
3895
3896 /* Remove any redundant values in the location list of VAR, which must
3897 be unshared and 1-part. */
3898
3899 static void
3900 remove_duplicate_values (variable var)
3901 {
3902 location_chain node, *nodep;
3903
3904 gcc_assert (var->onepart);
3905 gcc_assert (var->n_var_parts == 1);
3906 gcc_assert (var->refcount == 1);
3907
3908 for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
3909 {
3910 if (GET_CODE (node->loc) == VALUE)
3911 {
3912 if (VALUE_RECURSED_INTO (node->loc))
3913 {
3914 /* Remove duplicate value node. */
3915 *nodep = node->next;
3916 pool_free (loc_chain_pool, node);
3917 continue;
3918 }
3919 else
3920 VALUE_RECURSED_INTO (node->loc) = true;
3921 }
3922 nodep = &node->next;
3923 }
3924
3925 for (node = var->var_part[0].loc_chain; node; node = node->next)
3926 if (GET_CODE (node->loc) == VALUE)
3927 {
3928 gcc_assert (VALUE_RECURSED_INTO (node->loc));
3929 VALUE_RECURSED_INTO (node->loc) = false;
3930 }
3931 }
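
/* E.g. a location chain V1 -> (reg:SI 3) -> V1 loses its second V1:
   the first walk marks each VALUE with VALUE_RECURSED_INTO and frees
   nodes that are already marked; the second walk clears the flags.  */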
3932
3933
3934 /* Hash table iteration argument passed to variable_post_merge. */
3935 struct dfset_post_merge
3936 {
3937 /* The new input set for the current block. */
3938 dataflow_set *set;
3939 /* Pointer to the permanent input set for the current block, or
3940 NULL. */
3941 dataflow_set **permp;
3942 };
3943
3944 /* Create values for incoming expressions associated with one-part
3945 variables that don't have value numbers for them. */
3946
3947 static int
3948 variable_post_merge_new_vals (void **slot, void *info)
3949 {
3950 struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
3951 dataflow_set *set = dfpm->set;
3952 variable var = (variable)*slot;
3953 location_chain node;
3954
3955 if (!var->onepart || !var->n_var_parts)
3956 return 1;
3957
3958 gcc_assert (var->n_var_parts == 1);
3959
3960 if (dv_is_decl_p (var->dv))
3961 {
3962 bool check_dupes = false;
3963
3964 restart:
3965 for (node = var->var_part[0].loc_chain; node; node = node->next)
3966 {
3967 if (GET_CODE (node->loc) == VALUE)
3968 gcc_assert (!VALUE_RECURSED_INTO (node->loc));
3969 else if (GET_CODE (node->loc) == REG)
3970 {
3971 attrs att, *attp, *curp = NULL;
3972
3973 if (var->refcount != 1)
3974 {
3975 slot = unshare_variable (set, slot, var,
3976 VAR_INIT_STATUS_INITIALIZED);
3977 var = (variable)*slot;
3978 goto restart;
3979 }
3980
3981 for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
3982 attp = &att->next)
3983 if (att->offset == 0
3984 && GET_MODE (att->loc) == GET_MODE (node->loc))
3985 {
3986 if (dv_is_value_p (att->dv))
3987 {
3988 rtx cval = dv_as_value (att->dv);
3989 node->loc = cval;
3990 check_dupes = true;
3991 break;
3992 }
3993 else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
3994 curp = attp;
3995 }
3996
3997 if (!curp)
3998 {
3999 curp = attp;
4000 while (*curp)
4001 if ((*curp)->offset == 0
4002 && GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
4003 && dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
4004 break;
4005 else
4006 curp = &(*curp)->next;
4007 gcc_assert (*curp);
4008 }
4009
4010 if (!att)
4011 {
4012 decl_or_value cdv;
4013 rtx cval;
4014
4015 if (!*dfpm->permp)
4016 {
4017 *dfpm->permp = XNEW (dataflow_set);
4018 dataflow_set_init (*dfpm->permp);
4019 }
4020
4021 for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
4022 att; att = att->next)
4023 if (GET_MODE (att->loc) == GET_MODE (node->loc))
4024 {
4025 gcc_assert (att->offset == 0
4026 && dv_is_value_p (att->dv));
4027 val_reset (set, att->dv);
4028 break;
4029 }
4030
4031 if (att)
4032 {
4033 cdv = att->dv;
4034 cval = dv_as_value (cdv);
4035 }
4036 else
4037 {
4038 /* Create a unique value to hold this register, one
4039 that ought to be found and reused in
4040 subsequent rounds. */
4041 cselib_val *v;
4042 gcc_assert (!cselib_lookup (node->loc,
4043 GET_MODE (node->loc), 0,
4044 VOIDmode));
4045 v = cselib_lookup (node->loc, GET_MODE (node->loc), 1,
4046 VOIDmode);
4047 cselib_preserve_value (v);
4048 cselib_invalidate_rtx (node->loc);
4049 cval = v->val_rtx;
4050 cdv = dv_from_value (cval);
4051 if (dump_file)
4052 fprintf (dump_file,
4053 "Created new value %u:%u for reg %i\n",
4054 v->uid, v->hash, REGNO (node->loc));
4055 }
4056
4057 var_reg_decl_set (*dfpm->permp, node->loc,
4058 VAR_INIT_STATUS_INITIALIZED,
4059 cdv, 0, NULL, INSERT);
4060
4061 node->loc = cval;
4062 check_dupes = true;
4063 }
4064
4065 /* Remove attribute referring to the decl, which now
4066 uses the value for the register, already existing or
4067 to be added when we bring perm in. */
4068 att = *curp;
4069 *curp = att->next;
4070 pool_free (attrs_pool, att);
4071 }
4072 }
4073
4074 if (check_dupes)
4075 remove_duplicate_values (var);
4076 }
4077
4078 return 1;
4079 }
4080
4081 /* Reset values in the permanent set that are not associated with the
4082 chosen expression. */
4083
4084 static int
4085 variable_post_merge_perm_vals (void **pslot, void *info)
4086 {
4087 struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
4088 dataflow_set *set = dfpm->set;
4089 variable pvar = (variable)*pslot, var;
4090 location_chain pnode;
4091 decl_or_value dv;
4092 attrs att;
4093
4094 gcc_assert (dv_is_value_p (pvar->dv)
4095 && pvar->n_var_parts == 1);
4096 pnode = pvar->var_part[0].loc_chain;
4097 gcc_assert (pnode
4098 && !pnode->next
4099 && REG_P (pnode->loc));
4100
4101 dv = pvar->dv;
4102
4103 var = shared_hash_find (set->vars, dv);
4104 if (var)
4105 {
4106 /* Although variable_post_merge_new_vals may have made decls
4107 non-star-canonical, values that pre-existed in canonical form
4108 remain canonical, and newly-created values reference a single
4109 REG, so they are canonical as well. Since VAR has the
4110 location list for a VALUE, using find_loc_in_1pdv for it is
4111 fine, since VALUEs don't map back to DECLs. */
4112 if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
4113 return 1;
4114 val_reset (set, dv);
4115 }
4116
4117 for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
4118 if (att->offset == 0
4119 && GET_MODE (att->loc) == GET_MODE (pnode->loc)
4120 && dv_is_value_p (att->dv))
4121 break;
4122
4123 /* If there is a value associated with this register already, create
4124 an equivalence. */
4125 if (att && dv_as_value (att->dv) != dv_as_value (dv))
4126 {
4127 rtx cval = dv_as_value (att->dv);
4128 set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
4129 set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
4130 NULL, INSERT);
4131 }
4132 else if (!att)
4133 {
4134 attrs_list_insert (&set->regs[REGNO (pnode->loc)],
4135 dv, 0, pnode->loc);
4136 variable_union (pvar, set);
4137 }
4138
4139 return 1;
4140 }
4141
4142 /* Adjust SET after merging: create values where needed, fold in
4143 PERMP's register equivalences, and canonicalize the result. */
4144
4145 static void
4146 dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
4147 {
4148 struct dfset_post_merge dfpm;
4149
4150 dfpm.set = set;
4151 dfpm.permp = permp;
4152
4153 htab_traverse (shared_hash_htab (set->vars), variable_post_merge_new_vals,
4154 &dfpm);
4155 if (*permp)
4156 htab_traverse (shared_hash_htab ((*permp)->vars),
4157 variable_post_merge_perm_vals, &dfpm);
4158 htab_traverse (shared_hash_htab (set->vars), canonicalize_values_star, set);
4159 htab_traverse (shared_hash_htab (set->vars), canonicalize_vars_star, set);
4160 }
4161
4162 /* Return a node whose loc is a MEM that refers to EXPR in the
4163 location list of a one-part variable or value VAR, or in that of
4164 any values recursively mentioned in the location lists. */
4165
4166 static location_chain
4167 find_mem_expr_in_1pdv (tree expr, rtx val, htab_t vars)
4168 {
4169 location_chain node;
4170 decl_or_value dv;
4171 variable var;
4172 location_chain where = NULL;
4173
4174 if (!val)
4175 return NULL;
4176
4177 gcc_assert (GET_CODE (val) == VALUE
4178 && !VALUE_RECURSED_INTO (val));
4179
4180 dv = dv_from_value (val);
4181 var = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
4182
4183 if (!var)
4184 return NULL;
4185
4186 gcc_assert (var->onepart);
4187
4188 if (!var->n_var_parts)
4189 return NULL;
4190
4191 VALUE_RECURSED_INTO (val) = true;
4192
4193 for (node = var->var_part[0].loc_chain; node; node = node->next)
4194 if (MEM_P (node->loc)
4195 && MEM_EXPR (node->loc) == expr
4196 && INT_MEM_OFFSET (node->loc) == 0)
4197 {
4198 where = node;
4199 break;
4200 }
4201 else if (GET_CODE (node->loc) == VALUE
4202 && !VALUE_RECURSED_INTO (node->loc)
4203 && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
4204 break;
4205
4206 VALUE_RECURSED_INTO (val) = false;
4207
4208 return where;
4209 }
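
/* E.g. if VAL's chain holds another VALUE whose own chain holds
   (mem:SI ... [ EXPR+0 ]), that MEM node is found through the
   recursion; VALUE_RECURSED_INTO guards against cycles.  */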
4210
4211 /* Return TRUE if the value of MEM may vary across a call. */
4212
4213 static bool
4214 mem_dies_at_call (rtx mem)
4215 {
4216 tree expr = MEM_EXPR (mem);
4217 tree decl;
4218
4219 if (!expr)
4220 return true;
4221
4222 decl = get_base_address (expr);
4223
4224 if (!decl)
4225 return true;
4226
4227 if (!DECL_P (decl))
4228 return true;
4229
4230 return (may_be_aliased (decl)
4231 || (!TREE_READONLY (decl) && is_global_var (decl)));
4232 }
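
/* So a MEM whose base decl is a local that never has its address
   taken is assumed to survive the call, while any decl that
   may_be_aliased, or a non-readonly global, is conservatively
   assumed clobbered.  */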
4233
4234 /* Remove all MEMs from the location list of a hash table entry for a
4235 one-part variable, except those whose MEM attributes map back to
4236 the variable itself, directly or within a VALUE. */
4237
4238 static int
4239 dataflow_set_preserve_mem_locs (void **slot, void *data)
4240 {
4241 dataflow_set *set = (dataflow_set *) data;
4242 variable var = (variable) *slot;
4243
4244 if (var->onepart == ONEPART_VDECL || var->onepart == ONEPART_DEXPR)
4245 {
4246 tree decl = dv_as_decl (var->dv);
4247 location_chain loc, *locp;
4248 bool changed = false;
4249
4250 if (!var->n_var_parts)
4251 return 1;
4252
4253 gcc_assert (var->n_var_parts == 1);
4254
4255 if (shared_var_p (var, set->vars))
4256 {
4257 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4258 {
4259 /* We want to remove dying MEMs that don't refer to DECL. */
4260 if (GET_CODE (loc->loc) == MEM
4261 && (MEM_EXPR (loc->loc) != decl
4262 || INT_MEM_OFFSET (loc->loc) != 0)
4263 && !mem_dies_at_call (loc->loc))
4264 break;
4265 /* We want to move MEMs that do refer to DECL here. */
4266 else if (GET_CODE (loc->loc) == VALUE
4267 && find_mem_expr_in_1pdv (decl, loc->loc,
4268 shared_hash_htab (set->vars)))
4269 break;
4270 }
4271
4272 if (!loc)
4273 return 1;
4274
4275 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4276 var = (variable)*slot;
4277 gcc_assert (var->n_var_parts == 1);
4278 }
4279
4280 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4281 loc; loc = *locp)
4282 {
4283 rtx old_loc = loc->loc;
4284 if (GET_CODE (old_loc) == VALUE)
4285 {
4286 location_chain mem_node
4287 = find_mem_expr_in_1pdv (decl, loc->loc,
4288 shared_hash_htab (set->vars));
4289
4290 /* ??? This picks up only one out of multiple MEMs that
4291 refer to the same variable. Do we ever need to be
4292 concerned about dealing with more than one, or, given
4293 that they should all map to the same variable
4294 location, their addresses will have been merged and
4295 they will be regarded as equivalent? */
4296 if (mem_node)
4297 {
4298 loc->loc = mem_node->loc;
4299 loc->set_src = mem_node->set_src;
4300 loc->init = MIN (loc->init, mem_node->init);
4301 }
4302 }
4303
4304 if (GET_CODE (loc->loc) != MEM
4305 || (MEM_EXPR (loc->loc) == decl
4306 && INT_MEM_OFFSET (loc->loc) == 0)
4307 || !mem_dies_at_call (loc->loc))
4308 {
4309 if (old_loc != loc->loc && emit_notes)
4310 {
4311 if (old_loc == var->var_part[0].cur_loc)
4312 {
4313 changed = true;
4314 var->var_part[0].cur_loc = NULL;
4315 }
4316 }
4317 locp = &loc->next;
4318 continue;
4319 }
4320
4321 if (emit_notes)
4322 {
4323 if (old_loc == var->var_part[0].cur_loc)
4324 {
4325 changed = true;
4326 var->var_part[0].cur_loc = NULL;
4327 }
4328 }
4329 *locp = loc->next;
4330 pool_free (loc_chain_pool, loc);
4331 }
4332
4333 if (!var->var_part[0].loc_chain)
4334 {
4335 var->n_var_parts--;
4336 changed = true;
4337 }
4338 if (changed)
4339 variable_was_changed (var, set);
4340 }
4341
4342 return 1;
4343 }
4344
4345 /* Remove all MEMs from the location list of a hash table entry for a
4346 value. */
4347
4348 static int
4349 dataflow_set_remove_mem_locs (void **slot, void *data)
4350 {
4351 dataflow_set *set = (dataflow_set *) data;
4352 variable var = (variable) *slot;
4353
4354 if (var->onepart == ONEPART_VALUE)
4355 {
4356 location_chain loc, *locp;
4357 bool changed = false;
4358 rtx cur_loc;
4359
4360 gcc_assert (var->n_var_parts == 1);
4361
4362 if (shared_var_p (var, set->vars))
4363 {
4364 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4365 if (GET_CODE (loc->loc) == MEM
4366 && mem_dies_at_call (loc->loc))
4367 break;
4368
4369 if (!loc)
4370 return 1;
4371
4372 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4373 var = (variable)*slot;
4374 gcc_assert (var->n_var_parts == 1);
4375 }
4376
4377 if (VAR_LOC_1PAUX (var))
4378 cur_loc = VAR_LOC_FROM (var);
4379 else
4380 cur_loc = var->var_part[0].cur_loc;
4381
4382 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4383 loc; loc = *locp)
4384 {
4385 if (GET_CODE (loc->loc) != MEM
4386 || !mem_dies_at_call (loc->loc))
4387 {
4388 locp = &loc->next;
4389 continue;
4390 }
4391
4392 *locp = loc->next;
4393 /* If we have deleted the location that was last emitted
4394 we have to emit a new location, so add the variable to the
4395 set of changed variables. */
4396 if (cur_loc == loc->loc)
4397 {
4398 changed = true;
4399 var->var_part[0].cur_loc = NULL;
4400 if (VAR_LOC_1PAUX (var))
4401 VAR_LOC_FROM (var) = NULL;
4402 }
4403 pool_free (loc_chain_pool, loc);
4404 }
4405
4406 if (!var->var_part[0].loc_chain)
4407 {
4408 var->n_var_parts--;
4409 changed = true;
4410 }
4411 if (changed)
4412 variable_was_changed (var, set);
4413 }
4414
4415 return 1;
4416 }
4417
4418 /* Remove all variable-location information about call-clobbered
4419 registers, as well as associations between MEMs and VALUEs. */
4420
4421 static void
4422 dataflow_set_clear_at_call (dataflow_set *set)
4423 {
4424 int r;
4425
4426 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
4427 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, r))
4428 var_regno_delete (set, r);
4429
4430 if (MAY_HAVE_DEBUG_INSNS)
4431 {
4432 set->traversed_vars = set->vars;
4433 htab_traverse (shared_hash_htab (set->vars),
4434 dataflow_set_preserve_mem_locs, set);
4435 set->traversed_vars = set->vars;
4436 htab_traverse (shared_hash_htab (set->vars), dataflow_set_remove_mem_locs,
4437 set);
4438 set->traversed_vars = NULL;
4439 }
4440 }
4441
4442 static bool
4443 variable_part_different_p (variable_part *vp1, variable_part *vp2)
4444 {
4445 location_chain lc1, lc2;
4446
4447 for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
4448 {
4449 for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
4450 {
4451 if (REG_P (lc1->loc) && REG_P (lc2->loc))
4452 {
4453 if (REGNO (lc1->loc) == REGNO (lc2->loc))
4454 break;
4455 }
4456 if (rtx_equal_p (lc1->loc, lc2->loc))
4457 break;
4458 }
4459 if (!lc2)
4460 return true;
4461 }
4462 return false;
4463 }
4464
4465 /* Return true if one-part variables VAR1 and VAR2 are different.
4466 They must be in canonical order. */
4467
4468 static bool
4469 onepart_variable_different_p (variable var1, variable var2)
4470 {
4471 location_chain lc1, lc2;
4472
4473 if (var1 == var2)
4474 return false;
4475
4476 gcc_assert (var1->n_var_parts == 1
4477 && var2->n_var_parts == 1);
4478
4479 lc1 = var1->var_part[0].loc_chain;
4480 lc2 = var2->var_part[0].loc_chain;
4481
4482 gcc_assert (lc1 && lc2);
4483
4484 while (lc1 && lc2)
4485 {
4486 if (loc_cmp (lc1->loc, lc2->loc))
4487 return true;
4488 lc1 = lc1->next;
4489 lc2 = lc2->next;
4490 }
4491
4492 return lc1 != lc2;
4493 }
4494
4495 /* Return true if variables VAR1 and VAR2 are different. */
4496
4497 static bool
4498 variable_different_p (variable var1, variable var2)
4499 {
4500 int i;
4501
4502 if (var1 == var2)
4503 return false;
4504
4505 if (var1->onepart != var2->onepart)
4506 return true;
4507
4508 if (var1->n_var_parts != var2->n_var_parts)
4509 return true;
4510
4511 if (var1->onepart && var1->n_var_parts)
4512 {
4513 gcc_checking_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv)
4514 && var1->n_var_parts == 1);
4515 /* One-part values have locations in a canonical order. */
4516 return onepart_variable_different_p (var1, var2);
4517 }
4518
4519 for (i = 0; i < var1->n_var_parts; i++)
4520 {
4521 if (VAR_PART_OFFSET (var1, i) != VAR_PART_OFFSET (var2, i))
4522 return true;
4523 if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
4524 return true;
4525 if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
4526 return true;
4527 }
4528 return false;
4529 }
4530
4531 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
4532
4533 static bool
4534 dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
4535 {
4536 htab_iterator hi;
4537 variable var1;
4538
4539 if (old_set->vars == new_set->vars)
4540 return false;
4541
4542 if (htab_elements (shared_hash_htab (old_set->vars))
4543 != htab_elements (shared_hash_htab (new_set->vars)))
4544 return true;
4545
4546 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (old_set->vars), var1, variable, hi)
4547 {
4548 htab_t htab = shared_hash_htab (new_set->vars);
4549 variable var2 = (variable) htab_find_with_hash (htab, var1->dv,
4550 dv_htab_hash (var1->dv));
4551 if (!var2)
4552 {
4553 if (dump_file && (dump_flags & TDF_DETAILS))
4554 {
4555 fprintf (dump_file, "dataflow difference found: removal of:\n");
4556 dump_var (var1);
4557 }
4558 return true;
4559 }
4560
4561 if (variable_different_p (var1, var2))
4562 {
4563 if (dump_file && (dump_flags & TDF_DETAILS))
4564 {
4565 fprintf (dump_file, "dataflow difference found: "
4566 "old and new follow:\n");
4567 dump_var (var1);
4568 dump_var (var2);
4569 }
4570 return true;
4571 }
4572 }
4573
4574 /* No need to traverse the second hashtab: if both have the same
4575 number of elements and every entry of the second was found in the
4576 first, the second can't have any extra entries. */
4577 return false;
4578 }
4579
4580 /* Free the contents of dataflow set SET. */
4581
4582 static void
4583 dataflow_set_destroy (dataflow_set *set)
4584 {
4585 int i;
4586
4587 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4588 attrs_list_clear (&set->regs[i]);
4589
4590 shared_hash_destroy (set->vars);
4591 set->vars = NULL;
4592 }
4593
4594 /* Return true if RTL X contains a SYMBOL_REF. */
4595
4596 static bool
4597 contains_symbol_ref (rtx x)
4598 {
4599 const char *fmt;
4600 RTX_CODE code;
4601 int i;
4602
4603 if (!x)
4604 return false;
4605
4606 code = GET_CODE (x);
4607 if (code == SYMBOL_REF)
4608 return true;
4609
4610 fmt = GET_RTX_FORMAT (code);
4611 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4612 {
4613 if (fmt[i] == 'e')
4614 {
4615 if (contains_symbol_ref (XEXP (x, i)))
4616 return true;
4617 }
4618 else if (fmt[i] == 'E')
4619 {
4620 int j;
4621 for (j = 0; j < XVECLEN (x, i); j++)
4622 if (contains_symbol_ref (XVECEXP (x, i, j)))
4623 return true;
4624 }
4625 }
4626
4627 return false;
4628 }
4629
4630 /* Shall EXPR be tracked? */
4631
4632 static bool
4633 track_expr_p (tree expr, bool need_rtl)
4634 {
4635 rtx decl_rtl;
4636 tree realdecl;
4637
4638 if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
4639 return DECL_RTL_SET_P (expr);
4640
4641 /* If EXPR is not a parameter or a variable do not track it. */
4642 if (TREE_CODE (expr) != VAR_DECL && TREE_CODE (expr) != PARM_DECL)
4643 return 0;
4644
4645 /* It also must have a name... */
4646 if (!DECL_NAME (expr) && need_rtl)
4647 return 0;
4648
4649 /* ... and a RTL assigned to it. */
4650 decl_rtl = DECL_RTL_IF_SET (expr);
4651 if (!decl_rtl && need_rtl)
4652 return 0;
4653
4654 /* If this expression is really a debug alias of some other declaration, we
4655 don't need to track this expression if the ultimate declaration is
4656 ignored. */
4657 realdecl = expr;
4658 if (DECL_DEBUG_EXPR_IS_FROM (realdecl))
4659 {
4660 realdecl = DECL_DEBUG_EXPR (realdecl);
4661 if (realdecl == NULL_TREE)
4662 realdecl = expr;
4663 else if (!DECL_P (realdecl))
4664 {
4665 if (handled_component_p (realdecl))
4666 {
4667 HOST_WIDE_INT bitsize, bitpos, maxsize;
4668 tree innerdecl
4669 = get_ref_base_and_extent (realdecl, &bitpos, &bitsize,
4670 &maxsize);
4671 if (!DECL_P (innerdecl)
4672 || DECL_IGNORED_P (innerdecl)
4673 || TREE_STATIC (innerdecl)
4674 || bitsize <= 0
4675 || bitpos + bitsize > 256
4676 || bitsize != maxsize)
4677 return 0;
4678 else
4679 realdecl = expr;
4680 }
4681 else
4682 return 0;
4683 }
4684 }
4685
4686 /* Do not track EXPR if REALDECL should be ignored for debugging
4687 purposes. */
4688 if (DECL_IGNORED_P (realdecl))
4689 return 0;
4690
4691 /* Do not track global variables until we are able to emit correct location
4692 list for them. */
4693 if (TREE_STATIC (realdecl))
4694 return 0;
4695
4696 /* When the EXPR is a DECL for an alias of some variable (see the
4697 example below) the TREE_STATIC flag is not set. Disable tracking
4698 of all DECLs whose DECL_RTL contains a SYMBOL_REF.
4699
4700 Example:
4701 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
4702 char **_dl_argv;
4703 */
4704 if (decl_rtl && MEM_P (decl_rtl)
4705 && contains_symbol_ref (XEXP (decl_rtl, 0)))
4706 return 0;
4707
4708 /* If the RTX is a memory it should not be very large (because that
4709 would mean it is an array or a struct). */
4710 if (decl_rtl && MEM_P (decl_rtl))
4711 {
4712 /* Do not track structures and arrays. */
4713 if (GET_MODE (decl_rtl) == BLKmode
4714 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
4715 return 0;
4716 if (MEM_SIZE_KNOWN_P (decl_rtl)
4717 && MEM_SIZE (decl_rtl) > MAX_VAR_PARTS)
4718 return 0;
4719 }
4720
4721 DECL_CHANGED (expr) = 0;
4722 DECL_CHANGED (realdecl) = 0;
4723 return 1;
4724 }
4725
4726 /* Determine whether a given LOC refers to the same variable part as
4727 EXPR+OFFSET. */
4728
4729 static bool
4730 same_variable_part_p (rtx loc, tree expr, HOST_WIDE_INT offset)
4731 {
4732 tree expr2;
4733 HOST_WIDE_INT offset2;
4734
4735 if (! DECL_P (expr))
4736 return false;
4737
4738 if (REG_P (loc))
4739 {
4740 expr2 = REG_EXPR (loc);
4741 offset2 = REG_OFFSET (loc);
4742 }
4743 else if (MEM_P (loc))
4744 {
4745 expr2 = MEM_EXPR (loc);
4746 offset2 = INT_MEM_OFFSET (loc);
4747 }
4748 else
4749 return false;
4750
4751 if (! expr2 || ! DECL_P (expr2))
4752 return false;
4753
4754 expr = var_debug_decl (expr);
4755 expr2 = var_debug_decl (expr2);
4756
4757 return (expr == expr2 && offset == offset2);
4758 }
4759
4760 /* LOC is a REG or MEM that we would like to track if possible.
4761 If EXPR is null, we don't know what expression LOC refers to,
4762 otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
4763 LOC is an lvalue register.
4764
4765 Return true if EXPR is nonnull and if LOC, or some lowpart of it,
4766 is something we can track. When returning true, store the mode of
4767 the lowpart we can track in *MODE_OUT (if nonnull) and its offset
4768 from EXPR in *OFFSET_OUT (if nonnull). */
4769
4770 static bool
4771 track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p,
4772 enum machine_mode *mode_out, HOST_WIDE_INT *offset_out)
4773 {
4774 enum machine_mode mode;
4775
4776 if (expr == NULL || !track_expr_p (expr, true))
4777 return false;
4778
4779 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
4780 whole subreg, but only the old inner part is really relevant. */
4781 mode = GET_MODE (loc);
4782 if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
4783 {
4784 enum machine_mode pseudo_mode;
4785
4786 pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
4787 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (pseudo_mode))
4788 {
4789 offset += byte_lowpart_offset (pseudo_mode, mode);
4790 mode = pseudo_mode;
4791 }
4792 }
4793
4794 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
4795 Do the same if we are storing to a register and EXPR occupies
4796 the whole of register LOC; in that case, the whole of EXPR is
4797 being changed. We exclude complex modes from the second case
4798 because the real and imaginary parts are represented as separate
4799 pseudo registers, even if the whole complex value fits into one
4800 hard register. */
4801 if ((GET_MODE_SIZE (mode) > GET_MODE_SIZE (DECL_MODE (expr))
4802 || (store_reg_p
4803 && !COMPLEX_MODE_P (DECL_MODE (expr))
4804 && hard_regno_nregs[REGNO (loc)][DECL_MODE (expr)] == 1))
4805 && offset + byte_lowpart_offset (DECL_MODE (expr), mode) == 0)
4806 {
4807 mode = DECL_MODE (expr);
4808 offset = 0;
4809 }
4810
4811 if (offset < 0 || offset >= MAX_VAR_PARTS)
4812 return false;
4813
4814 if (mode_out)
4815 *mode_out = mode;
4816 if (offset_out)
4817 *offset_out = offset;
4818 return true;
4819 }
4820
4821 /* Return the MODE lowpart of LOC, or null if LOC is not something we
4822 want to track. When returning nonnull, make sure that the attributes
4823 on the returned value are updated. */
4824
4825 static rtx
4826 var_lowpart (enum machine_mode mode, rtx loc)
4827 {
4828 unsigned int offset, reg_offset, regno;
4829
4830 if (!REG_P (loc) && !MEM_P (loc))
4831 return NULL;
4832
4833 if (GET_MODE (loc) == mode)
4834 return loc;
4835
4836 offset = byte_lowpart_offset (mode, GET_MODE (loc));
4837
4838 if (MEM_P (loc))
4839 return adjust_address_nv (loc, mode, offset);
4840
4841 reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
4842 regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
4843 reg_offset, mode);
4844 return gen_rtx_REG_offset (loc, mode, regno, offset);
4845 }
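
/* For example, the SImode lowpart of (reg:DI 0 [ x ]) is a fresh
   (reg:SI 0) whose REG_ATTRS still refer to x, with the byte offset
   chosen by byte_lowpart_offset; for a MEM the address is simply
   offset by that amount.  */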
4846
4847 /* Carry information about uses and stores while walking rtx. */
4848
4849 struct count_use_info
4850 {
4851 /* The insn where the RTX is. */
4852 rtx insn;
4853
4854 /* The basic block where insn is. */
4855 basic_block bb;
4856
4857 /* The array of n_sets sets in the insn, as determined by cselib. */
4858 struct cselib_set *sets;
4859 int n_sets;
4860
4861 /* True if we're counting stores, false otherwise. */
4862 bool store_p;
4863 };
4864
4865 /* Find a VALUE corresponding to X. */
4866
4867 static inline cselib_val *
4868 find_use_val (rtx x, enum machine_mode mode, struct count_use_info *cui)
4869 {
4870 int i;
4871
4872 if (cui->sets)
4873 {
4874 /* This is called after uses are set up and before stores are
4875 processed by cselib, so it's safe to look up srcs, but not
4876 dsts. So we look up expressions that appear in srcs or in
4877 dest expressions, but we search the sets array for dests of
4878 stores. */
4879 if (cui->store_p)
4880 {
4881 /* Some targets represent memset and memcpy patterns
4882 by (set (mem:BLK ...) (reg:[QHSD]I ...)) or
4883 (set (mem:BLK ...) (const_int ...)) or
4884 (set (mem:BLK ...) (mem:BLK ...)). Don't return anything
4885 in that case, otherwise we end up with mode mismatches. */
4886 if (mode == BLKmode && MEM_P (x))
4887 return NULL;
4888 for (i = 0; i < cui->n_sets; i++)
4889 if (cui->sets[i].dest == x)
4890 return cui->sets[i].src_elt;
4891 }
4892 else
4893 return cselib_lookup (x, mode, 0, VOIDmode);
4894 }
4895
4896 return NULL;
4897 }
4898
4899 /* Helper function to get mode of MEM's address. */
4900
4901 static inline enum machine_mode
4902 get_address_mode (rtx mem)
4903 {
4904 enum machine_mode mode = GET_MODE (XEXP (mem, 0));
4905 if (mode != VOIDmode)
4906 return mode;
4907 return targetm.addr_space.address_mode (MEM_ADDR_SPACE (mem));
4908 }
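
/* E.g. for (mem:SI (reg:DI 1)) this yields DImode from the address
   itself; for an address whose mode is VOIDmode (e.g. a bare
   constant) it falls back to the target's address mode for the
   MEM's address space.  */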
4909
4910 /* Replace all registers and addresses in an expression with VALUE
4911 expressions that map back to them, unless the expression is a
4912 register. If no mapping is or can be performed, returns NULL. */
4913
4914 static rtx
4915 replace_expr_with_values (rtx loc)
4916 {
4917 if (REG_P (loc) || GET_CODE (loc) == ENTRY_VALUE)
4918 return NULL;
4919 else if (MEM_P (loc))
4920 {
4921 cselib_val *addr = cselib_lookup (XEXP (loc, 0),
4922 get_address_mode (loc), 0,
4923 GET_MODE (loc));
4924 if (addr)
4925 return replace_equiv_address_nv (loc, addr->val_rtx);
4926 else
4927 return NULL;
4928 }
4929 else
4930 return cselib_subst_to_values (loc, VOIDmode);
4931 }
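
/* For instance, (mem:SI (plus:DI (reg:DI 1) (const_int 8))) becomes
   (mem:SI (value:DI Vn)) once cselib has a VALUE for the address,
   whereas bare REGs and ENTRY_VALUEs yield NULL and are kept
   as they are.  */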
4932
4933 /* Return true if *X is a DEBUG_EXPR. Usable as an argument to
4934 for_each_rtx to tell whether there are any DEBUG_EXPRs within
4935 RTX. */
4936
4937 static int
4938 rtx_debug_expr_p (rtx *x, void *data ATTRIBUTE_UNUSED)
4939 {
4940 rtx loc = *x;
4941
4942 return GET_CODE (loc) == DEBUG_EXPR;
4943 }
4944
4945 /* Determine what kind of micro operation to choose for a USE. Return
4946 MO_CLOBBER if no micro operation is to be generated. */
4947
4948 static enum micro_operation_type
4949 use_type (rtx loc, struct count_use_info *cui, enum machine_mode *modep)
4950 {
4951 tree expr;
4952
4953 if (cui && cui->sets)
4954 {
4955 if (GET_CODE (loc) == VAR_LOCATION)
4956 {
4957 if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
4958 {
4959 rtx ploc = PAT_VAR_LOCATION_LOC (loc);
4960 if (! VAR_LOC_UNKNOWN_P (ploc))
4961 {
4962 cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1,
4963 VOIDmode);
4964
4965 /* ??? flag_float_store and volatile mems are never
4966 given values, but we could in theory use them for
4967 locations. */
4968 gcc_assert (val || 1);
4969 }
4970 return MO_VAL_LOC;
4971 }
4972 else
4973 return MO_CLOBBER;
4974 }
4975
4976 if (REG_P (loc) || MEM_P (loc))
4977 {
4978 if (modep)
4979 *modep = GET_MODE (loc);
4980 if (cui->store_p)
4981 {
4982 if (REG_P (loc)
4983 || (find_use_val (loc, GET_MODE (loc), cui)
4984 && cselib_lookup (XEXP (loc, 0),
4985 get_address_mode (loc), 0,
4986 GET_MODE (loc))))
4987 return MO_VAL_SET;
4988 }
4989 else
4990 {
4991 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
4992
4993 if (val && !cselib_preserved_value_p (val))
4994 return MO_VAL_USE;
4995 }
4996 }
4997 }
4998
4999 if (REG_P (loc))
5000 {
5001 gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);
5002
5003 if (loc == cfa_base_rtx)
5004 return MO_CLOBBER;
5005 expr = REG_EXPR (loc);
5006
5007 if (!expr)
5008 return MO_USE_NO_VAR;
5009 else if (target_for_debug_bind (var_debug_decl (expr)))
5010 return MO_CLOBBER;
5011 else if (track_loc_p (loc, expr, REG_OFFSET (loc),
5012 false, modep, NULL))
5013 return MO_USE;
5014 else
5015 return MO_USE_NO_VAR;
5016 }
5017 else if (MEM_P (loc))
5018 {
5019 expr = MEM_EXPR (loc);
5020
5021 if (!expr)
5022 return MO_CLOBBER;
5023 else if (target_for_debug_bind (var_debug_decl (expr)))
5024 return MO_CLOBBER;
5025 else if (track_loc_p (loc, expr, INT_MEM_OFFSET (loc),
5026 false, modep, NULL)
5027 /* Multi-part variables shouldn't refer to one-part
5028 variable names such as VALUEs (never happens) or
5029 DEBUG_EXPRs (only happens in the presence of debug
5030 insns). */
5031 && (!MAY_HAVE_DEBUG_INSNS
5032 || !for_each_rtx (&XEXP (loc, 0), rtx_debug_expr_p, NULL)))
5033 return MO_USE;
5034 else
5035 return MO_CLOBBER;
5036 }
5037
5038 return MO_CLOBBER;
5039 }
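
/* In short: within a cselib-analyzed insn, tracked locations become
   MO_VAL_LOC, MO_VAL_USE or MO_VAL_SET micro-operations; otherwise a
   REG or MEM with a usable REG_EXPR/MEM_EXPR yields MO_USE, a hard
   register without one MO_USE_NO_VAR, and anything else is
   MO_CLOBBER.  */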
5040
5041 /* Log to OUT information about micro-operation MOPT involving X in
5042 INSN of BB. */
5043
5044 static inline void
5045 log_op_type (rtx x, basic_block bb, rtx insn,
5046 enum micro_operation_type mopt, FILE *out)
5047 {
5048 fprintf (out, "bb %i op %i insn %i %s ",
5049 bb->index, VEC_length (micro_operation, VTI (bb)->mos),
5050 INSN_UID (insn), micro_operation_type_name[mopt]);
5051 print_inline_rtx (out, x, 2);
5052 fputc ('\n', out);
5053 }
5054
5055 /* Tell whether the CONCAT used to hold a VALUE and its location
5056 needs value resolution, i.e., an attempt to map the location
5057 back to other incoming values. */
5058 #define VAL_NEEDS_RESOLUTION(x) \
5059 (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
5060 /* Whether the location in the CONCAT is a tracked expression, which
5061 should also be handled like a MO_USE. */
5062 #define VAL_HOLDS_TRACK_EXPR(x) \
5063 (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
5064 /* Whether the location in the CONCAT should be handled like a MO_COPY
5065 as well. */
5066 #define VAL_EXPR_IS_COPIED(x) \
5067 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
5068 /* Whether the location in the CONCAT should be handled like a
5069 MO_CLOBBER as well. */
5070 #define VAL_EXPR_IS_CLOBBERED(x) \
5071 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
5072
5073 /* All preserved VALUEs. */
5074 static VEC (rtx, heap) *preserved_values;
5075
5076 /* Ensure VAL is preserved and remember it in a vector for vt_emit_notes. */
5077
5078 static void
5079 preserve_value (cselib_val *val)
5080 {
5081 cselib_preserve_value (val);
5082 VEC_safe_push (rtx, heap, preserved_values, val->val_rtx);
5083 }
5084
5085 /* Helper function for MO_VAL_LOC handling. Return nonzero if
5086 any rtxes that are not suitable for CONST use and are not replaced
5087 by VALUEs are discovered. */
5088
5089 static int
5090 non_suitable_const (rtx *x, void *data ATTRIBUTE_UNUSED)
5091 {
5092 if (*x == NULL_RTX)
5093 return 0;
5094
5095 switch (GET_CODE (*x))
5096 {
5097 case REG:
5098 case DEBUG_EXPR:
5099 case PC:
5100 case SCRATCH:
5101 case CC0:
5102 case ASM_INPUT:
5103 case ASM_OPERANDS:
5104 return 1;
5105 case MEM:
5106 return !MEM_READONLY_P (*x);
5107 default:
5108 return 0;
5109 }
5110 }
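
/* Thus (const (plus (symbol_ref "x") (const_int 4))) is acceptable,
   while any subexpression that is a REG, a writable MEM, or one of
   the other codes above makes the whole rtx unsuitable for a CONST
   location.  */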
5111
5112 /* Add the use (register or memory reference) LOC, if tracked, to
5113 VTI (bb)->mos. INSN is the instruction of which LOC is a part. */
5114
5115 static int
5116 add_uses (rtx *ploc, void *data)
5117 {
5118 rtx loc = *ploc;
5119 enum machine_mode mode = VOIDmode;
5120 struct count_use_info *cui = (struct count_use_info *)data;
5121 enum micro_operation_type type = use_type (loc, cui, &mode);
5122
5123 if (type != MO_CLOBBER)
5124 {
5125 basic_block bb = cui->bb;
5126 micro_operation mo;
5127
5128 mo.type = type;
5129 mo.u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
5130 mo.insn = cui->insn;
5131
5132 if (type == MO_VAL_LOC)
5133 {
5134 rtx oloc = loc;
5135 rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
5136 cselib_val *val;
5137
5138 gcc_assert (cui->sets);
5139
5140 if (MEM_P (vloc)
5141 && !REG_P (XEXP (vloc, 0))
5142 && !MEM_P (XEXP (vloc, 0)))
5143 {
5144 rtx mloc = vloc;
5145 enum machine_mode address_mode = get_address_mode (mloc);
5146 cselib_val *val
5147 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5148 GET_MODE (mloc));
5149
5150 if (val && !cselib_preserved_value_p (val))
5151 preserve_value (val);
5152 }
5153
5154 if (CONSTANT_P (vloc)
5155 && (GET_CODE (vloc) != CONST
5156 || for_each_rtx (&vloc, non_suitable_const, NULL)))
5157 /* For constants don't look up any value. */;
5158 else if (!VAR_LOC_UNKNOWN_P (vloc) && !unsuitable_loc (vloc)
5159 && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
5160 {
5161 enum machine_mode mode2;
5162 enum micro_operation_type type2;
5163 rtx nloc = NULL;
5164 bool resolvable = REG_P (vloc) || MEM_P (vloc);
5165
5166 if (resolvable)
5167 nloc = replace_expr_with_values (vloc);
5168
5169 if (nloc)
5170 {
5171 oloc = shallow_copy_rtx (oloc);
5172 PAT_VAR_LOCATION_LOC (oloc) = nloc;
5173 }
5174
5175 oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);
5176
5177 type2 = use_type (vloc, 0, &mode2);
5178
5179 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5180 || type2 == MO_CLOBBER);
5181
5182 if (type2 == MO_CLOBBER
5183 && !cselib_preserved_value_p (val))
5184 {
5185 VAL_NEEDS_RESOLUTION (oloc) = resolvable;
5186 preserve_value (val);
5187 }
5188 }
5189 else if (!VAR_LOC_UNKNOWN_P (vloc))
5190 {
5191 oloc = shallow_copy_rtx (oloc);
5192 PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
5193 }
5194
5195 mo.u.loc = oloc;
5196 }
5197 else if (type == MO_VAL_USE)
5198 {
5199 enum machine_mode mode2 = VOIDmode;
5200 enum micro_operation_type type2;
5201 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5202 rtx vloc, oloc = loc, nloc;
5203
5204 gcc_assert (cui->sets);
5205
5206 if (MEM_P (oloc)
5207 && !REG_P (XEXP (oloc, 0))
5208 && !MEM_P (XEXP (oloc, 0)))
5209 {
5210 rtx mloc = oloc;
5211 enum machine_mode address_mode = get_address_mode (mloc);
5212 cselib_val *val
5213 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5214 GET_MODE (mloc));
5215
5216 if (val && !cselib_preserved_value_p (val))
5217 preserve_value (val);
5218 }
5219
5220 type2 = use_type (loc, 0, &mode2);
5221
5222 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5223 || type2 == MO_CLOBBER);
5224
5225 if (type2 == MO_USE)
5226 vloc = var_lowpart (mode2, loc);
5227 else
5228 vloc = oloc;
5229
5230 /* The loc of a MO_VAL_USE may have two forms:
5231
5232 (concat val src): val is at src, a value-based
5233 representation.
5234
5235 (concat (concat val use) src): same as above, with use as
5236 the MO_USE tracked value, if it differs from src.
5237
5238 */
5239
5240 gcc_checking_assert (REG_P (loc) || MEM_P (loc));
5241 nloc = replace_expr_with_values (loc);
5242 if (!nloc)
5243 nloc = oloc;
5244
5245 if (vloc != nloc)
5246 oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
5247 else
5248 oloc = val->val_rtx;
5249
5250 mo.u.loc = gen_rtx_CONCAT (mode, oloc, nloc);
5251
5252 if (type2 == MO_USE)
5253 VAL_HOLDS_TRACK_EXPR (mo.u.loc) = 1;
5254 if (!cselib_preserved_value_p (val))
5255 {
5256 VAL_NEEDS_RESOLUTION (mo.u.loc) = 1;
5257 preserve_value (val);
5258 }
5259 }
5260 else
5261 gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);
5262
5263 if (dump_file && (dump_flags & TDF_DETAILS))
5264 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
5265 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
5266 }
5267
5268 return 0;
5269 }
5270
5271 /* Helper function for finding all uses of REG/MEM in X in insn INSN. */
5272
5273 static void
5274 add_uses_1 (rtx *x, void *cui)
5275 {
5276 for_each_rtx (x, add_uses, cui);
5277 }
5278
5279 /* This is the value used during expansion of locations. We want it
5280 to be unbounded, so that variables expanded deep in a recursion
5281 nest are fully evaluated and their values cached correctly. We
5282 avoid recursion cycles through other means, and we don't unshare
5283 RTL, so excess complexity is not a problem. */
5284 #define EXPR_DEPTH (INT_MAX)
5285 /* We use this to keep too-complex expressions from being emitted as
5286 location notes, and then to debug information. Users can trade
5287 compile time for ridiculously complex expressions, although they're
5288 seldom useful, and they may often have to be discarded as not
5289 representable anyway. */
5290 #define EXPR_USE_DEPTH (PARAM_VALUE (PARAM_MAX_VARTRACK_EXPR_DEPTH))
5291
5292 /* Attempt to reverse the EXPR operation in the debug info and record
5293 it in the cselib table. Say for reg1 = reg2 + 6 even when reg2 is
5294 no longer live we can express its value as VAL - 6. */
5295
5296 static void
5297 reverse_op (rtx val, const_rtx expr, rtx insn)
5298 {
5299 rtx src, arg, ret;
5300 cselib_val *v;
5301 struct elt_loc_list *l;
5302 enum rtx_code code;
5303
5304 if (GET_CODE (expr) != SET)
5305 return;
5306
5307 if (!REG_P (SET_DEST (expr)) || GET_MODE (val) != GET_MODE (SET_DEST (expr)))
5308 return;
5309
5310 src = SET_SRC (expr);
5311 switch (GET_CODE (src))
5312 {
5313 case PLUS:
5314 case MINUS:
5315 case XOR:
5316 case NOT:
5317 case NEG:
5318 if (!REG_P (XEXP (src, 0)))
5319 return;
5320 break;
5321 case SIGN_EXTEND:
5322 case ZERO_EXTEND:
5323 if (!REG_P (XEXP (src, 0)) && !MEM_P (XEXP (src, 0)))
5324 return;
5325 break;
5326 default:
5327 return;
5328 }
5329
5330 if (!SCALAR_INT_MODE_P (GET_MODE (src)) || XEXP (src, 0) == cfa_base_rtx)
5331 return;
5332
5333 v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0, VOIDmode);
5334 if (!v || !cselib_preserved_value_p (v))
5335 return;
5336
5337 /* Adding a reverse op isn't useful if V already has an always valid
5338 location. Ignore ENTRY_VALUE: while it is always constant, we should
5339 prefer non-ENTRY_VALUE locations whenever possible. */
5340 for (l = v->locs; l; l = l->next)
5341 if (CONSTANT_P (l->loc)
5342 && (GET_CODE (l->loc) != CONST || !references_value_p (l->loc, 0)))
5343 return;
5344
5345 switch (GET_CODE (src))
5346 {
5347 case NOT:
5348 case NEG:
5349 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5350 return;
5351 ret = gen_rtx_fmt_e (GET_CODE (src), GET_MODE (val), val);
5352 break;
5353 case SIGN_EXTEND:
5354 case ZERO_EXTEND:
5355 ret = gen_lowpart_SUBREG (GET_MODE (v->val_rtx), val);
5356 break;
5357 case XOR:
5358 code = XOR;
5359 goto binary;
5360 case PLUS:
5361 code = MINUS;
5362 goto binary;
5363 case MINUS:
5364 code = PLUS;
5365 goto binary;
5366 binary:
5367 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5368 return;
5369 arg = XEXP (src, 1);
5370 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5371 {
5372 arg = cselib_expand_value_rtx (arg, scratch_regs, 5);
5373 if (arg == NULL_RTX)
5374 return;
5375 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5376 return;
5377 }
5378 ret = simplify_gen_binary (code, GET_MODE (val), val, arg);
5379 if (ret == val)
5380 /* Ensure ret isn't VALUE itself (which can happen e.g. for
5381 (plus (reg1) (reg2)) when reg2 is known to be 0), as that
5382 breaks a lot of routines during var-tracking. */
5383 ret = gen_rtx_fmt_ee (PLUS, GET_MODE (val), val, const0_rtx);
5384 break;
5385 default:
5386 gcc_unreachable ();
5387 }
5388
5389 cselib_add_permanent_equiv (v, ret, insn);
5390 }
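
/* E.g. for (set (reg:SI 1) (plus:SI (reg:SI 2) (const_int 6))) this
   records V(reg2) == (minus:SI V(reg1) (const_int 6)) as a permanent
   equivalence, so reg2's value remains expressible after reg2
   itself dies.  */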
5391
5392 /* Add the store (register or memory reference) LOC, if it will be
5393 tracked, to VTI (bb)->mos. EXPR is the RTL expression containing
5394 the store. CUIP->insn is the instruction of which LOC is a part. */
5395
5396 static void
5397 add_stores (rtx loc, const_rtx expr, void *cuip)
5398 {
5399 enum machine_mode mode = VOIDmode, mode2;
5400 struct count_use_info *cui = (struct count_use_info *)cuip;
5401 basic_block bb = cui->bb;
5402 micro_operation mo;
5403 rtx oloc = loc, nloc, src = NULL;
5404 enum micro_operation_type type = use_type (loc, cui, &mode);
5405 bool track_p = false;
5406 cselib_val *v;
5407 bool resolve, preserve;
5408
5409 if (type == MO_CLOBBER)
5410 return;
5411
5412 mode2 = mode;
5413
5414 if (REG_P (loc))
5415 {
5416 gcc_assert (loc != cfa_base_rtx);
5417 if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
5418 || !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
5419 || GET_CODE (expr) == CLOBBER)
5420 {
5421 mo.type = MO_CLOBBER;
5422 mo.u.loc = loc;
5423 if (GET_CODE (expr) == SET
5424 && SET_DEST (expr) == loc
5425 && !unsuitable_loc (SET_SRC (expr))
5426 && find_use_val (loc, mode, cui))
5427 {
5428 gcc_checking_assert (type == MO_VAL_SET);
5429 mo.u.loc = gen_rtx_SET (VOIDmode, loc, SET_SRC (expr));
5430 }
5431 }
5432 else
5433 {
5434 if (GET_CODE (expr) == SET
5435 && SET_DEST (expr) == loc
5436 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
5437 src = var_lowpart (mode2, SET_SRC (expr));
5438 loc = var_lowpart (mode2, loc);
5439
5440 if (src == NULL)
5441 {
5442 mo.type = MO_SET;
5443 mo.u.loc = loc;
5444 }
5445 else
5446 {
5447 rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
5448 if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
5449 mo.type = MO_COPY;
5450 else
5451 mo.type = MO_SET;
5452 mo.u.loc = xexpr;
5453 }
5454 }
5455 mo.insn = cui->insn;
5456 }
5457 else if (MEM_P (loc)
5458 && ((track_p = use_type (loc, NULL, &mode2) == MO_USE)
5459 || cui->sets))
5460 {
5461 if (MEM_P (loc) && type == MO_VAL_SET
5462 && !REG_P (XEXP (loc, 0))
5463 && !MEM_P (XEXP (loc, 0)))
5464 {
5465 rtx mloc = loc;
5466 enum machine_mode address_mode = get_address_mode (mloc);
5467 cselib_val *val = cselib_lookup (XEXP (mloc, 0),
5468 address_mode, 0,
5469 GET_MODE (mloc));
5470
5471 if (val && !cselib_preserved_value_p (val))
5472 preserve_value (val);
5473 }
5474
5475 if (GET_CODE (expr) == CLOBBER || !track_p)
5476 {
5477 mo.type = MO_CLOBBER;
5478 mo.u.loc = track_p ? var_lowpart (mode2, loc) : loc;
5479 }
5480 else
5481 {
5482 if (GET_CODE (expr) == SET
5483 && SET_DEST (expr) == loc
5484 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
5485 src = var_lowpart (mode2, SET_SRC (expr));
5486 loc = var_lowpart (mode2, loc);
5487
5488 if (src == NULL)
5489 {
5490 mo.type = MO_SET;
5491 mo.u.loc = loc;
5492 }
5493 else
5494 {
5495 rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
5496 if (same_variable_part_p (SET_SRC (xexpr),
5497 MEM_EXPR (loc),
5498 INT_MEM_OFFSET (loc)))
5499 mo.type = MO_COPY;
5500 else
5501 mo.type = MO_SET;
5502 mo.u.loc = xexpr;
5503 }
5504 }
5505 mo.insn = cui->insn;
5506 }
5507 else
5508 return;
5509
5510 if (type != MO_VAL_SET)
5511 goto log_and_return;
5512
5513 v = find_use_val (oloc, mode, cui);
5514
5515 if (!v)
5516 goto log_and_return;
5517
5518 resolve = preserve = !cselib_preserved_value_p (v);
5519
5520 nloc = replace_expr_with_values (oloc);
5521 if (nloc)
5522 oloc = nloc;
5523
5524 if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
5525 {
5526 cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0, VOIDmode);
5527
5528 gcc_assert (oval != v);
5529 gcc_assert (REG_P (oloc) || MEM_P (oloc));
5530
5531 if (oval && !cselib_preserved_value_p (oval))
5532 {
5533 micro_operation moa;
5534
5535 preserve_value (oval);
5536
5537 moa.type = MO_VAL_USE;
5538 moa.u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
5539 VAL_NEEDS_RESOLUTION (moa.u.loc) = 1;
5540 moa.insn = cui->insn;
5541
5542 if (dump_file && (dump_flags & TDF_DETAILS))
5543 log_op_type (moa.u.loc, cui->bb, cui->insn,
5544 moa.type, dump_file);
5545 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &moa);
5546 }
5547
5548 resolve = false;
5549 }
5550 else if (resolve && GET_CODE (mo.u.loc) == SET)
5551 {
5552 if (REG_P (SET_SRC (expr)) || MEM_P (SET_SRC (expr)))
5553 nloc = replace_expr_with_values (SET_SRC (expr));
5554 else
5555 nloc = NULL_RTX;
5556
5557 /* Avoid the mode mismatch between OLOC and EXPR. */
5558 if (!nloc && mode != mode2)
5559 {
5560 nloc = SET_SRC (expr);
5561 gcc_assert (oloc == SET_DEST (expr));
5562 }
5563
5564 if (nloc && nloc != SET_SRC (mo.u.loc))
5565 oloc = gen_rtx_SET (GET_MODE (mo.u.loc), oloc, nloc);
5566 else
5567 {
5568 if (oloc == SET_DEST (mo.u.loc))
5569 /* No point in duplicating. */
5570 oloc = mo.u.loc;
5571 if (!REG_P (SET_SRC (mo.u.loc)))
5572 resolve = false;
5573 }
5574 }
5575 else if (!resolve)
5576 {
5577 if (GET_CODE (mo.u.loc) == SET
5578 && oloc == SET_DEST (mo.u.loc))
5579 /* No point in duplicating. */
5580 oloc = mo.u.loc;
5581 }
5582 else
5583 resolve = false;
5584
5585 loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);
5586
5587 if (mo.u.loc != oloc)
5588 loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, mo.u.loc);
5589
5590 /* The loc of a MO_VAL_SET may have various forms:
5591
5592 (concat val dst): dst now holds val
5593
5594 (concat val (set dst src)): dst now holds val, copied from src
5595
5596 (concat (concat val dstv) dst): dst now holds val; dstv is dst
5597 after replacing mems and non-top-level regs with values.
5598
5599 (concat (concat val dstv) (set dst src)): dst now holds val,
5600 copied from src. dstv is a value-based representation of dst, if
5601 it differs from dst. If resolution is needed, src is a REG, and
5602 its mode is the same as that of val.
5603
5604 (concat (concat val (set dstv srcv)) (set dst src)): src
5605 copied to dst, holding val. dstv and srcv are value-based
5606 representations of dst and src, respectively.
5607
5608 */
5609
5610 if (GET_CODE (PATTERN (cui->insn)) != COND_EXEC)
5611 reverse_op (v->val_rtx, expr, cui->insn);
5612
5613 mo.u.loc = loc;
5614
5615 if (track_p)
5616 VAL_HOLDS_TRACK_EXPR (loc) = 1;
5617 if (preserve)
5618 {
5619 VAL_NEEDS_RESOLUTION (loc) = resolve;
5620 preserve_value (v);
5621 }
5622 if (mo.type == MO_CLOBBER)
5623 VAL_EXPR_IS_CLOBBERED (loc) = 1;
5624 if (mo.type == MO_COPY)
5625 VAL_EXPR_IS_COPIED (loc) = 1;
5626
5627 mo.type = MO_VAL_SET;
5628
5629 log_and_return:
5630 if (dump_file && (dump_flags & TDF_DETAILS))
5631 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
5632 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
5633 }
5634
5635 /* Arguments to the call. */
5636 static rtx call_arguments;
5637
5638 /* Compute call_arguments. */
5639
5640 static void
5641 prepare_call_arguments (basic_block bb, rtx insn)
5642 {
5643 rtx link, x;
5644 rtx prev, cur, next;
5645 rtx call = PATTERN (insn);
5646 rtx this_arg = NULL_RTX;
5647 tree type = NULL_TREE, t, fndecl = NULL_TREE;
5648 tree obj_type_ref = NULL_TREE;
5649 CUMULATIVE_ARGS args_so_far_v;
5650 cumulative_args_t args_so_far;
5651
5652 memset (&args_so_far_v, 0, sizeof (args_so_far_v));
5653 args_so_far = pack_cumulative_args (&args_so_far_v);
5654 if (GET_CODE (call) == PARALLEL)
5655 call = XVECEXP (call, 0, 0);
5656 if (GET_CODE (call) == SET)
5657 call = SET_SRC (call);
5658 if (GET_CODE (call) == CALL && MEM_P (XEXP (call, 0)))
5659 {
5660 if (GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
5661 {
5662 rtx symbol = XEXP (XEXP (call, 0), 0);
5663 if (SYMBOL_REF_DECL (symbol))
5664 fndecl = SYMBOL_REF_DECL (symbol);
5665 }
5666 if (fndecl == NULL_TREE)
5667 fndecl = MEM_EXPR (XEXP (call, 0));
5668 if (fndecl
5669 && TREE_CODE (TREE_TYPE (fndecl)) != FUNCTION_TYPE
5670 && TREE_CODE (TREE_TYPE (fndecl)) != METHOD_TYPE)
5671 fndecl = NULL_TREE;
5672 if (fndecl && TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
5673 type = TREE_TYPE (fndecl);
5674 if (fndecl && TREE_CODE (fndecl) != FUNCTION_DECL)
5675 {
5676 if (TREE_CODE (fndecl) == INDIRECT_REF
5677 && TREE_CODE (TREE_OPERAND (fndecl, 0)) == OBJ_TYPE_REF)
5678 obj_type_ref = TREE_OPERAND (fndecl, 0);
5679 fndecl = NULL_TREE;
5680 }
5681 if (type)
5682 {
5683 for (t = TYPE_ARG_TYPES (type); t && t != void_list_node;
5684 t = TREE_CHAIN (t))
5685 if (TREE_CODE (TREE_VALUE (t)) == REFERENCE_TYPE
5686 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_VALUE (t))))
5687 break;
5688 if ((t == NULL || t == void_list_node) && obj_type_ref == NULL_TREE)
5689 type = NULL;
5690 else
5691 {
5692 int nargs ATTRIBUTE_UNUSED = list_length (TYPE_ARG_TYPES (type));
5693 link = CALL_INSN_FUNCTION_USAGE (insn);
5694 #ifndef PCC_STATIC_STRUCT_RETURN
5695 if (aggregate_value_p (TREE_TYPE (type), type)
5696 && targetm.calls.struct_value_rtx (type, 0) == 0)
5697 {
5698 tree struct_addr = build_pointer_type (TREE_TYPE (type));
5699 enum machine_mode mode = TYPE_MODE (struct_addr);
5700 rtx reg;
5701 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
5702 nargs + 1);
5703 reg = targetm.calls.function_arg (args_so_far, mode,
5704 struct_addr, true);
5705 targetm.calls.function_arg_advance (args_so_far, mode,
5706 struct_addr, true);
5707 if (reg == NULL_RTX)
5708 {
5709 for (; link; link = XEXP (link, 1))
5710 if (GET_CODE (XEXP (link, 0)) == USE
5711 && MEM_P (XEXP (XEXP (link, 0), 0)))
5712 {
5713 link = XEXP (link, 1);
5714 break;
5715 }
5716 }
5717 }
5718 else
5719 #endif
5720 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
5721 nargs);
5722 if (obj_type_ref && TYPE_ARG_TYPES (type) != void_list_node)
5723 {
5724 enum machine_mode mode;
5725 t = TYPE_ARG_TYPES (type);
5726 mode = TYPE_MODE (TREE_VALUE (t));
5727 this_arg = targetm.calls.function_arg (args_so_far, mode,
5728 TREE_VALUE (t), true);
5729 if (this_arg && !REG_P (this_arg))
5730 this_arg = NULL_RTX;
5731 else if (this_arg == NULL_RTX)
5732 {
5733 for (; link; link = XEXP (link, 1))
5734 if (GET_CODE (XEXP (link, 0)) == USE
5735 && MEM_P (XEXP (XEXP (link, 0), 0)))
5736 {
5737 this_arg = XEXP (XEXP (link, 0), 0);
5738 break;
5739 }
5740 }
5741 }
5742 }
5743 }
5744 }
5745 t = type ? TYPE_ARG_TYPES (type) : NULL_TREE;
5746
5747 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
5748 if (GET_CODE (XEXP (link, 0)) == USE)
5749 {
5750 rtx item = NULL_RTX;
5751 x = XEXP (XEXP (link, 0), 0);
5752 if (GET_MODE (link) == VOIDmode
5753 || GET_MODE (link) == BLKmode
5754 || (GET_MODE (link) != GET_MODE (x)
5755 && (GET_MODE_CLASS (GET_MODE (link)) != MODE_INT
5756 || GET_MODE_CLASS (GET_MODE (x)) != MODE_INT)))
5757 /* Can't do anything for these if the original type mode
5758 isn't known or can't be converted. */;
5759 else if (REG_P (x))
5760 {
5761 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
5762 if (val && cselib_preserved_value_p (val))
5763 item = val->val_rtx;
5764 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT)
5765 {
5766 enum machine_mode mode = GET_MODE (x);
5767
5768 while ((mode = GET_MODE_WIDER_MODE (mode)) != VOIDmode
5769 && GET_MODE_BITSIZE (mode) <= BITS_PER_WORD)
5770 {
5771 rtx reg = simplify_subreg (mode, x, GET_MODE (x), 0);
5772
5773 if (reg == NULL_RTX || !REG_P (reg))
5774 continue;
5775 val = cselib_lookup (reg, mode, 0, VOIDmode);
5776 if (val && cselib_preserved_value_p (val))
5777 {
5778 item = val->val_rtx;
5779 break;
5780 }
5781 }
5782 }
5783 }
5784 else if (MEM_P (x))
5785 {
5786 rtx mem = x;
5787 cselib_val *val;
5788
5789 if (!frame_pointer_needed)
5790 {
5791 struct adjust_mem_data amd;
5792 amd.mem_mode = VOIDmode;
5793 amd.stack_adjust = -VTI (bb)->out.stack_adjust;
5794 amd.side_effects = NULL_RTX;
5795 amd.store = true;
5796 mem = simplify_replace_fn_rtx (mem, NULL_RTX, adjust_mems,
5797 &amd);
5798 gcc_assert (amd.side_effects == NULL_RTX);
5799 }
5800 val = cselib_lookup (mem, GET_MODE (mem), 0, VOIDmode);
5801 if (val && cselib_preserved_value_p (val))
5802 item = val->val_rtx;
5803 else if (GET_MODE_CLASS (GET_MODE (mem)) != MODE_INT)
5804 {
5805 /* For a non-integer stack argument, also check whether it was
5806 initialized with integers. */
5807 enum machine_mode imode = int_mode_for_mode (GET_MODE (mem));
5808 if (imode != GET_MODE (mem) && imode != BLKmode)
5809 {
5810 val = cselib_lookup (adjust_address_nv (mem, imode, 0),
5811 imode, 0, VOIDmode);
5812 if (val && cselib_preserved_value_p (val))
5813 item = lowpart_subreg (GET_MODE (x), val->val_rtx,
5814 imode);
5815 }
5816 }
5817 }
5818 if (item)
5819 {
5820 rtx x2 = x;
5821 if (GET_MODE (item) != GET_MODE (link))
5822 item = lowpart_subreg (GET_MODE (link), item, GET_MODE (item));
5823 if (GET_MODE (x2) != GET_MODE (link))
5824 x2 = lowpart_subreg (GET_MODE (link), x2, GET_MODE (x2));
5825 item = gen_rtx_CONCAT (GET_MODE (link), x2, item);
5826 call_arguments
5827 = gen_rtx_EXPR_LIST (VOIDmode, item, call_arguments);
5828 }
5829 if (t && t != void_list_node)
5830 {
5831 tree argtype = TREE_VALUE (t);
5832 enum machine_mode mode = TYPE_MODE (argtype);
5833 rtx reg;
5834 if (pass_by_reference (&args_so_far_v, mode, argtype, true))
5835 {
5836 argtype = build_pointer_type (argtype);
5837 mode = TYPE_MODE (argtype);
5838 }
5839 reg = targetm.calls.function_arg (args_so_far, mode,
5840 argtype, true);
5841 if (TREE_CODE (argtype) == REFERENCE_TYPE
5842 && INTEGRAL_TYPE_P (TREE_TYPE (argtype))
5843 && reg
5844 && REG_P (reg)
5845 && GET_MODE (reg) == mode
5846 && GET_MODE_CLASS (mode) == MODE_INT
5847 && REG_P (x)
5848 && REGNO (x) == REGNO (reg)
5849 && GET_MODE (x) == mode
5850 && item)
5851 {
5852 enum machine_mode indmode
5853 = TYPE_MODE (TREE_TYPE (argtype));
5854 rtx mem = gen_rtx_MEM (indmode, x);
5855 cselib_val *val = cselib_lookup (mem, indmode, 0, VOIDmode);
5856 if (val && cselib_preserved_value_p (val))
5857 {
5858 item = gen_rtx_CONCAT (indmode, mem, val->val_rtx);
5859 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
5860 call_arguments);
5861 }
5862 else
5863 {
5864 struct elt_loc_list *l;
5865 tree initial;
5866
5867 /* Try harder: when passing the address of a constant
5868 pool integer, it can easily be read back. */
5869 item = XEXP (item, 1);
5870 if (GET_CODE (item) == SUBREG)
5871 item = SUBREG_REG (item);
5872 gcc_assert (GET_CODE (item) == VALUE);
5873 val = CSELIB_VAL_PTR (item);
5874 for (l = val->locs; l; l = l->next)
5875 if (GET_CODE (l->loc) == SYMBOL_REF
5876 && TREE_CONSTANT_POOL_ADDRESS_P (l->loc)
5877 && SYMBOL_REF_DECL (l->loc)
5878 && DECL_INITIAL (SYMBOL_REF_DECL (l->loc)))
5879 {
5880 initial = DECL_INITIAL (SYMBOL_REF_DECL (l->loc));
5881 if (host_integerp (initial, 0))
5882 {
5883 item = GEN_INT (tree_low_cst (initial, 0));
5884 item = gen_rtx_CONCAT (indmode, mem, item);
5885 call_arguments
5886 = gen_rtx_EXPR_LIST (VOIDmode, item,
5887 call_arguments);
5888 }
5889 break;
5890 }
5891 }
5892 }
5893 targetm.calls.function_arg_advance (args_so_far, mode,
5894 argtype, true);
5895 t = TREE_CHAIN (t);
5896 }
5897 }
5898
5899 /* Add debug arguments. */
5900 if (fndecl
5901 && TREE_CODE (fndecl) == FUNCTION_DECL
5902 && DECL_HAS_DEBUG_ARGS_P (fndecl))
5903 {
5904 VEC(tree, gc) **debug_args = decl_debug_args_lookup (fndecl);
5905 if (debug_args)
5906 {
5907 unsigned int ix;
5908 tree param;
5909 for (ix = 0; VEC_iterate (tree, *debug_args, ix, param); ix += 2)
5910 {
5911 rtx item;
5912 tree dtemp = VEC_index (tree, *debug_args, ix + 1);
5913 enum machine_mode mode = DECL_MODE (dtemp);
5914 item = gen_rtx_DEBUG_PARAMETER_REF (mode, param);
5915 item = gen_rtx_CONCAT (mode, item, DECL_RTL_KNOWN_SET (dtemp));
5916 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
5917 call_arguments);
5918 }
5919 }
5920 }
5921
5922 /* Reverse the call_arguments chain. */
5923 prev = NULL_RTX;
5924 for (cur = call_arguments; cur; cur = next)
5925 {
5926 next = XEXP (cur, 1);
5927 XEXP (cur, 1) = prev;
5928 prev = cur;
5929 }
5930 call_arguments = prev;
5931
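/* The loop above is the standard in-place reversal of a singly-linked
   list: each iteration unhooks the head node and pushes it onto the
   already-reversed prefix.  A minimal standalone sketch of the same
   technique (hypothetical node type, not part of this pass):

     struct slist { struct slist *next; };

     static struct slist *
     slist_reverse (struct slist *head)
     {
       struct slist *prev = NULL, *next;
       for (; head; head = next)
         {
           next = head->next;
           head->next = prev;
           prev = head;
         }
       return prev;
     }
*/
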
5932 x = PATTERN (insn);
5933 if (GET_CODE (x) == PARALLEL)
5934 x = XVECEXP (x, 0, 0);
5935 if (GET_CODE (x) == SET)
5936 x = SET_SRC (x);
5937 if (GET_CODE (x) == CALL && MEM_P (XEXP (x, 0)))
5938 {
5939 x = XEXP (XEXP (x, 0), 0);
5940 if (GET_CODE (x) == SYMBOL_REF)
5941 /* Don't record anything. */;
5942 else if (CONSTANT_P (x))
5943 {
5944 x = gen_rtx_CONCAT (GET_MODE (x) == VOIDmode ? Pmode : GET_MODE (x),
5945 pc_rtx, x);
5946 call_arguments
5947 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
5948 }
5949 else
5950 {
5951 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
5952 if (val && cselib_preserved_value_p (val))
5953 {
5954 x = gen_rtx_CONCAT (GET_MODE (x), pc_rtx, val->val_rtx);
5955 call_arguments
5956 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
5957 }
5958 }
5959 }
5960 if (this_arg)
5961 {
5962 enum machine_mode mode
5963 = TYPE_MODE (TREE_TYPE (OBJ_TYPE_REF_EXPR (obj_type_ref)));
5964 rtx clobbered = gen_rtx_MEM (mode, this_arg);
5965 HOST_WIDE_INT token
5966 = tree_low_cst (OBJ_TYPE_REF_TOKEN (obj_type_ref), 0);
5967 if (token)
5968 clobbered = plus_constant (clobbered, token * GET_MODE_SIZE (mode));
5969 clobbered = gen_rtx_MEM (mode, clobbered);
5970 x = gen_rtx_CONCAT (mode, gen_rtx_CLOBBER (VOIDmode, pc_rtx), clobbered);
5971 call_arguments
5972 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
5973 }
5974 }
5975
5976 /* Callback for cselib_record_sets_hook, that records as micro
5977 operations the uses and stores in an insn after cselib_record_sets
5978 has analyzed the sets in the insn, but before it modifies the stored
5979 values in its internal tables.  It is also called directly when
5980 cselib isn't being used in the first place, in which case SETS and
5981 N_SETS will be 0. */
5982
5983 static void
5984 add_with_sets (rtx insn, struct cselib_set *sets, int n_sets)
5985 {
5986 basic_block bb = BLOCK_FOR_INSN (insn);
5987 int n1, n2;
5988 struct count_use_info cui;
5989 micro_operation *mos;
5990
5991 cselib_hook_called = true;
5992
5993 cui.insn = insn;
5994 cui.bb = bb;
5995 cui.sets = sets;
5996 cui.n_sets = n_sets;
5997
5998 n1 = VEC_length (micro_operation, VTI (bb)->mos);
5999 cui.store_p = false;
6000 note_uses (&PATTERN (insn), add_uses_1, &cui);
6001 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
6002 mos = VEC_address (micro_operation, VTI (bb)->mos);
6003
6004 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
6005 MO_VAL_LOC last. */
6006 while (n1 < n2)
6007 {
6008 while (n1 < n2 && mos[n1].type == MO_USE)
6009 n1++;
6010 while (n1 < n2 && mos[n2].type != MO_USE)
6011 n2--;
6012 if (n1 < n2)
6013 {
6014 micro_operation sw;
6015
6016 sw = mos[n1];
6017 mos[n1] = mos[n2];
6018 mos[n2] = sw;
6019 }
6020 }
6021
6022 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
6023 while (n1 < n2)
6024 {
6025 while (n1 < n2 && mos[n1].type != MO_VAL_LOC)
6026 n1++;
6027 while (n1 < n2 && mos[n2].type == MO_VAL_LOC)
6028 n2--;
6029 if (n1 < n2)
6030 {
6031 micro_operation sw;
6032
6033 sw = mos[n1];
6034 mos[n1] = mos[n2];
6035 mos[n2] = sw;
6036 }
6037 }
6038
6039 if (CALL_P (insn))
6040 {
6041 micro_operation mo;
6042
6043 mo.type = MO_CALL;
6044 mo.insn = insn;
6045 mo.u.loc = call_arguments;
6046 call_arguments = NULL_RTX;
6047
6048 if (dump_file && (dump_flags & TDF_DETAILS))
6049 log_op_type (PATTERN (insn), bb, insn, mo.type, dump_file);
6050 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
6051 }
6052
6053 n1 = VEC_length (micro_operation, VTI (bb)->mos);
6054 /* This will record NEXT_INSN (insn), such that we can
6055 insert notes before it without worrying about any
6056 notes that MO_USEs might emit after the insn. */
6057 cui.store_p = true;
6058 note_stores (PATTERN (insn), add_stores, &cui);
6059 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
6060 mos = VEC_address (micro_operation, VTI (bb)->mos);
6061
6062 /* Order the MO_VAL_USEs first (note_stores does nothing
6063 on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
6064 insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */
6065 while (n1 < n2)
6066 {
6067 while (n1 < n2 && mos[n1].type == MO_VAL_USE)
6068 n1++;
6069 while (n1 < n2 && mos[n2].type != MO_VAL_USE)
6070 n2--;
6071 if (n1 < n2)
6072 {
6073 micro_operation sw;
6074
6075 sw = mos[n1];
6076 mos[n1] = mos[n2];
6077 mos[n2] = sw;
6078 }
6079 }
6080
6081 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
6082 while (n1 < n2)
6083 {
6084 while (n1 < n2 && mos[n1].type == MO_CLOBBER)
6085 n1++;
6086 while (n1 < n2 && mos[n2].type != MO_CLOBBER)
6087 n2--;
6088 if (n1 < n2)
6089 {
6090 micro_operation sw;
6091
6092 sw = mos[n1];
6093 mos[n1] = mos[n2];
6094 mos[n2] = sw;
6095 }
6096 }
6097 }
6098
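/* The swapping loops in add_with_sets are instances of a Hoare-style
   two-index partition: N1 scans forward past elements already in the
   desired class, N2 scans backward past elements already out of it,
   and mismatched pairs are swapped, grouping the array by a predicate
   in a single pass.  A minimal sketch over ints (hypothetical helper,
   not part of this pass):

     static void
     partition_by_pred (int *a, int n1, int n2, bool (*pred) (int))
     {
       while (n1 < n2)
         {
           while (n1 < n2 && pred (a[n1]))
             n1++;
           while (n1 < n2 && !pred (a[n2]))
             n2--;
           if (n1 < n2)
             {
               int tmp = a[n1];
               a[n1] = a[n2];
               a[n2] = tmp;
             }
         }
     }

   Note the ordering is not stable, which is acceptable here since only
   the grouping of micro operations by type is being established.  */
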
6099 static enum var_init_status
6100 find_src_status (dataflow_set *in, rtx src)
6101 {
6102 tree decl = NULL_TREE;
6103 enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
6104
6105 if (! flag_var_tracking_uninit)
6106 status = VAR_INIT_STATUS_INITIALIZED;
6107
6108 if (src && REG_P (src))
6109 decl = var_debug_decl (REG_EXPR (src));
6110 else if (src && MEM_P (src))
6111 decl = var_debug_decl (MEM_EXPR (src));
6112
6113 if (src && decl)
6114 status = get_init_value (in, src, dv_from_decl (decl));
6115
6116 return status;
6117 }
6118
6119 /* SRC is the source of an assignment.  Use SET to try to find what
6120 was ultimately assigned to SRC.  Return that value if known,
6121 otherwise return NULL. */
6122
6123 static rtx
6124 find_src_set_src (dataflow_set *set, rtx src)
6125 {
6126 tree decl = NULL_TREE; /* The variable being copied around. */
6127 rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */
6128 variable var;
6129 location_chain nextp;
6130 int i;
6131 bool found;
6132
6133 if (src && REG_P (src))
6134 decl = var_debug_decl (REG_EXPR (src));
6135 else if (src && MEM_P (src))
6136 decl = var_debug_decl (MEM_EXPR (src));
6137
6138 if (src && decl)
6139 {
6140 decl_or_value dv = dv_from_decl (decl);
6141
6142 var = shared_hash_find (set->vars, dv);
6143 if (var)
6144 {
6145 found = false;
6146 for (i = 0; i < var->n_var_parts && !found; i++)
6147 for (nextp = var->var_part[i].loc_chain; nextp && !found;
6148 nextp = nextp->next)
6149 if (rtx_equal_p (nextp->loc, src))
6150 {
6151 set_src = nextp->set_src;
6152 found = true;
6153 }
6154
6155 }
6156 }
6157
6158 return set_src;
6159 }
6160
6161 /* Compute the changes of variable locations in the basic block BB. */
6162
6163 static bool
6164 compute_bb_dataflow (basic_block bb)
6165 {
6166 unsigned int i;
6167 micro_operation *mo;
6168 bool changed;
6169 dataflow_set old_out;
6170 dataflow_set *in = &VTI (bb)->in;
6171 dataflow_set *out = &VTI (bb)->out;
6172
6173 dataflow_set_init (&old_out);
6174 dataflow_set_copy (&old_out, out);
6175 dataflow_set_copy (out, in);
6176
6177 FOR_EACH_VEC_ELT (micro_operation, VTI (bb)->mos, i, mo)
6178 {
6179 rtx insn = mo->insn;
6180
6181 switch (mo->type)
6182 {
6183 case MO_CALL:
6184 dataflow_set_clear_at_call (out);
6185 break;
6186
6187 case MO_USE:
6188 {
6189 rtx loc = mo->u.loc;
6190
6191 if (REG_P (loc))
6192 var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6193 else if (MEM_P (loc))
6194 var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6195 }
6196 break;
6197
6198 case MO_VAL_LOC:
6199 {
6200 rtx loc = mo->u.loc;
6201 rtx val, vloc;
6202 tree var;
6203
6204 if (GET_CODE (loc) == CONCAT)
6205 {
6206 val = XEXP (loc, 0);
6207 vloc = XEXP (loc, 1);
6208 }
6209 else
6210 {
6211 val = NULL_RTX;
6212 vloc = loc;
6213 }
6214
6215 var = PAT_VAR_LOCATION_DECL (vloc);
6216
6217 clobber_variable_part (out, NULL_RTX,
6218 dv_from_decl (var), 0, NULL_RTX);
6219 if (val)
6220 {
6221 if (VAL_NEEDS_RESOLUTION (loc))
6222 val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
6223 set_variable_part (out, val, dv_from_decl (var), 0,
6224 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6225 INSERT);
6226 }
6227 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
6228 set_variable_part (out, PAT_VAR_LOCATION_LOC (vloc),
6229 dv_from_decl (var), 0,
6230 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6231 INSERT);
6232 }
6233 break;
6234
6235 case MO_VAL_USE:
6236 {
6237 rtx loc = mo->u.loc;
6238 rtx val, vloc, uloc;
6239
6240 vloc = uloc = XEXP (loc, 1);
6241 val = XEXP (loc, 0);
6242
6243 if (GET_CODE (val) == CONCAT)
6244 {
6245 uloc = XEXP (val, 1);
6246 val = XEXP (val, 0);
6247 }
6248
6249 if (VAL_NEEDS_RESOLUTION (loc))
6250 val_resolve (out, val, vloc, insn);
6251 else
6252 val_store (out, val, uloc, insn, false);
6253
6254 if (VAL_HOLDS_TRACK_EXPR (loc))
6255 {
6256 if (GET_CODE (uloc) == REG)
6257 var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6258 NULL);
6259 else if (GET_CODE (uloc) == MEM)
6260 var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6261 NULL);
6262 }
6263 }
6264 break;
6265
6266 case MO_VAL_SET:
6267 {
6268 rtx loc = mo->u.loc;
6269 rtx val, vloc, uloc;
6270
6271 vloc = loc;
6272 uloc = XEXP (vloc, 1);
6273 val = XEXP (vloc, 0);
6274 vloc = uloc;
6275
6276 if (GET_CODE (val) == CONCAT)
6277 {
6278 vloc = XEXP (val, 1);
6279 val = XEXP (val, 0);
6280 }
6281
6282 if (GET_CODE (vloc) == SET)
6283 {
6284 rtx vsrc = SET_SRC (vloc);
6285
6286 gcc_assert (val != vsrc);
6287 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
6288
6289 vloc = SET_DEST (vloc);
6290
6291 if (VAL_NEEDS_RESOLUTION (loc))
6292 val_resolve (out, val, vsrc, insn);
6293 }
6294 else if (VAL_NEEDS_RESOLUTION (loc))
6295 {
6296 gcc_assert (GET_CODE (uloc) == SET
6297 && GET_CODE (SET_SRC (uloc)) == REG);
6298 val_resolve (out, val, SET_SRC (uloc), insn);
6299 }
6300
6301 if (VAL_HOLDS_TRACK_EXPR (loc))
6302 {
6303 if (VAL_EXPR_IS_CLOBBERED (loc))
6304 {
6305 if (REG_P (uloc))
6306 var_reg_delete (out, uloc, true);
6307 else if (MEM_P (uloc))
6308 var_mem_delete (out, uloc, true);
6309 }
6310 else
6311 {
6312 bool copied_p = VAL_EXPR_IS_COPIED (loc);
6313 rtx set_src = NULL;
6314 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
6315
6316 if (GET_CODE (uloc) == SET)
6317 {
6318 set_src = SET_SRC (uloc);
6319 uloc = SET_DEST (uloc);
6320 }
6321
6322 if (copied_p)
6323 {
6324 if (flag_var_tracking_uninit)
6325 {
6326 status = find_src_status (in, set_src);
6327
6328 if (status == VAR_INIT_STATUS_UNKNOWN)
6329 status = find_src_status (out, set_src);
6330 }
6331
6332 set_src = find_src_set_src (in, set_src);
6333 }
6334
6335 if (REG_P (uloc))
6336 var_reg_delete_and_set (out, uloc, !copied_p,
6337 status, set_src);
6338 else if (MEM_P (uloc))
6339 var_mem_delete_and_set (out, uloc, !copied_p,
6340 status, set_src);
6341 }
6342 }
6343 else if (REG_P (uloc))
6344 var_regno_delete (out, REGNO (uloc));
6345
6346 val_store (out, val, vloc, insn, true);
6347 }
6348 break;
6349
6350 case MO_SET:
6351 {
6352 rtx loc = mo->u.loc;
6353 rtx set_src = NULL;
6354
6355 if (GET_CODE (loc) == SET)
6356 {
6357 set_src = SET_SRC (loc);
6358 loc = SET_DEST (loc);
6359 }
6360
6361 if (REG_P (loc))
6362 var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6363 set_src);
6364 else if (MEM_P (loc))
6365 var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6366 set_src);
6367 }
6368 break;
6369
6370 case MO_COPY:
6371 {
6372 rtx loc = mo->u.loc;
6373 enum var_init_status src_status;
6374 rtx set_src = NULL;
6375
6376 if (GET_CODE (loc) == SET)
6377 {
6378 set_src = SET_SRC (loc);
6379 loc = SET_DEST (loc);
6380 }
6381
6382 if (! flag_var_tracking_uninit)
6383 src_status = VAR_INIT_STATUS_INITIALIZED;
6384 else
6385 {
6386 src_status = find_src_status (in, set_src);
6387
6388 if (src_status == VAR_INIT_STATUS_UNKNOWN)
6389 src_status = find_src_status (out, set_src);
6390 }
6391
6392 set_src = find_src_set_src (in, set_src);
6393
6394 if (REG_P (loc))
6395 var_reg_delete_and_set (out, loc, false, src_status, set_src);
6396 else if (MEM_P (loc))
6397 var_mem_delete_and_set (out, loc, false, src_status, set_src);
6398 }
6399 break;
6400
6401 case MO_USE_NO_VAR:
6402 {
6403 rtx loc = mo->u.loc;
6404
6405 if (REG_P (loc))
6406 var_reg_delete (out, loc, false);
6407 else if (MEM_P (loc))
6408 var_mem_delete (out, loc, false);
6409 }
6410 break;
6411
6412 case MO_CLOBBER:
6413 {
6414 rtx loc = mo->u.loc;
6415
6416 if (REG_P (loc))
6417 var_reg_delete (out, loc, true);
6418 else if (MEM_P (loc))
6419 var_mem_delete (out, loc, true);
6420 }
6421 break;
6422
6423 case MO_ADJUST:
6424 out->stack_adjust += mo->u.adjust;
6425 break;
6426 }
6427 }
6428
6429 if (MAY_HAVE_DEBUG_INSNS)
6430 {
6431 dataflow_set_equiv_regs (out);
6432 htab_traverse (shared_hash_htab (out->vars), canonicalize_values_mark,
6433 out);
6434 htab_traverse (shared_hash_htab (out->vars), canonicalize_values_star,
6435 out);
6436 #if ENABLE_CHECKING
6437 htab_traverse (shared_hash_htab (out->vars),
6438 canonicalize_loc_order_check, out);
6439 #endif
6440 }
6441 changed = dataflow_set_different (&old_out, out);
6442 dataflow_set_destroy (&old_out);
6443 return changed;
6444 }
6445
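/* compute_bb_dataflow above is the transfer function of this dataflow
   problem: OUT is seeded from IN and then updated by each micro
   operation in instruction order, so conceptually
   OUT(BB) = f_BB (IN(BB)), where f_BB is the composition of the
   per-operation effects recorded for BB.  */
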
6446 /* Find the locations of variables in the whole function. */
6447
6448 static bool
6449 vt_find_locations (void)
6450 {
6451 fibheap_t worklist, pending, fibheap_swap;
6452 sbitmap visited, in_worklist, in_pending, sbitmap_swap;
6453 basic_block bb;
6454 edge e;
6455 int *bb_order;
6456 int *rc_order;
6457 int i;
6458 int htabsz = 0;
6459 int htabmax = PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE);
6460 bool success = true;
6461
6462 timevar_push (TV_VAR_TRACKING_DATAFLOW);
6463 /* Compute the reverse completion order of a depth-first search
6464 of the CFG so that the dataflow iteration runs faster. */
6465 rc_order = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
6466 bb_order = XNEWVEC (int, last_basic_block);
6467 pre_and_rev_post_order_compute (NULL, rc_order, false);
6468 for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
6469 bb_order[rc_order[i]] = i;
6470 free (rc_order);
6471
6472 worklist = fibheap_new ();
6473 pending = fibheap_new ();
6474 visited = sbitmap_alloc (last_basic_block);
6475 in_worklist = sbitmap_alloc (last_basic_block);
6476 in_pending = sbitmap_alloc (last_basic_block);
6477 sbitmap_zero (in_worklist);
6478
6479 FOR_EACH_BB (bb)
6480 fibheap_insert (pending, bb_order[bb->index], bb);
6481 sbitmap_ones (in_pending);
6482
6483 while (success && !fibheap_empty (pending))
6484 {
6485 fibheap_swap = pending;
6486 pending = worklist;
6487 worklist = fibheap_swap;
6488 sbitmap_swap = in_pending;
6489 in_pending = in_worklist;
6490 in_worklist = sbitmap_swap;
6491
6492 sbitmap_zero (visited);
6493
6494 while (!fibheap_empty (worklist))
6495 {
6496 bb = (basic_block) fibheap_extract_min (worklist);
6497 RESET_BIT (in_worklist, bb->index);
6498 gcc_assert (!TEST_BIT (visited, bb->index));
6499 if (!TEST_BIT (visited, bb->index))
6500 {
6501 bool changed;
6502 edge_iterator ei;
6503 int oldinsz, oldoutsz;
6504
6505 SET_BIT (visited, bb->index);
6506
6507 if (VTI (bb)->in.vars)
6508 {
6509 htabsz
6510 -= (htab_size (shared_hash_htab (VTI (bb)->in.vars))
6511 + htab_size (shared_hash_htab (VTI (bb)->out.vars)));
6512 oldinsz
6513 = htab_elements (shared_hash_htab (VTI (bb)->in.vars));
6514 oldoutsz
6515 = htab_elements (shared_hash_htab (VTI (bb)->out.vars));
6516 }
6517 else
6518 oldinsz = oldoutsz = 0;
6519
6520 if (MAY_HAVE_DEBUG_INSNS)
6521 {
6522 dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
6523 bool first = true, adjust = false;
6524
6525 /* Calculate the IN set as the intersection of
6526 predecessor OUT sets. */
6527
6528 dataflow_set_clear (in);
6529 dst_can_be_shared = true;
6530
6531 FOR_EACH_EDGE (e, ei, bb->preds)
6532 if (!VTI (e->src)->flooded)
6533 gcc_assert (bb_order[bb->index]
6534 <= bb_order[e->src->index]);
6535 else if (first)
6536 {
6537 dataflow_set_copy (in, &VTI (e->src)->out);
6538 first_out = &VTI (e->src)->out;
6539 first = false;
6540 }
6541 else
6542 {
6543 dataflow_set_merge (in, &VTI (e->src)->out);
6544 adjust = true;
6545 }
6546
6547 if (adjust)
6548 {
6549 dataflow_post_merge_adjust (in, &VTI (bb)->permp);
6550 #if ENABLE_CHECKING
6551 /* Merge and merge_adjust should keep entries in
6552 canonical order. */
6553 htab_traverse (shared_hash_htab (in->vars),
6554 canonicalize_loc_order_check,
6555 in);
6556 #endif
6557 if (dst_can_be_shared)
6558 {
6559 shared_hash_destroy (in->vars);
6560 in->vars = shared_hash_copy (first_out->vars);
6561 }
6562 }
6563
6564 VTI (bb)->flooded = true;
6565 }
6566 else
6567 {
6568 /* Calculate the IN set as the union of predecessor OUT sets. */
6569 dataflow_set_clear (&VTI (bb)->in);
6570 FOR_EACH_EDGE (e, ei, bb->preds)
6571 dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
6572 }
6573
6574 changed = compute_bb_dataflow (bb);
6575 htabsz += (htab_size (shared_hash_htab (VTI (bb)->in.vars))
6576 + htab_size (shared_hash_htab (VTI (bb)->out.vars)));
6577
6578 if (htabmax && htabsz > htabmax)
6579 {
6580 if (MAY_HAVE_DEBUG_INSNS)
6581 inform (DECL_SOURCE_LOCATION (cfun->decl),
6582 "variable tracking size limit exceeded with "
6583 "-fvar-tracking-assignments, retrying without");
6584 else
6585 inform (DECL_SOURCE_LOCATION (cfun->decl),
6586 "variable tracking size limit exceeded");
6587 success = false;
6588 break;
6589 }
6590
6591 if (changed)
6592 {
6593 FOR_EACH_EDGE (e, ei, bb->succs)
6594 {
6595 if (e->dest == EXIT_BLOCK_PTR)
6596 continue;
6597
6598 if (TEST_BIT (visited, e->dest->index))
6599 {
6600 if (!TEST_BIT (in_pending, e->dest->index))
6601 {
6602 /* Send E->DEST to next round. */
6603 SET_BIT (in_pending, e->dest->index);
6604 fibheap_insert (pending,
6605 bb_order[e->dest->index],
6606 e->dest);
6607 }
6608 }
6609 else if (!TEST_BIT (in_worklist, e->dest->index))
6610 {
6611 /* Add E->DEST to current round. */
6612 SET_BIT (in_worklist, e->dest->index);
6613 fibheap_insert (worklist, bb_order[e->dest->index],
6614 e->dest);
6615 }
6616 }
6617 }
6618
6619 if (dump_file)
6620 fprintf (dump_file,
6621 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
6622 bb->index,
6623 (int)htab_elements (shared_hash_htab (VTI (bb)->in.vars)),
6624 oldinsz,
6625 (int)htab_elements (shared_hash_htab (VTI (bb)->out.vars)),
6626 oldoutsz,
6627 (int)worklist->nodes, (int)pending->nodes, htabsz);
6628
6629 if (dump_file && (dump_flags & TDF_DETAILS))
6630 {
6631 fprintf (dump_file, "BB %i IN:\n", bb->index);
6632 dump_dataflow_set (&VTI (bb)->in);
6633 fprintf (dump_file, "BB %i OUT:\n", bb->index);
6634 dump_dataflow_set (&VTI (bb)->out);
6635 }
6636 }
6637 }
6638 }
6639
6640 if (success && MAY_HAVE_DEBUG_INSNS)
6641 FOR_EACH_BB (bb)
6642 gcc_assert (VTI (bb)->flooded);
6643
6644 free (bb_order);
6645 fibheap_delete (worklist);
6646 fibheap_delete (pending);
6647 sbitmap_free (visited);
6648 sbitmap_free (in_worklist);
6649 sbitmap_free (in_pending);
6650
6651 timevar_pop (TV_VAR_TRACKING_DATAFLOW);
6652 return success;
6653 }
6654
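/* vt_find_locations above is the classic two-level worklist algorithm
   for forward dataflow problems: blocks are prioritized by reverse
   completion order so that most predecessors are processed before
   their successors, and successors of a changed block are rescheduled
   either in the current round (WORKLIST) or in the next one (PENDING).
   Schematically (a sketch, not the actual driver):

     put every block on PENDING;
     while (PENDING is non-empty)
       {
         swap PENDING and WORKLIST;
         while (WORKLIST is non-empty)
           {
             BB = extract block with minimum RPO number;
             IN (BB) = meet over predecessors P of OUT (P);
             if (transfer (BB) changed OUT (BB))
               reschedule each unvisited successor on WORKLIST,
               and each already-visited one on PENDING;
           }
       }
*/
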
6655 /* Print the contents of LIST to the dump file. */
6656
6657 static void
6658 dump_attrs_list (attrs list)
6659 {
6660 for (; list; list = list->next)
6661 {
6662 if (dv_is_decl_p (list->dv))
6663 print_mem_expr (dump_file, dv_as_decl (list->dv));
6664 else
6665 print_rtl_single (dump_file, dv_as_value (list->dv));
6666 fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
6667 }
6668 fprintf (dump_file, "\n");
6669 }
6670
6671 /* Print the information about variable *SLOT to the dump file. */
6672
6673 static int
6674 dump_var_slot (void **slot, void *data ATTRIBUTE_UNUSED)
6675 {
6676 variable var = (variable) *slot;
6677
6678 dump_var (var);
6679
6680 /* Continue traversing the hash table. */
6681 return 1;
6682 }
6683
6684 /* Print the information about variable VAR to the dump file. */
6685
6686 static void
6687 dump_var (variable var)
6688 {
6689 int i;
6690 location_chain node;
6691
6692 if (dv_is_decl_p (var->dv))
6693 {
6694 const_tree decl = dv_as_decl (var->dv);
6695
6696 if (DECL_NAME (decl))
6697 {
6698 fprintf (dump_file, " name: %s",
6699 IDENTIFIER_POINTER (DECL_NAME (decl)));
6700 if (dump_flags & TDF_UID)
6701 fprintf (dump_file, "D.%u", DECL_UID (decl));
6702 }
6703 else if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
6704 fprintf (dump_file, " name: D#%u", DEBUG_TEMP_UID (decl));
6705 else
6706 fprintf (dump_file, " name: D.%u", DECL_UID (decl));
6707 fprintf (dump_file, "\n");
6708 }
6709 else
6710 {
6711 fputc (' ', dump_file);
6712 print_rtl_single (dump_file, dv_as_value (var->dv));
6713 }
6714
6715 for (i = 0; i < var->n_var_parts; i++)
6716 {
6717 fprintf (dump_file, " offset %ld\n",
6718 (long)(var->onepart ? 0 : VAR_PART_OFFSET (var, i)));
6719 for (node = var->var_part[i].loc_chain; node; node = node->next)
6720 {
6721 fprintf (dump_file, " ");
6722 if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
6723 fprintf (dump_file, "[uninit]");
6724 print_rtl_single (dump_file, node->loc);
6725 }
6726 }
6727 }
6728
6729 /* Print the information about variables from hash table VARS to the dump file. */
6730
6731 static void
6732 dump_vars (htab_t vars)
6733 {
6734 if (htab_elements (vars) > 0)
6735 {
6736 fprintf (dump_file, "Variables:\n");
6737 htab_traverse (vars, dump_var_slot, NULL);
6738 }
6739 }
6740
6741 /* Print the dataflow set SET to the dump file. */
6742
6743 static void
6744 dump_dataflow_set (dataflow_set *set)
6745 {
6746 int i;
6747
6748 fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
6749 set->stack_adjust);
6750 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
6751 {
6752 if (set->regs[i])
6753 {
6754 fprintf (dump_file, "Reg %d:", i);
6755 dump_attrs_list (set->regs[i]);
6756 }
6757 }
6758 dump_vars (shared_hash_htab (set->vars));
6759 fprintf (dump_file, "\n");
6760 }
6761
6762 /* Print the IN and OUT sets for each basic block to the dump file. */
6763
6764 static void
6765 dump_dataflow_sets (void)
6766 {
6767 basic_block bb;
6768
6769 FOR_EACH_BB (bb)
6770 {
6771 fprintf (dump_file, "\nBasic block %d:\n", bb->index);
6772 fprintf (dump_file, "IN:\n");
6773 dump_dataflow_set (&VTI (bb)->in);
6774 fprintf (dump_file, "OUT:\n");
6775 dump_dataflow_set (&VTI (bb)->out);
6776 }
6777 }
6778
6779 /* Return the variable for DV in dropped_values, inserting one if
6780 requested with INSERT. */
6781
6782 static inline variable
6783 variable_from_dropped (decl_or_value dv, enum insert_option insert)
6784 {
6785 void **slot;
6786 variable empty_var;
6787 onepart_enum_t onepart;
6788
6789 slot = htab_find_slot_with_hash (dropped_values, dv, dv_htab_hash (dv),
6790 insert);
6791
6792 if (!slot)
6793 return NULL;
6794
6795 if (*slot)
6796 return (variable) *slot;
6797
6798 gcc_checking_assert (insert == INSERT);
6799
6800 onepart = dv_onepart_p (dv);
6801
6802 gcc_checking_assert (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR);
6803
6804 empty_var = (variable) pool_alloc (onepart_pool (onepart));
6805 empty_var->dv = dv;
6806 empty_var->refcount = 1;
6807 empty_var->n_var_parts = 0;
6808 empty_var->onepart = onepart;
6809 empty_var->in_changed_variables = false;
6810 empty_var->var_part[0].loc_chain = NULL;
6811 empty_var->var_part[0].cur_loc = NULL;
6812 VAR_LOC_1PAUX (empty_var) = NULL;
6813 set_dv_changed (dv, true);
6814
6815 *slot = empty_var;
6816
6817 return empty_var;
6818 }
6819
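/* variable_from_dropped above is the usual find-or-create idiom for
   libiberty hash tables: htab_find_slot_with_hash finds or reserves
   the slot for the key, and NULL slot contents mean a fresh entry that
   must be initialized.  A minimal sketch (make_new_entry and the
   table/key names are hypothetical):

     void **slot = htab_find_slot_with_hash (table, key, hash, INSERT);
     if (*slot == NULL)
       *slot = make_new_entry (key);
     return *slot;
*/
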
6820 /* Recover the one-part aux from dropped_values. */
6821
6822 static struct onepart_aux *
6823 recover_dropped_1paux (variable var)
6824 {
6825 variable dvar;
6826
6827 gcc_checking_assert (var->onepart);
6828
6829 if (VAR_LOC_1PAUX (var))
6830 return VAR_LOC_1PAUX (var);
6831
6832 if (var->onepart == ONEPART_VDECL)
6833 return NULL;
6834
6835 dvar = variable_from_dropped (var->dv, NO_INSERT);
6836
6837 if (!dvar)
6838 return NULL;
6839
6840 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (dvar);
6841 VAR_LOC_1PAUX (dvar) = NULL;
6842
6843 return VAR_LOC_1PAUX (var);
6844 }
6845
6846 /* Add variable VAR to the hash table of changed variables and,
6847 if it has no locations, delete it from SET's hash table. */
6848
6849 static void
6850 variable_was_changed (variable var, dataflow_set *set)
6851 {
6852 hashval_t hash = dv_htab_hash (var->dv);
6853
6854 if (emit_notes)
6855 {
6856 void **slot;
6857
6858 /* Remember this decl or VALUE has been added to changed_variables. */
6859 set_dv_changed (var->dv, true);
6860
6861 slot = htab_find_slot_with_hash (changed_variables,
6862 var->dv,
6863 hash, INSERT);
6864
6865 if (*slot)
6866 {
6867 variable old_var = (variable) *slot;
6868 gcc_assert (old_var->in_changed_variables);
6869 old_var->in_changed_variables = false;
6870 if (var != old_var && var->onepart)
6871 {
6872 /* Restore the auxiliary info from an empty variable
6873 previously created for changed_variables, so it is
6874 not lost. */
6875 gcc_checking_assert (!VAR_LOC_1PAUX (var));
6876 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (old_var);
6877 VAR_LOC_1PAUX (old_var) = NULL;
6878 }
6879 variable_htab_free (*slot);
6880 }
6881
6882 if (set && var->n_var_parts == 0)
6883 {
6884 onepart_enum_t onepart = var->onepart;
6885 variable empty_var = NULL;
6886 void **dslot = NULL;
6887
6888 if (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR)
6889 {
6890 dslot = htab_find_slot_with_hash (dropped_values, var->dv,
6891 dv_htab_hash (var->dv),
6892 INSERT);
6893 empty_var = (variable) *dslot;
6894
6895 if (empty_var)
6896 {
6897 gcc_checking_assert (!empty_var->in_changed_variables);
6898 if (!VAR_LOC_1PAUX (var))
6899 {
6900 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (empty_var);
6901 VAR_LOC_1PAUX (empty_var) = NULL;
6902 }
6903 else
6904 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
6905 }
6906 }
6907
6908 if (!empty_var)
6909 {
6910 empty_var = (variable) pool_alloc (onepart_pool (onepart));
6911 empty_var->dv = var->dv;
6912 empty_var->refcount = 1;
6913 empty_var->n_var_parts = 0;
6914 empty_var->onepart = onepart;
6915 if (dslot)
6916 {
6917 empty_var->refcount++;
6918 *dslot = empty_var;
6919 }
6920 }
6921 else
6922 empty_var->refcount++;
6923 empty_var->in_changed_variables = true;
6924 *slot = empty_var;
6925 if (onepart)
6926 {
6927 empty_var->var_part[0].loc_chain = NULL;
6928 empty_var->var_part[0].cur_loc = NULL;
6929 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (var);
6930 VAR_LOC_1PAUX (var) = NULL;
6931 }
6932 goto drop_var;
6933 }
6934 else
6935 {
6936 if (var->onepart && !VAR_LOC_1PAUX (var))
6937 recover_dropped_1paux (var);
6938 var->refcount++;
6939 var->in_changed_variables = true;
6940 *slot = var;
6941 }
6942 }
6943 else
6944 {
6945 gcc_assert (set);
6946 if (var->n_var_parts == 0)
6947 {
6948 void **slot;
6949
6950 drop_var:
6951 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
6952 if (slot)
6953 {
6954 if (shared_hash_shared (set->vars))
6955 slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
6956 NO_INSERT);
6957 htab_clear_slot (shared_hash_htab (set->vars), slot);
6958 }
6959 }
6960 }
6961 }
6962
6963 /* Look for the index in VAR->var_part corresponding to OFFSET.
6964 Return -1 if not found. If INSERTION_POINT is non-NULL, the
6965 referenced int will be set to the index that the part has or should
6966 have, if it should be inserted. */
6967
6968 static inline int
6969 find_variable_location_part (variable var, HOST_WIDE_INT offset,
6970 int *insertion_point)
6971 {
6972 int pos, low, high;
6973
6974 if (var->onepart)
6975 {
6976 if (offset != 0)
6977 return -1;
6978
6979 if (insertion_point)
6980 *insertion_point = 0;
6981
6982 return var->n_var_parts - 1;
6983 }
6984
6985 /* Find the location part. */
6986 low = 0;
6987 high = var->n_var_parts;
6988 while (low != high)
6989 {
6990 pos = (low + high) / 2;
6991 if (VAR_PART_OFFSET (var, pos) < offset)
6992 low = pos + 1;
6993 else
6994 high = pos;
6995 }
6996 pos = low;
6997
6998 if (insertion_point)
6999 *insertion_point = pos;
7000
7001 if (pos < var->n_var_parts && VAR_PART_OFFSET (var, pos) == offset)
7002 return pos;
7003
7004 return -1;
7005 }
7006
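/* The loop in find_variable_location_part is a textbook lower-bound
   binary search: it maintains the invariant that offsets below LOW are
   smaller than OFFSET and offsets at or above HIGH are not, converging
   on the first position whose offset is >= OFFSET.  The same technique
   over a plain array (a sketch, not part of this pass):

     static int
     lower_bound (const int *a, int n, int key)
     {
       int low = 0, high = n;
       while (low != high)
         {
           int mid = (low + high) / 2;
           if (a[mid] < key)
             low = mid + 1;
           else
             high = mid;
         }
       return low;   (first index I with a[I] >= key, or N)
     }
*/
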
7007 static void **
7008 set_slot_part (dataflow_set *set, rtx loc, void **slot,
7009 decl_or_value dv, HOST_WIDE_INT offset,
7010 enum var_init_status initialized, rtx set_src)
7011 {
7012 int pos;
7013 location_chain node, next;
7014 location_chain *nextp;
7015 variable var;
7016 onepart_enum_t onepart;
7017
7018 var = (variable) *slot;
7019
7020 if (var)
7021 onepart = var->onepart;
7022 else
7023 onepart = dv_onepart_p (dv);
7024
7025 gcc_checking_assert (offset == 0 || !onepart);
7026 gcc_checking_assert (loc != dv_as_opaque (dv));
7027
7028 if (! flag_var_tracking_uninit)
7029 initialized = VAR_INIT_STATUS_INITIALIZED;
7030
7031 if (!var)
7032 {
7033 /* Create new variable information. */
7034 var = (variable) pool_alloc (onepart_pool (onepart));
7035 var->dv = dv;
7036 var->refcount = 1;
7037 var->n_var_parts = 1;
7038 var->onepart = onepart;
7039 var->in_changed_variables = false;
7040 if (var->onepart)
7041 VAR_LOC_1PAUX (var) = NULL;
7042 else
7043 VAR_PART_OFFSET (var, 0) = offset;
7044 var->var_part[0].loc_chain = NULL;
7045 var->var_part[0].cur_loc = NULL;
7046 *slot = var;
7047 pos = 0;
7048 nextp = &var->var_part[0].loc_chain;
7049 }
7050 else if (onepart)
7051 {
7052 int r = -1, c = 0;
7053
7054 gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));
7055
7056 pos = 0;
7057
7058 if (GET_CODE (loc) == VALUE)
7059 {
7060 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7061 nextp = &node->next)
7062 if (GET_CODE (node->loc) == VALUE)
7063 {
7064 if (node->loc == loc)
7065 {
7066 r = 0;
7067 break;
7068 }
7069 if (canon_value_cmp (node->loc, loc))
7070 c++;
7071 else
7072 {
7073 r = 1;
7074 break;
7075 }
7076 }
7077 else if (REG_P (node->loc) || MEM_P (node->loc))
7078 c++;
7079 else
7080 {
7081 r = 1;
7082 break;
7083 }
7084 }
7085 else if (REG_P (loc))
7086 {
7087 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7088 nextp = &node->next)
7089 if (REG_P (node->loc))
7090 {
7091 if (REGNO (node->loc) < REGNO (loc))
7092 c++;
7093 else
7094 {
7095 if (REGNO (node->loc) == REGNO (loc))
7096 r = 0;
7097 else
7098 r = 1;
7099 break;
7100 }
7101 }
7102 else
7103 {
7104 r = 1;
7105 break;
7106 }
7107 }
7108 else if (MEM_P (loc))
7109 {
7110 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7111 nextp = &node->next)
7112 if (REG_P (node->loc))
7113 c++;
7114 else if (MEM_P (node->loc))
7115 {
7116 if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0)
7117 break;
7118 else
7119 c++;
7120 }
7121 else
7122 {
7123 r = 1;
7124 break;
7125 }
7126 }
7127 else
7128 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7129 nextp = &node->next)
7130 if ((r = loc_cmp (node->loc, loc)) >= 0)
7131 break;
7132 else
7133 c++;
7134
7135 if (r == 0)
7136 return slot;
7137
7138 if (shared_var_p (var, set->vars))
7139 {
7140 slot = unshare_variable (set, slot, var, initialized);
7141 var = (variable)*slot;
7142 for (nextp = &var->var_part[0].loc_chain; c;
7143 nextp = &(*nextp)->next)
7144 c--;
7145 gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
7146 }
7147 }
7148 else
7149 {
7150 int inspos = 0;
7151
7152 gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));
7153
7154 pos = find_variable_location_part (var, offset, &inspos);
7155
7156 if (pos >= 0)
7157 {
7158 node = var->var_part[pos].loc_chain;
7159
7160 if (node
7161 && ((REG_P (node->loc) && REG_P (loc)
7162 && REGNO (node->loc) == REGNO (loc))
7163 || rtx_equal_p (node->loc, loc)))
7164 {
7165 /* LOC is at the beginning of the chain, so we have nothing
7166 to do. */
7167 if (node->init < initialized)
7168 node->init = initialized;
7169 if (set_src != NULL)
7170 node->set_src = set_src;
7171
7172 return slot;
7173 }
7174 else
7175 {
7176 /* We have to make a copy of a shared variable. */
7177 if (shared_var_p (var, set->vars))
7178 {
7179 slot = unshare_variable (set, slot, var, initialized);
7180 var = (variable)*slot;
7181 }
7182 }
7183 }
7184 else
7185 {
7186 /* We have not found the location part; a new one will be created. */
7187
7188 /* We have to make a copy of the shared variable. */
7189 if (shared_var_p (var, set->vars))
7190 {
7191 slot = unshare_variable (set, slot, var, initialized);
7192 var = (variable)*slot;
7193 }
7194
7195 /* We track only variables whose size is <= MAX_VAR_PARTS bytes;
7196 thus there are at most MAX_VAR_PARTS different offsets. */
7197 gcc_assert (var->n_var_parts < MAX_VAR_PARTS
7198 && (!var->n_var_parts || !onepart));
7199
7200 /* We have to move the elements of the array starting at index
7201 INSPOS to the next position. */
7202 for (pos = var->n_var_parts; pos > inspos; pos--)
7203 var->var_part[pos] = var->var_part[pos - 1];
7204
7205 var->n_var_parts++;
7206 gcc_checking_assert (!onepart);
7207 VAR_PART_OFFSET (var, pos) = offset;
7208 var->var_part[pos].loc_chain = NULL;
7209 var->var_part[pos].cur_loc = NULL;
7210 }
7211
7212 /* Delete the location from the list. */
7213 nextp = &var->var_part[pos].loc_chain;
7214 for (node = var->var_part[pos].loc_chain; node; node = next)
7215 {
7216 next = node->next;
7217 if ((REG_P (node->loc) && REG_P (loc)
7218 && REGNO (node->loc) == REGNO (loc))
7219 || rtx_equal_p (node->loc, loc))
7220 {
7221 /* Save these values, to assign to the new node, before
7222 deleting this one. */
7223 if (node->init > initialized)
7224 initialized = node->init;
7225 if (node->set_src != NULL && set_src == NULL)
7226 set_src = node->set_src;
7227 if (var->var_part[pos].cur_loc == node->loc)
7228 var->var_part[pos].cur_loc = NULL;
7229 pool_free (loc_chain_pool, node);
7230 *nextp = next;
7231 break;
7232 }
7233 else
7234 nextp = &node->next;
7235 }
7236
7237 nextp = &var->var_part[pos].loc_chain;
7238 }
7239
7240 /* Add the location to the beginning. */
7241 node = (location_chain) pool_alloc (loc_chain_pool);
7242 node->loc = loc;
7243 node->init = initialized;
7244 node->set_src = set_src;
7245 node->next = *nextp;
7246 *nextp = node;
7247
7248 /* If no location has been emitted for this part yet, note the change. */
7249 if (var->var_part[pos].cur_loc == NULL)
7250 variable_was_changed (var, set);
7251
7252 return slot;
7253 }
7254
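/* set_slot_part above leans on the "pointer to the next pointer"
   idiom: NEXTP always addresses the link that may have to change, so
   deleting the old occurrence of LOC (*nextp = next) and inserting the
   new node (node->next = *nextp; *nextp = node) need no special case
   for the head of the chain.  A minimal sketch of sorted insertion
   with a hypothetical node type:

     struct slist { int key; struct slist *next; };

     static void
     slist_insert_sorted (struct slist **headp, struct slist *node)
     {
       struct slist **nextp = headp;
       while (*nextp && (*nextp)->key < node->key)
         nextp = &(*nextp)->next;
       node->next = *nextp;
       *nextp = node;
     }
*/
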
7255 /* Set the part of variable's location in the dataflow set SET. The
7256 variable part is specified by variable's declaration in DV and
7257 offset OFFSET and the part's location by LOC. IOPT should be
7258 NO_INSERT if the variable is known to be in SET already and the
7259 variable hash table must not be resized, and INSERT otherwise. */
7260
7261 static void
7262 set_variable_part (dataflow_set *set, rtx loc,
7263 decl_or_value dv, HOST_WIDE_INT offset,
7264 enum var_init_status initialized, rtx set_src,
7265 enum insert_option iopt)
7266 {
7267 void **slot;
7268
7269 if (iopt == NO_INSERT)
7270 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7271 else
7272 {
7273 slot = shared_hash_find_slot (set->vars, dv);
7274 if (!slot)
7275 slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
7276 }
7277 set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
7278 }
7279
7280 /* Remove all recorded register locations for the given variable part
7281 from dataflow set SET, except for those that are identical to LOC.
7282 The variable part is specified by its SET->vars slot SLOT and
7283 offset OFFSET. */
7284
7285 static void **
7286 clobber_slot_part (dataflow_set *set, rtx loc, void **slot,
7287 HOST_WIDE_INT offset, rtx set_src)
7288 {
7289 variable var = (variable) *slot;
7290 int pos = find_variable_location_part (var, offset, NULL);
7291
7292 if (pos >= 0)
7293 {
7294 location_chain node, next;
7295
7296 /* Remove the register locations from the dataflow set. */
7297 next = var->var_part[pos].loc_chain;
7298 for (node = next; node; node = next)
7299 {
7300 next = node->next;
7301 if (node->loc != loc
7302 && (!flag_var_tracking_uninit
7303 || !set_src
7304 || MEM_P (set_src)
7305 || !rtx_equal_p (set_src, node->set_src)))
7306 {
7307 if (REG_P (node->loc))
7308 {
7309 attrs anode, anext;
7310 attrs *anextp;
7311
7312 /* Remove the variable part from the register's
7313 list, but preserve any other variable parts
7314 that might be regarded as live in that same
7315 register. */
7316 anextp = &set->regs[REGNO (node->loc)];
7317 for (anode = *anextp; anode; anode = anext)
7318 {
7319 anext = anode->next;
7320 if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
7321 && anode->offset == offset)
7322 {
7323 pool_free (attrs_pool, anode);
7324 *anextp = anext;
7325 }
7326 else
7327 anextp = &anode->next;
7328 }
7329 }
7330
7331 slot = delete_slot_part (set, node->loc, slot, offset);
7332 }
7333 }
7334 }
7335
7336 return slot;
7337 }
7338
7339 /* Remove all recorded register locations for the given variable part
7340 from dataflow set SET, except for those that are identical to LOC.
7341 The variable part is specified by the variable's declaration or
7342 value DV and offset OFFSET. */
7343
7344 static void
7345 clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7346 HOST_WIDE_INT offset, rtx set_src)
7347 {
7348 void **slot;
7349
7350 if (!dv_as_opaque (dv)
7351 || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
7352 return;
7353
7354 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7355 if (!slot)
7356 return;
7357
7358 clobber_slot_part (set, loc, slot, offset, set_src);
7359 }
7360
7361 /* Delete the part of the variable's location from dataflow set SET.
7362 The variable part is specified by its SET->vars slot SLOT and offset
7363 OFFSET, and the part's location by LOC. */
7364
7365 static void **
7366 delete_slot_part (dataflow_set *set, rtx loc, void **slot,
7367 HOST_WIDE_INT offset)
7368 {
7369 variable var = (variable) *slot;
7370 int pos = find_variable_location_part (var, offset, NULL);
7371
7372 if (pos >= 0)
7373 {
7374 location_chain node, next;
7375 location_chain *nextp;
7376 bool changed;
7377 rtx cur_loc;
7378
7379 if (shared_var_p (var, set->vars))
7380 {
7381 /* If the variable contains the location part, we have to
7382 make a copy of the variable. */
7383 for (node = var->var_part[pos].loc_chain; node;
7384 node = node->next)
7385 {
7386 if ((REG_P (node->loc) && REG_P (loc)
7387 && REGNO (node->loc) == REGNO (loc))
7388 || rtx_equal_p (node->loc, loc))
7389 {
7390 slot = unshare_variable (set, slot, var,
7391 VAR_INIT_STATUS_UNKNOWN);
7392 var = (variable)*slot;
7393 break;
7394 }
7395 }
7396 }
7397
7398 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
7399 cur_loc = VAR_LOC_FROM (var);
7400 else
7401 cur_loc = var->var_part[pos].cur_loc;
7402
7403 /* Delete the location part. */
7404 changed = false;
7405 nextp = &var->var_part[pos].loc_chain;
7406 for (node = *nextp; node; node = next)
7407 {
7408 next = node->next;
7409 if ((REG_P (node->loc) && REG_P (loc)
7410 && REGNO (node->loc) == REGNO (loc))
7411 || rtx_equal_p (node->loc, loc))
7412 {
7413 /* If we have deleted the location which was last emitted,
7414 we have to emit a new location, so add the variable to the
7415 set of changed variables. */
7416 if (cur_loc == node->loc)
7417 {
7418 changed = true;
7419 var->var_part[pos].cur_loc = NULL;
7420 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
7421 VAR_LOC_FROM (var) = NULL;
7422 }
7423 pool_free (loc_chain_pool, node);
7424 *nextp = next;
7425 break;
7426 }
7427 else
7428 nextp = &node->next;
7429 }
7430
7431 if (var->var_part[pos].loc_chain == NULL)
7432 {
7433 changed = true;
7434 var->n_var_parts--;
7435 while (pos < var->n_var_parts)
7436 {
7437 var->var_part[pos] = var->var_part[pos + 1];
7438 pos++;
7439 }
7440 }
7441 if (changed)
7442 variable_was_changed (var, set);
7443 }
7444
7445 return slot;
7446 }
7447
7448 /* Delete the part of the variable's location from dataflow set SET.
7449 The variable part is specified by the variable's declaration or
7450 value DV and offset OFFSET, and the part's location by LOC. */
7451
7452 static void
7453 delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7454 HOST_WIDE_INT offset)
7455 {
7456 void **slot = shared_hash_find_slot_noinsert (set->vars, dv);
7457 if (!slot)
7458 return;
7459
7460 delete_slot_part (set, loc, slot, offset);
7461 }
7462
7463 DEF_VEC_P (variable);
7464 DEF_VEC_ALLOC_P (variable, heap);
7465
7466 DEF_VEC_ALLOC_P_STACK (rtx);
7467 #define VEC_rtx_stack_alloc(alloc) VEC_stack_alloc (rtx, alloc)
7468
7469 /* Structure for passing additional parameters to the function
7470 vt_expand_loc_callback. */
7471 struct expand_loc_callback_data
7472 {
7473 /* The variables and values active at this point. */
7474 htab_t vars;
7475
7476 /* Stack of values and debug_exprs under expansion, and their
7477 children. */
7478 VEC (rtx, stack) *expanding;
7479
7480 /* Stack of values and debug_exprs whose expansion hit recursion
7481 cycles. They will have VALUE_RECURSED_INTO marked when added to
7482 this list. This flag will be cleared if any of its dependencies
7483 resolves to a valid location. So, if the flag remains set at the
7484 end of the search, we know no valid location for this one can
7485 possibly exist. */
7486 VEC (rtx, stack) *pending;
7487
7488 /* The maximum depth among the sub-expressions under expansion.
7489 Zero indicates no expansion so far. */
7490 int depth;
7491 };
7492
7493 /* Allocate the one-part auxiliary data structure for VAR, with enough
7494 room for COUNT dependencies. */
7495
7496 static void
7497 loc_exp_dep_alloc (variable var, int count)
7498 {
7499 size_t allocsize;
7500
7501 gcc_checking_assert (var->onepart);
7502
7503 /* We can be called with COUNT == 0 to allocate the data structure
7504 without any dependencies, e.g. for the backlinks only. However,
7505 if we are specifying a COUNT, then the dependency list must have
7506 been emptied before. It would be possible to adjust pointers or
7507 force it empty here, but this is better done at an earlier point
7508 in the algorithm, so we instead leave an assertion to catch
7509 errors. */
7510 gcc_checking_assert (!count
7511 || VEC_empty (loc_exp_dep, VAR_LOC_DEP_VEC (var)));
7512
7513 if (VAR_LOC_1PAUX (var)
7514 && VEC_space (loc_exp_dep, VAR_LOC_DEP_VEC (var), count))
7515 return;
7516
7517 allocsize = offsetof (struct onepart_aux, deps)
7518 + VEC_embedded_size (loc_exp_dep, count);
7519
7520 if (VAR_LOC_1PAUX (var))
7521 {
7522 VAR_LOC_1PAUX (var) = XRESIZEVAR (struct onepart_aux,
7523 VAR_LOC_1PAUX (var), allocsize);
7524 /* If the reallocation moves the onepaux structure, the
7525 back-pointer to BACKLINKS in the first list member will still
7526 point to its old location. Adjust it. */
7527 if (VAR_LOC_DEP_LST (var))
7528 VAR_LOC_DEP_LST (var)->pprev = VAR_LOC_DEP_LSTP (var);
7529 }
7530 else
7531 {
7532 VAR_LOC_1PAUX (var) = XNEWVAR (struct onepart_aux, allocsize);
7533 *VAR_LOC_DEP_LSTP (var) = NULL;
7534 VAR_LOC_FROM (var) = NULL;
7535 VAR_LOC_DEPTH (var) = 0;
7536 }
7537 VEC_embedded_init (loc_exp_dep, VAR_LOC_DEP_VEC (var), count);
7538 }
7539
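/* loc_exp_dep_alloc above over-allocates struct onepart_aux so that
   the dependency vector lives in the same memory block as its header,
   using offsetof to size the leading fields.  With a C99 flexible
   array member the same layout trick looks like this (a sketch with
   hypothetical types; xmalloc is libiberty's checked allocator):

     struct ivec { size_t len; int data[]; };

     static struct ivec *
     ivec_new (size_t n)
     {
       struct ivec *v
         = (struct ivec *) xmalloc (offsetof (struct ivec, data)
                                    + n * sizeof (int));
       v->len = n;
       return v;
     }
*/
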
7540 /* Remove all entries from the vector of active dependencies of VAR,
7541 removing them from the back-links lists too. */
7542
7543 static void
7544 loc_exp_dep_clear (variable var)
7545 {
7546 while (!VEC_empty (loc_exp_dep, VAR_LOC_DEP_VEC (var)))
7547 {
7548 loc_exp_dep *led = VEC_last (loc_exp_dep, VAR_LOC_DEP_VEC (var));
7549 if (led->next)
7550 led->next->pprev = led->pprev;
7551 if (led->pprev)
7552 *led->pprev = led->next;
7553 VEC_pop (loc_exp_dep, VAR_LOC_DEP_VEC (var));
7554 }
7555 }
7556
7557 /* Insert an active dependency from VAR on X to the vector of
7558 dependencies, and add the corresponding back-link to X's list of
7559 back-links in VARS. */
7560
7561 static void
7562 loc_exp_insert_dep (variable var, rtx x, htab_t vars)
7563 {
7564 decl_or_value dv;
7565 variable xvar;
7566 loc_exp_dep *led;
7567
7568 dv = dv_from_rtx (x);
7569
7570 /* ??? Build a vector of variables parallel to EXPANDING, to avoid
7571 an additional lookup? */
7572 xvar = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
7573
7574 if (!xvar)
7575 {
7576 xvar = variable_from_dropped (dv, NO_INSERT);
7577 gcc_checking_assert (xvar);
7578 }
7579
7580 /* No point in adding the same backlink more than once.  This may
7581 arise if, say, the same value appears in two complex expressions in
7582 the same loc_list, or even more than once in a single
7583 expression. */
7584 if (VAR_LOC_DEP_LST (xvar) && VAR_LOC_DEP_LST (xvar)->dv == var->dv)
7585 return;
7586
7587 VEC_quick_push (loc_exp_dep, VAR_LOC_DEP_VEC (var), NULL);
7588 led = VEC_last (loc_exp_dep, VAR_LOC_DEP_VEC (var));
7589 led->dv = var->dv;
7590 led->value = x;
7591
7592 loc_exp_dep_alloc (xvar, 0);
7593 led->pprev = VAR_LOC_DEP_LSTP (xvar);
7594 led->next = *led->pprev;
7595 if (led->next)
7596 led->next->pprev = &led->next;
7597 *led->pprev = led;
7598 }
7599
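/* The back-links maintained above form an intrusive doubly-linked
   list in the style of Linux's hlist: PPREV points at whatever field
   currently points at the node (the list head or the previous node's
   NEXT), so unlinking is O(1) with no special case for the first
   element.  A minimal sketch with a hypothetical node type:

     struct hnode { struct hnode *next, **pprev; };

     static void
     hnode_unlink (struct hnode *n)
     {
       if (n->next)
         n->next->pprev = n->pprev;
       *n->pprev = n->next;
     }
*/
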
7600 /* Create active dependencies of VAR on COUNT values starting at
7601 VALUE, and corresponding back-links to the entries in VARS. Return
7602 true if we found any pending-recursion results. */
7603
7604 static bool
7605 loc_exp_dep_set (variable var, rtx result, rtx *value, int count, htab_t vars)
7606 {
7607 bool pending_recursion = false;
7608
7609 gcc_checking_assert (VEC_empty (loc_exp_dep, VAR_LOC_DEP_VEC (var)));
7610
7611 /* Set up dependencies on each of the COUNT values starting at
7612 VALUE, as collected by the caller's expansion loop. */
7613 loc_exp_dep_alloc (var, count);
7614
7615 while (count--)
7616 {
7617 rtx x = *value++;
7618
7619 if (!pending_recursion)
7620 pending_recursion = !result && VALUE_RECURSED_INTO (x);
7621
7622 loc_exp_insert_dep (var, x, vars);
7623 }
7624
7625 return pending_recursion;
7626 }
7627
7628 /* Notify the back-links of IVAR that are pending recursion that we
7629 have found a non-NIL value for it, so they are cleared for another
7630 attempt to compute a current location. */
7631
7632 static void
7633 notify_dependents_of_resolved_value (variable ivar, htab_t vars)
7634 {
7635 loc_exp_dep *led, *next;
7636
7637 for (led = VAR_LOC_DEP_LST (ivar); led; led = next)
7638 {
7639 decl_or_value dv = led->dv;
7640 variable var;
7641
7642 next = led->next;
7643
7644 if (dv_is_value_p (dv))
7645 {
7646 rtx value = dv_as_value (dv);
7647
7648 /* If we have already resolved it, leave it alone. */
7649 if (!VALUE_RECURSED_INTO (value))
7650 continue;
7651
7652 /* Check that VALUE_RECURSED_INTO, true from the test above,
7653 implies NO_LOC_P. */
7654 gcc_checking_assert (NO_LOC_P (value));
7655
7656 /* We won't notify variables that are being expanded,
7657 because their dependency list is cleared before
7658 recursing. */
7659 NO_LOC_P (value) = false;
7660 VALUE_RECURSED_INTO (value) = false;
7661
7662 gcc_checking_assert (dv_changed_p (dv));
7663 }
7664 else if (!dv_changed_p (dv))
7665 continue;
7666
7667 var = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
7668
7669 if (!var)
7670 var = variable_from_dropped (dv, NO_INSERT);
7671
7672 if (var)
7673 notify_dependents_of_resolved_value (var, vars);
7674
7675 if (next)
7676 next->pprev = led->pprev;
7677 if (led->pprev)
7678 *led->pprev = next;
7679 led->next = NULL;
7680 led->pprev = NULL;
7681 }
7682 }
7683
7684 static rtx vt_expand_loc_callback (rtx x, bitmap regs,
7685 int max_depth, void *data);
7686
7687 /* Return the combined depth, when one sub-expression evaluated to
7688 BEST_DEPTH and the previous known depth was SAVED_DEPTH. */
7689
7690 static inline int
7691 update_depth (int saved_depth, int best_depth)
7692 {
7693 /* If we didn't find anything, stick with what we had. */
7694 if (!best_depth)
7695 return saved_depth;
7696
7697 /* If we hadn't found anything before, use the depth of the current
7698 expression.  Do NOT add one extra level; we want to compute the
7699 maximum depth among sub-expressions.  We'll increment it later,
7700 if appropriate. */
7701 if (!saved_depth)
7702 return best_depth;
7703
7704 if (saved_depth < best_depth)
7705 return best_depth;
7706 else
7707 return saved_depth;
7708 }
7709
7710 /* Expand VAR to a location RTX, updating its cur_loc.  Use REGS and
7711 DATA for the cselib expand callback.  If PENDRECP is given, indicate
7712 in it whether any sub-expression couldn't be fully evaluated because
7713 it is pending recursion resolution. */
7714
7715 static inline rtx
7716 vt_expand_var_loc_chain (variable var, bitmap regs, void *data, bool *pendrecp)
7717 {
7718 struct expand_loc_callback_data *elcd
7719 = (struct expand_loc_callback_data *) data;
7720 location_chain loc, next;
7721 rtx result = NULL;
7722 int first_child, result_first_child, last_child;
7723 bool pending_recursion;
7724 rtx loc_from = NULL;
7725 struct elt_loc_list *cloc = NULL;
7726 int depth = 0, saved_depth = elcd->depth;
7727
7728 /* Clear all backlinks pointing at this, so that we're not notified
7729 while we're active. */
7730 loc_exp_dep_clear (var);
7731
7732 if (var->onepart == ONEPART_VALUE)
7733 {
7734 cselib_val *val = CSELIB_VAL_PTR (dv_as_value (var->dv));
7735
7736 gcc_checking_assert (cselib_preserved_value_p (val));
7737
7738 cloc = val->locs;
7739 }
7740
7741 first_child = result_first_child = last_child
7742 = VEC_length (rtx, elcd->expanding);
7743
7744 /* Attempt to expand each available location in turn. */
7745 for (next = loc = var->n_var_parts ? var->var_part[0].loc_chain : NULL;
7746 loc || cloc; loc = next)
7747 {
7748 result_first_child = last_child;
7749
7750 if (!loc || (GET_CODE (loc->loc) == ENTRY_VALUE && cloc))
7751 {
7752 loc_from = cloc->loc;
7753 next = loc;
7754 cloc = cloc->next;
7755 if (unsuitable_loc (loc_from))
7756 continue;
7757 }
7758 else
7759 {
7760 loc_from = loc->loc;
7761 next = loc->next;
7762 }
7763
7764 gcc_checking_assert (!unsuitable_loc (loc_from));
7765
7766 elcd->depth = 0;
7767 result = cselib_expand_value_rtx_cb (loc_from, regs, EXPR_DEPTH,
7768 vt_expand_loc_callback, data);
7769 last_child = VEC_length (rtx, elcd->expanding);
7770
7771 if (result)
7772 {
7773 depth = elcd->depth;
7774
7775 gcc_checking_assert (depth || result_first_child == last_child);
7776
7777 if (last_child - result_first_child != 1)
7778 depth++;
7779
7780 if (depth <= EXPR_USE_DEPTH)
7781 break;
7782
7783 result = NULL;
7784 }
7785
7786 /* Set it up in case we leave the loop. */
7787 depth = 0;
7788 loc_from = NULL;
7789 result_first_child = first_child;
7790 }
7791
7792 /* Register all encountered dependencies as active. */
7793 pending_recursion = loc_exp_dep_set
7794 (var, result, VEC_address (rtx, elcd->expanding) + result_first_child,
7795 last_child - result_first_child, elcd->vars);
7796
7797 VEC_truncate (rtx, elcd->expanding, first_child);
7798
7799 /* Record where the expansion came from. */
7800 gcc_checking_assert (!result || !pending_recursion);
7801 VAR_LOC_FROM (var) = loc_from;
7802 VAR_LOC_DEPTH (var) = depth;
7803
7804 gcc_checking_assert (!depth == !result);
7805
7806 elcd->depth = update_depth (saved_depth, depth);
7807
7808 /* Indicate whether any of the dependencies are pending recursion
7809 resolution. */
7810 if (pendrecp)
7811 *pendrecp = pending_recursion;
7812
7813 if (!pendrecp || !pending_recursion)
7814 var->var_part[0].cur_loc = result;
7815
7816 return result;
7817 }
7818
7819 /* Callback for cselib_expand_value, that looks for expressions
7820 holding the value in the var-tracking hash tables.  Return X for
7821 standard processing; anything else is to be used as-is. */
7822
7823 static rtx
7824 vt_expand_loc_callback (rtx x, bitmap regs,
7825 int max_depth ATTRIBUTE_UNUSED,
7826 void *data)
7827 {
7828 struct expand_loc_callback_data *elcd
7829 = (struct expand_loc_callback_data *) data;
7830 decl_or_value dv;
7831 variable var;
7832 rtx result, subreg;
7833 bool pending_recursion = false;
7834 bool from_empty = false;
7835
7836 switch (GET_CODE (x))
7837 {
7838 case SUBREG:
7839 subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
7840 EXPR_DEPTH,
7841 vt_expand_loc_callback, data);
7842
7843 if (!subreg)
7844 return NULL;
7845
7846 result = simplify_gen_subreg (GET_MODE (x), subreg,
7847 GET_MODE (SUBREG_REG (x)),
7848 SUBREG_BYTE (x));
7849
7850 /* Invalid SUBREGs are ok in debug info. ??? We could try
7851 alternate expansions for the VALUE as well. */
7852 if (!result)
7853 result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));
7854
7855 return result;
7856
7857 case DEBUG_EXPR:
7858 case VALUE:
7859 dv = dv_from_rtx (x);
7860 break;
7861
7862 default:
7863 return x;
7864 }
7865
7866 VEC_safe_push (rtx, stack, elcd->expanding, x);
7867
7868 /* Check that VALUE_RECURSED_INTO implies NO_LOC_P. */
7869 gcc_checking_assert (!VALUE_RECURSED_INTO (x) || NO_LOC_P (x));
7870
7871 if (NO_LOC_P (x))
7872 {
7873 gcc_checking_assert (VALUE_RECURSED_INTO (x) || !dv_changed_p (dv));
7874 return NULL;
7875 }
7876
7877 var = (variable) htab_find_with_hash (elcd->vars, dv, dv_htab_hash (dv));
7878
7879 if (!var)
7880 {
7881 from_empty = true;
7882 var = variable_from_dropped (dv, INSERT);
7883 }
7884
7885 gcc_checking_assert (var);
7886
7887 if (!dv_changed_p (dv))
7888 {
7889 gcc_checking_assert (!NO_LOC_P (x));
7890 gcc_checking_assert (var->var_part[0].cur_loc);
7891 gcc_checking_assert (VAR_LOC_1PAUX (var));
7892 gcc_checking_assert (VAR_LOC_1PAUX (var)->depth);
7893
7894 elcd->depth = update_depth (elcd->depth, VAR_LOC_1PAUX (var)->depth);
7895
7896 return var->var_part[0].cur_loc;
7897 }
7898
7899 VALUE_RECURSED_INTO (x) = true;
7900 /* This is tentative, but it makes some tests simpler. */
7901 NO_LOC_P (x) = true;
7902
7903 gcc_checking_assert (var->n_var_parts == 1 || from_empty);
7904
7905 result = vt_expand_var_loc_chain (var, regs, data, &pending_recursion);
7906
7907 if (pending_recursion)
7908 {
7909 gcc_checking_assert (!result);
7910 VEC_safe_push (rtx, stack, elcd->pending, x);
7911 }
7912 else
7913 {
7914 NO_LOC_P (x) = !result;
7915 VALUE_RECURSED_INTO (x) = false;
7916 set_dv_changed (dv, false);
7917
7918 if (result)
7919 notify_dependents_of_resolved_value (var, elcd->vars);
7920 }
7921
7922 return result;
7923 }
7924
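/* Taken together, vt_expand_var_loc_chain and vt_expand_loc_callback
   implement a depth-first search over the value dependency graph with
   grey/black node marking: VALUE_RECURSED_INTO marks nodes currently
   on the expansion stack (grey), so reaching one means a cycle and a
   tentative NULL result, while !dv_changed_p with a cached cur_loc
   plays the role of a black node.  Schematically (a sketch of the
   control flow, not the actual callback):

     expand (v):
       if (grey (v))              cycle: tentatively no location
         return NULL;
       if (black (v))
         return cached_loc (v);
       mark_grey (v);
       loc = try each location of v, recursing into sub-values;
       if (not pending recursion)
         { unmark_grey (v); mark_black (v); cache (v, loc); }
       return loc;
*/
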
7925 /* While expanding variables, we may encounter recursion cycles
7926 because of mutual (possibly indirect) dependencies between two
7927 particular variables (or values), say A and B.  If we're trying to
7928 expand A and we get to B, which in turn attempts to expand A, and
7929 we can't find any other expansion for B, we'll add B to this
7930 pending-recursion stack and tentatively return NULL for its
7931 location.  This tentative value will be used for any other
7932 occurrences of B, unless A gets some other location, in which case
7933 A will notify B that it is worth another try at computing a
7934 location for it, and B will then use the location computed for A.
7935 At the end of the expansion, the tentative NULL locations become
7936 final for all members of PENDING that didn't get a notification.
7937 This function performs that finalization of the NULL locations. */
7938
7939 static void
7940 resolve_expansions_pending_recursion (VEC (rtx, stack) *pending)
7941 {
7942 while (!VEC_empty (rtx, pending))
7943 {
7944 rtx x = VEC_pop (rtx, pending);
7945 decl_or_value dv;
7946
7947 if (!VALUE_RECURSED_INTO (x))
7948 continue;
7949
7950 gcc_checking_assert (NO_LOC_P (x));
7951 VALUE_RECURSED_INTO (x) = false;
7952 dv = dv_from_rtx (x);
7953 gcc_checking_assert (dv_changed_p (dv));
7954 set_dv_changed (dv, false);
7955 }
7956 }
7957
7958 /* Initialize expand_loc_callback_data D with variable hash table V.
7959 It must be a macro because of alloca (VEC stack). */
7960 #define INIT_ELCD(d, v) \
7961 do \
7962 { \
7963 (d).vars = (v); \
7964 (d).expanding = VEC_alloc (rtx, stack, 4); \
7965 (d).pending = VEC_alloc (rtx, stack, 4); \
7966 (d).depth = 0; \
7967 } \
7968 while (0)
7969 /* Finalize expand_loc_callback_data D, resolved to location L. */
7970 #define FINI_ELCD(d, l) \
7971 do \
7972 { \
7973 resolve_expansions_pending_recursion ((d).pending); \
7974 VEC_free (rtx, stack, (d).pending); \
7975 VEC_free (rtx, stack, (d).expanding); \
7976 \
7977 if ((l) && MEM_P (l)) \
7978 (l) = targetm.delegitimize_address (l); \
7979 } \
7980 while (0)
7981
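/* Both macros use the do { ... } while (0) idiom so that a
   multi-statement macro expands to a single statement: it accepts a
   trailing semicolon and remains correct in unbraced contexts such as

     if (need_expansion)
       INIT_ELCD (data, vars);
     else
       ...

   which a bare braced block would break.  */
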
7982 /* Expand VALUEs and DEBUG_EXPRs in LOC to a location, using the
7983 equivalences in VARS, updating their CUR_LOCs in the process. */
7984
7985 static rtx
7986 vt_expand_loc (rtx loc, htab_t vars)
7987 {
7988 struct expand_loc_callback_data data;
7989 rtx result;
7990
7991 if (!MAY_HAVE_DEBUG_INSNS)
7992 return loc;
7993
7994 INIT_ELCD (data, vars);
7995
7996 result = cselib_expand_value_rtx_cb (loc, scratch_regs, EXPR_DEPTH,
7997 vt_expand_loc_callback, &data);
7998
7999 FINI_ELCD (data, result);
8000
8001 return result;
8002 }
8003
8004 /* Expand the one-part VARiable to a location, using the equivalences
8005 in VARS, updating their CUR_LOCs in the process. */
8006
8007 static rtx
8008 vt_expand_1pvar (variable var, htab_t vars)
8009 {
8010 struct expand_loc_callback_data data;
8011 rtx loc;
8012
8013 gcc_checking_assert (var->onepart && var->n_var_parts == 1);
8014
8015 if (!dv_changed_p (var->dv))
8016 return var->var_part[0].cur_loc;
8017
8018 INIT_ELCD (data, vars);
8019
8020 loc = vt_expand_var_loc_chain (var, scratch_regs, &data, NULL);
8021
8022 gcc_checking_assert (VEC_empty (rtx, data.expanding));
8023
8024 FINI_ELCD (data, loc);
8025
8026 return loc;
8027 }
8028
8029 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA carries the
8030 instruction INSN, the variable hash table VARS, and WHERE, which says
8031 whether the note shall be emitted before or after INSN. */
8032
8033 static int
8034 emit_note_insn_var_location (void **varp, void *data)
8035 {
8036 variable var = (variable) *varp;
8037 rtx insn = ((emit_note_data *)data)->insn;
8038 enum emit_note_where where = ((emit_note_data *)data)->where;
8039 htab_t vars = ((emit_note_data *)data)->vars;
8040 rtx note, note_vl;
8041 int i, j, n_var_parts;
8042 bool complete;
8043 enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
8044 HOST_WIDE_INT last_limit;
8045 tree type_size_unit;
8046 HOST_WIDE_INT offsets[MAX_VAR_PARTS];
8047 rtx loc[MAX_VAR_PARTS];
8048 tree decl;
8049 location_chain lc;
8050
8051 gcc_checking_assert (var->onepart == NOT_ONEPART
8052 || var->onepart == ONEPART_VDECL);
8053
8054 decl = dv_as_decl (var->dv);
8055
8056 complete = true;
8057 last_limit = 0;
8058 n_var_parts = 0;
8059 if (!var->onepart)
8060 for (i = 0; i < var->n_var_parts; i++)
8061 if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain)
8062 var->var_part[i].cur_loc = var->var_part[i].loc_chain->loc;
8063 for (i = 0; i < var->n_var_parts; i++)
8064 {
8065 enum machine_mode mode, wider_mode;
8066 rtx loc2;
8067 HOST_WIDE_INT offset;
8068
8069 if (i == 0 && var->onepart)
8070 {
8071 gcc_checking_assert (var->n_var_parts == 1);
8072 offset = 0;
8073 initialized = VAR_INIT_STATUS_INITIALIZED;
8074 loc2 = vt_expand_1pvar (var, vars);
8075 }
8076 else
8077 {
8078 if (last_limit < VAR_PART_OFFSET (var, i))
8079 {
8080 complete = false;
8081 break;
8082 }
8083 else if (last_limit > VAR_PART_OFFSET (var, i))
8084 continue;
8085 offset = VAR_PART_OFFSET (var, i);
8086 if (!var->var_part[i].cur_loc)
8087 {
8088 complete = false;
8089 continue;
8090 }
8091 for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
8092 if (var->var_part[i].cur_loc == lc->loc)
8093 {
8094 initialized = lc->init;
8095 break;
8096 }
8097 gcc_assert (lc);
8098 loc2 = var->var_part[i].cur_loc;
8099 }
8100
8101 offsets[n_var_parts] = offset;
8102 if (!loc2)
8103 {
8104 complete = false;
8105 continue;
8106 }
8107 loc[n_var_parts] = loc2;
8108 mode = GET_MODE (var->var_part[i].cur_loc);
8109 if (mode == VOIDmode && var->onepart)
8110 mode = DECL_MODE (decl);
8111 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
8112
8113 /* Attempt to merge adjacent registers or memory. */
8114 wider_mode = GET_MODE_WIDER_MODE (mode);
8115 for (j = i + 1; j < var->n_var_parts; j++)
8116 if (last_limit <= VAR_PART_OFFSET (var, j))
8117 break;
8118 if (j < var->n_var_parts
8119 && wider_mode != VOIDmode
8120 && var->var_part[j].cur_loc
8121 && mode == GET_MODE (var->var_part[j].cur_loc)
8122 && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
8123 && last_limit == (var->onepart ? 0 : VAR_PART_OFFSET (var, j))
8124 && (loc2 = vt_expand_loc (var->var_part[j].cur_loc, vars))
8125 && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2))
8126 {
8127 rtx new_loc = NULL;
8128
8129 if (REG_P (loc[n_var_parts])
8130 && hard_regno_nregs[REGNO (loc[n_var_parts])][mode] * 2
8131 == hard_regno_nregs[REGNO (loc[n_var_parts])][wider_mode]
8132 && end_hard_regno (mode, REGNO (loc[n_var_parts]))
8133 == REGNO (loc2))
8134 {
8135 if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
8136 new_loc = simplify_subreg (wider_mode, loc[n_var_parts],
8137 mode, 0);
8138 else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
8139 new_loc = simplify_subreg (wider_mode, loc2, mode, 0);
8140 if (new_loc)
8141 {
8142 if (!REG_P (new_loc)
8143 || REGNO (new_loc) != REGNO (loc[n_var_parts]))
8144 new_loc = NULL;
8145 else
8146 REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
8147 }
8148 }
8149 else if (MEM_P (loc[n_var_parts])
8150 && GET_CODE (XEXP (loc2, 0)) == PLUS
8151 && REG_P (XEXP (XEXP (loc2, 0), 0))
8152 && CONST_INT_P (XEXP (XEXP (loc2, 0), 1)))
8153 {
8154 if ((REG_P (XEXP (loc[n_var_parts], 0))
8155 && rtx_equal_p (XEXP (loc[n_var_parts], 0),
8156 XEXP (XEXP (loc2, 0), 0))
8157 && INTVAL (XEXP (XEXP (loc2, 0), 1))
8158 == GET_MODE_SIZE (mode))
8159 || (GET_CODE (XEXP (loc[n_var_parts], 0)) == PLUS
8160 && CONST_INT_P (XEXP (XEXP (loc[n_var_parts], 0), 1))
8161 && rtx_equal_p (XEXP (XEXP (loc[n_var_parts], 0), 0),
8162 XEXP (XEXP (loc2, 0), 0))
8163 && INTVAL (XEXP (XEXP (loc[n_var_parts], 0), 1))
8164 + GET_MODE_SIZE (mode)
8165 == INTVAL (XEXP (XEXP (loc2, 0), 1))))
8166 new_loc = adjust_address_nv (loc[n_var_parts],
8167 wider_mode, 0);
8168 }
8169
8170 if (new_loc)
8171 {
8172 loc[n_var_parts] = new_loc;
8173 mode = wider_mode;
8174 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
8175 i = j;
8176 }
8177 }
8178 ++n_var_parts;
8179 }
8180 type_size_unit = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8181 if ((unsigned HOST_WIDE_INT) last_limit < TREE_INT_CST_LOW (type_size_unit))
8182 complete = false;
8183
8184 if (! flag_var_tracking_uninit)
8185 initialized = VAR_INIT_STATUS_INITIALIZED;
8186
8187 note_vl = NULL_RTX;
8188 if (!complete)
8189 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, NULL_RTX,
8190 (int) initialized);
8191 else if (n_var_parts == 1)
8192 {
8193 rtx expr_list;
8194
8195 if (offsets[0] || GET_CODE (loc[0]) == PARALLEL)
8196 expr_list = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
8197 else
8198 expr_list = loc[0];
8199
8200 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list,
8201 (int) initialized);
8202 }
8203 else if (n_var_parts)
8204 {
8205 rtx parallel;
8206
8207 for (i = 0; i < n_var_parts; i++)
8208 loc[i]
8209 = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));
8210
8211 parallel = gen_rtx_PARALLEL (VOIDmode,
8212 gen_rtvec_v (n_var_parts, loc));
8213 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl,
8214 parallel, (int) initialized);
8215 }
8216
8217 if (where != EMIT_NOTE_BEFORE_INSN)
8218 {
8219 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8220 if (where == EMIT_NOTE_AFTER_CALL_INSN)
8221 NOTE_DURING_CALL_P (note) = true;
8222 }
8223 else
8224 {
8225 /* Make sure that the call-related notes come first. */
8226 while (NEXT_INSN (insn)
8227 && NOTE_P (insn)
8228 && NOTE_DURING_CALL_P (insn))
8229 insn = NEXT_INSN (insn);
8230 if (NOTE_P (insn) && NOTE_DURING_CALL_P (insn))
8231 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8232 else
8233 note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
8234 }
8235 NOTE_VAR_LOCATION (note) = note_vl;
8236
8237 set_dv_changed (var->dv, false);
8238 gcc_assert (var->in_changed_variables);
8239 var->in_changed_variables = false;
8240 htab_clear_slot (changed_variables, varp);
8241
8242 /* Continue traversing the hash table. */
8243 return 1;
8244 }
8245
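/* Illustrative sketch (never called; DECL, LOC0 and LOC1 are
   hypothetical) of the three note payload shapes built above: no
   known location, a single part standing alone, and multiple parts
   as a PARALLEL of (location, offset) EXPR_LISTs.  */

static rtx ATTRIBUTE_UNUSED
example_note_payload (tree decl, rtx loc0, rtx loc1, int n_parts)
{
  rtx parts[2];

  if (n_parts == 0)
    /* No complete location: NULL_RTX marks the variable unknown.  */
    return gen_rtx_VAR_LOCATION (VOIDmode, decl, NULL_RTX,
				 (int) VAR_INIT_STATUS_INITIALIZED);
  if (n_parts == 1)
    /* One part at offset 0: the location rtx stands alone.  */
    return gen_rtx_VAR_LOCATION (VOIDmode, decl, loc0,
				 (int) VAR_INIT_STATUS_INITIALIZED);
  /* Two parts, e.g. at offsets 0 and 8.  */
  parts[0] = gen_rtx_EXPR_LIST (VOIDmode, loc0, GEN_INT (0));
  parts[1] = gen_rtx_EXPR_LIST (VOIDmode, loc1, GEN_INT (8));
  return gen_rtx_VAR_LOCATION (VOIDmode, decl,
			       gen_rtx_PARALLEL (VOIDmode,
						 gen_rtvec_v (2, parts)),
			       (int) VAR_INIT_STATUS_INITIALIZED);
}
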
8246 /* While traversing changed_variables, push onto DATA (a stack of RTX
8247 values) entries that aren't user variables. */
8248
8249 static int
8250 values_to_stack (void **slot, void *data)
8251 {
8252 VEC (rtx, stack) **changed_values_stack = (VEC (rtx, stack) **)data;
8253 variable var = (variable) *slot;
8254
8255 if (var->onepart == ONEPART_VALUE)
8256 VEC_safe_push (rtx, stack, *changed_values_stack, dv_as_value (var->dv));
8257 else if (var->onepart == ONEPART_DEXPR)
8258 VEC_safe_push (rtx, stack, *changed_values_stack,
8259 DECL_RTL_KNOWN_SET (dv_as_decl (var->dv)));
8260
8261 return 1;
8262 }
8263
8264 /* Remove from changed_variables the entry whose DV corresponds to
8265 value or debug_expr VAL. */
8266 static void
8267 remove_value_from_changed_variables (rtx val)
8268 {
8269 decl_or_value dv = dv_from_rtx (val);
8270 void **slot;
8271 variable var;
8272
8273 slot = htab_find_slot_with_hash (changed_variables,
8274 dv, dv_htab_hash (dv), NO_INSERT);
8275 var = (variable) *slot;
8276 var->in_changed_variables = false;
8277 htab_clear_slot (changed_variables, slot);
8278 }
8279
8280 /* If VAL (a value or debug_expr) has backlinks to variables actively
8281 dependent on it in HTAB or in CHANGED_VARIABLES, mark them as
8282 changed, adding to CHANGED_VALUES_STACK any dependencies that may
8283 have dependencies of their own to notify. */
8284
8285 static void
8286 notify_dependents_of_changed_value (rtx val, htab_t htab,
8287 VEC (rtx, stack) **changed_values_stack)
8288 {
8289 void **slot;
8290 variable var;
8291 loc_exp_dep *led;
8292 decl_or_value dv = dv_from_rtx (val);
8293
8294 slot = htab_find_slot_with_hash (changed_variables,
8295 dv, dv_htab_hash (dv), NO_INSERT);
8296 if (!slot)
8297 slot = htab_find_slot_with_hash (htab,
8298 dv, dv_htab_hash (dv), NO_INSERT);
8299 if (!slot)
8300 slot = htab_find_slot_with_hash (dropped_values,
8301 dv, dv_htab_hash (dv), NO_INSERT);
8302 var = (variable) *slot;
8303
8304 while ((led = VAR_LOC_DEP_LST (var)))
8305 {
8306 decl_or_value ldv = led->dv;
8307 void **islot;
8308 variable ivar;
8309
8310 /* Deactivate and remove the backlink, as it was "used up". It
8311 makes no sense to attempt to notify the same entity again:
8312 either it will be recomputed and re-register an active
8313 dependency, or it will still have the changed mark. */
8314 if (led->next)
8315 led->next->pprev = led->pprev;
8316 if (led->pprev)
8317 *led->pprev = led->next;
8318 led->next = NULL;
8319 led->pprev = NULL;
8320
8321 if (dv_changed_p (ldv))
8322 continue;
8323
8324 switch (dv_onepart_p (ldv))
8325 {
8326 case ONEPART_VALUE:
8327 case ONEPART_DEXPR:
8328 set_dv_changed (ldv, true);
8329 VEC_safe_push (rtx, stack, *changed_values_stack, dv_as_rtx (ldv));
8330 break;
8331
8332 default:
8333 islot = htab_find_slot_with_hash (htab, ldv, dv_htab_hash (ldv),
8334 NO_INSERT);
8335 ivar = (variable) *islot;
8336 gcc_checking_assert (!VAR_LOC_DEP_LST (ivar));
8337 variable_was_changed (ivar, NULL);
8338 break;
8339 }
8340 }
8341 }
8342
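/* The backlinks unlinked above use the "pprev" idiom: each node
   stores the address of the pointer that points at it, so a node can
   be removed without knowing the list head and without a search.  A
   minimal self-contained sketch (EXAMPLE_LED is illustrative, not a
   type from this file):  */

struct example_led
{
  struct example_led *next;
  struct example_led **pprev;	/* &head, or &predecessor->next.  */
};

static void ATTRIBUTE_UNUSED
example_led_unlink (struct example_led *n)
{
  /* Repoint the successor's back-pointer, then whatever pointed at
     N, and clear N's links, exactly as done for loc_exp_dep above.  */
  if (n->next)
    n->next->pprev = n->pprev;
  if (n->pprev)
    *n->pprev = n->next;
  n->next = NULL;
  n->pprev = NULL;
}
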
8343 /* Take out of changed_variables any entries that don't refer to user
8344 variables. Back-propagate change notifications from values and
8345 debug_exprs to their active dependencies in HTAB or in
8346 CHANGED_VARIABLES. */
8347
8348 static void
8349 process_changed_values (htab_t htab)
8350 {
8351 int i, n;
8352 rtx val;
8353 VEC (rtx, stack) *changed_values_stack = VEC_alloc (rtx, stack, 20);
8354
8355 /* Move values from changed_variables to changed_values_stack. */
8356 htab_traverse (changed_variables, values_to_stack, &changed_values_stack);
8357
8358 /* Back-propagate change notifications in values while popping
8359 them from the stack. */
8360 for (n = i = VEC_length (rtx, changed_values_stack);
8361 i > 0; i = VEC_length (rtx, changed_values_stack))
8362 {
8363 val = VEC_pop (rtx, changed_values_stack);
8364 notify_dependents_of_changed_value (val, htab, &changed_values_stack);
8365
8366 /* This condition will hold when visiting each of the entries
8367 originally in changed_variables. We can't remove them
8368 earlier because this could drop the backlinks before we got a
8369 chance to use them. */
8370 if (i == n)
8371 {
8372 remove_value_from_changed_variables (val);
8373 n--;
8374 }
8375 }
8376
8377 VEC_free (rtx, stack, changed_values_stack);
8378 }
8379
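/* Trace of the loop above for a hypothetical initial stack [V1, V2],
   so N starts at 2: popping V2 (I == N) notifies its dependents --
   suppose that pushes a new value V3 -- and removes V2 from
   changed_variables, leaving N == 1.  The next iteration pops V3
   with I == 2 > N, so V3 is only propagated, not removed: it was
   never in changed_variables.  Finally I == N == 1 pops V1 and
   removes it.  */
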
8380 /* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
8381 CHANGED_VARIABLES and delete this chain. WHERE specifies whether
8382 the notes shall be emitted before or after instruction INSN. */
8383
8384 static void
8385 emit_notes_for_changes (rtx insn, enum emit_note_where where,
8386 shared_hash vars)
8387 {
8388 emit_note_data data;
8389 htab_t htab = shared_hash_htab (vars);
8390
8391 if (!htab_elements (changed_variables))
8392 return;
8393
8394 if (MAY_HAVE_DEBUG_INSNS)
8395 process_changed_values (htab);
8396
8397 data.insn = insn;
8398 data.where = where;
8399 data.vars = htab;
8400
8401 htab_traverse (changed_variables, emit_note_insn_var_location, &data);
8402 }
8403
8404 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
8405 same variable in hash table DATA or is not there at all. */
8406
8407 static int
8408 emit_notes_for_differences_1 (void **slot, void *data)
8409 {
8410 htab_t new_vars = (htab_t) data;
8411 variable old_var, new_var;
8412
8413 old_var = (variable) *slot;
8414 new_var = (variable) htab_find_with_hash (new_vars, old_var->dv,
8415 dv_htab_hash (old_var->dv));
8416
8417 if (!new_var)
8418 {
8419 /* Variable has disappeared. */
8420 variable empty_var = NULL;
8421
8422 if (old_var->onepart == ONEPART_VALUE
8423 || old_var->onepart == ONEPART_DEXPR)
8424 {
8425 empty_var = variable_from_dropped (old_var->dv, NO_INSERT);
8426 if (empty_var)
8427 {
8428 gcc_checking_assert (!empty_var->in_changed_variables);
8429 if (!VAR_LOC_1PAUX (old_var))
8430 {
8431 VAR_LOC_1PAUX (old_var) = VAR_LOC_1PAUX (empty_var);
8432 VAR_LOC_1PAUX (empty_var) = NULL;
8433 }
8434 else
8435 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
8436 }
8437 }
8438
8439 if (!empty_var)
8440 {
8441 empty_var = (variable) pool_alloc (onepart_pool (old_var->onepart));
8442 empty_var->dv = old_var->dv;
8443 empty_var->refcount = 0;
8444 empty_var->n_var_parts = 0;
8445 empty_var->onepart = old_var->onepart;
8446 empty_var->in_changed_variables = false;
8447 }
8448
8449 if (empty_var->onepart)
8450 {
8451 /* Propagate the auxiliary data to (ultimately)
8452 changed_variables. */
8453 empty_var->var_part[0].loc_chain = NULL;
8454 empty_var->var_part[0].cur_loc = NULL;
8455 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (old_var);
8456 VAR_LOC_1PAUX (old_var) = NULL;
8457 }
8458 variable_was_changed (empty_var, NULL);
8459 /* Continue traversing the hash table. */
8460 return 1;
8461 }
8462 /* Update cur_loc and one-part auxiliary data, before new_var goes
8463 through variable_was_changed. */
8464 if (old_var != new_var && new_var->onepart)
8465 {
8466 gcc_checking_assert (VAR_LOC_1PAUX (new_var) == NULL);
8467 VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (old_var);
8468 VAR_LOC_1PAUX (old_var) = NULL;
8469 new_var->var_part[0].cur_loc = old_var->var_part[0].cur_loc;
8470 }
8471 if (variable_different_p (old_var, new_var))
8472 variable_was_changed (new_var, NULL);
8473
8474 /* Continue traversing the hash table. */
8475 return 1;
8476 }
8477
8478 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
8479 table DATA. */
8480
8481 static int
8482 emit_notes_for_differences_2 (void **slot, void *data)
8483 {
8484 htab_t old_vars = (htab_t) data;
8485 variable old_var, new_var;
8486
8487 new_var = (variable) *slot;
8488 old_var = (variable) htab_find_with_hash (old_vars, new_var->dv,
8489 dv_htab_hash (new_var->dv));
8490 if (!old_var)
8491 {
8492 int i;
8493 for (i = 0; i < new_var->n_var_parts; i++)
8494 new_var->var_part[i].cur_loc = NULL;
8495 variable_was_changed (new_var, NULL);
8496 }
8497
8498 /* Continue traversing the hash table. */
8499 return 1;
8500 }
8501
8502 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
8503 NEW_SET. */
8504
8505 static void
8506 emit_notes_for_differences (rtx insn, dataflow_set *old_set,
8507 dataflow_set *new_set)
8508 {
8509 htab_traverse (shared_hash_htab (old_set->vars),
8510 emit_notes_for_differences_1,
8511 shared_hash_htab (new_set->vars));
8512 htab_traverse (shared_hash_htab (new_set->vars),
8513 emit_notes_for_differences_2,
8514 shared_hash_htab (old_set->vars));
8515 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
8516 }
8517
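/* For example, if OLD_SET tracked variable A in a register but
   NEW_SET doesn't know A at all, emit_notes_for_differences_1 queues
   an empty variable for A, so the note emitted before INSN gives A a
   null location; conversely, a variable present only in NEW_SET is
   queued by emit_notes_for_differences_2 with its cur_locs reset.  */
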
8518 /* Return the next insn after INSN that is not a NOTE_INSN_VAR_LOCATION. */
8519
8520 static rtx
8521 next_non_note_insn_var_location (rtx insn)
8522 {
8523 while (insn)
8524 {
8525 insn = NEXT_INSN (insn);
8526 if (insn == 0
8527 || !NOTE_P (insn)
8528 || NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION)
8529 break;
8530 }
8531
8532 return insn;
8533 }
8534
8535 /* Emit the notes for changes of location parts in the basic block BB. */
8536
8537 static void
8538 emit_notes_in_bb (basic_block bb, dataflow_set *set)
8539 {
8540 unsigned int i;
8541 micro_operation *mo;
8542
8543 dataflow_set_clear (set);
8544 dataflow_set_copy (set, &VTI (bb)->in);
8545
8546 FOR_EACH_VEC_ELT (micro_operation, VTI (bb)->mos, i, mo)
8547 {
8548 rtx insn = mo->insn;
8549 rtx next_insn = next_non_note_insn_var_location (insn);
8550
8551 switch (mo->type)
8552 {
8553 case MO_CALL:
8554 dataflow_set_clear_at_call (set);
8555 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
8556 {
8557 rtx arguments = mo->u.loc, *p = &arguments, note;
8558 while (*p)
8559 {
8560 XEXP (XEXP (*p, 0), 1)
8561 = vt_expand_loc (XEXP (XEXP (*p, 0), 1),
8562 shared_hash_htab (set->vars));
8563 /* If expansion is successful, keep it in the list. */
8564 if (XEXP (XEXP (*p, 0), 1))
8565 p = &XEXP (*p, 1);
8566 /* Otherwise, if the following item is the data_value for it,
8567 drop it too. */
8568 else if (XEXP (*p, 1)
8569 && REG_P (XEXP (XEXP (*p, 0), 0))
8570 && MEM_P (XEXP (XEXP (XEXP (*p, 1), 0), 0))
8571 && REG_P (XEXP (XEXP (XEXP (XEXP (*p, 1), 0), 0),
8572 0))
8573 && REGNO (XEXP (XEXP (*p, 0), 0))
8574 == REGNO (XEXP (XEXP (XEXP (XEXP (*p, 1), 0),
8575 0), 0)))
8576 *p = XEXP (XEXP (*p, 1), 1);
8577 /* Just drop this item. */
8578 else
8579 *p = XEXP (*p, 1);
8580 }
8581 note = emit_note_after (NOTE_INSN_CALL_ARG_LOCATION, insn);
8582 NOTE_VAR_LOCATION (note) = arguments;
8583 }
8584 break;
8585
8586 case MO_USE:
8587 {
8588 rtx loc = mo->u.loc;
8589
8590 if (REG_P (loc))
8591 var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
8592 else
8593 var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
8594
8595 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
8596 }
8597 break;
8598
8599 case MO_VAL_LOC:
8600 {
8601 rtx loc = mo->u.loc;
8602 rtx val, vloc;
8603 tree var;
8604
8605 if (GET_CODE (loc) == CONCAT)
8606 {
8607 val = XEXP (loc, 0);
8608 vloc = XEXP (loc, 1);
8609 }
8610 else
8611 {
8612 val = NULL_RTX;
8613 vloc = loc;
8614 }
8615
8616 var = PAT_VAR_LOCATION_DECL (vloc);
8617
8618 clobber_variable_part (set, NULL_RTX,
8619 dv_from_decl (var), 0, NULL_RTX);
8620 if (val)
8621 {
8622 if (VAL_NEEDS_RESOLUTION (loc))
8623 val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
8624 set_variable_part (set, val, dv_from_decl (var), 0,
8625 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
8626 INSERT);
8627 }
8628 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
8629 set_variable_part (set, PAT_VAR_LOCATION_LOC (vloc),
8630 dv_from_decl (var), 0,
8631 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
8632 INSERT);
8633
8634 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
8635 }
8636 break;
8637
8638 case MO_VAL_USE:
8639 {
8640 rtx loc = mo->u.loc;
8641 rtx val, vloc, uloc;
8642
8643 vloc = uloc = XEXP (loc, 1);
8644 val = XEXP (loc, 0);
8645
8646 if (GET_CODE (val) == CONCAT)
8647 {
8648 uloc = XEXP (val, 1);
8649 val = XEXP (val, 0);
8650 }
8651
8652 if (VAL_NEEDS_RESOLUTION (loc))
8653 val_resolve (set, val, vloc, insn);
8654 else
8655 val_store (set, val, uloc, insn, false);
8656
8657 if (VAL_HOLDS_TRACK_EXPR (loc))
8658 {
8659 if (GET_CODE (uloc) == REG)
8660 var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
8661 NULL);
8662 else if (GET_CODE (uloc) == MEM)
8663 var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
8664 NULL);
8665 }
8666
8667 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
8668 }
8669 break;
8670
8671 case MO_VAL_SET:
8672 {
8673 rtx loc = mo->u.loc;
8674 rtx val, vloc, uloc;
8675
8676 vloc = loc;
8677 uloc = XEXP (vloc, 1);
8678 val = XEXP (vloc, 0);
8679 vloc = uloc;
8680
8681 if (GET_CODE (val) == CONCAT)
8682 {
8683 vloc = XEXP (val, 1);
8684 val = XEXP (val, 0);
8685 }
8686
8687 if (GET_CODE (vloc) == SET)
8688 {
8689 rtx vsrc = SET_SRC (vloc);
8690
8691 gcc_assert (val != vsrc);
8692 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
8693
8694 vloc = SET_DEST (vloc);
8695
8696 if (VAL_NEEDS_RESOLUTION (loc))
8697 val_resolve (set, val, vsrc, insn);
8698 }
8699 else if (VAL_NEEDS_RESOLUTION (loc))
8700 {
8701 gcc_assert (GET_CODE (uloc) == SET
8702 && GET_CODE (SET_SRC (uloc)) == REG);
8703 val_resolve (set, val, SET_SRC (uloc), insn);
8704 }
8705
8706 if (VAL_HOLDS_TRACK_EXPR (loc))
8707 {
8708 if (VAL_EXPR_IS_CLOBBERED (loc))
8709 {
8710 if (REG_P (uloc))
8711 var_reg_delete (set, uloc, true);
8712 else if (MEM_P (uloc))
8713 var_mem_delete (set, uloc, true);
8714 }
8715 else
8716 {
8717 bool copied_p = VAL_EXPR_IS_COPIED (loc);
8718 rtx set_src = NULL;
8719 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
8720
8721 if (GET_CODE (uloc) == SET)
8722 {
8723 set_src = SET_SRC (uloc);
8724 uloc = SET_DEST (uloc);
8725 }
8726
8727 if (copied_p)
8728 {
8729 status = find_src_status (set, set_src);
8730
8731 set_src = find_src_set_src (set, set_src);
8732 }
8733
8734 if (REG_P (uloc))
8735 var_reg_delete_and_set (set, uloc, !copied_p,
8736 status, set_src);
8737 else if (MEM_P (uloc))
8738 var_mem_delete_and_set (set, uloc, !copied_p,
8739 status, set_src);
8740 }
8741 }
8742 else if (REG_P (uloc))
8743 var_regno_delete (set, REGNO (uloc));
8744
8745 val_store (set, val, vloc, insn, true);
8746
8747 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
8748 set->vars);
8749 }
8750 break;
8751
8752 case MO_SET:
8753 {
8754 rtx loc = mo->u.loc;
8755 rtx set_src = NULL;
8756
8757 if (GET_CODE (loc) == SET)
8758 {
8759 set_src = SET_SRC (loc);
8760 loc = SET_DEST (loc);
8761 }
8762
8763 if (REG_P (loc))
8764 var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
8765 set_src);
8766 else
8767 var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
8768 set_src);
8769
8770 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
8771 set->vars);
8772 }
8773 break;
8774
8775 case MO_COPY:
8776 {
8777 rtx loc = mo->u.loc;
8778 enum var_init_status src_status;
8779 rtx set_src = NULL;
8780
8781 if (GET_CODE (loc) == SET)
8782 {
8783 set_src = SET_SRC (loc);
8784 loc = SET_DEST (loc);
8785 }
8786
8787 src_status = find_src_status (set, set_src);
8788 set_src = find_src_set_src (set, set_src);
8789
8790 if (REG_P (loc))
8791 var_reg_delete_and_set (set, loc, false, src_status, set_src);
8792 else
8793 var_mem_delete_and_set (set, loc, false, src_status, set_src);
8794
8795 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
8796 set->vars);
8797 }
8798 break;
8799
8800 case MO_USE_NO_VAR:
8801 {
8802 rtx loc = mo->u.loc;
8803
8804 if (REG_P (loc))
8805 var_reg_delete (set, loc, false);
8806 else
8807 var_mem_delete (set, loc, false);
8808
8809 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
8810 }
8811 break;
8812
8813 case MO_CLOBBER:
8814 {
8815 rtx loc = mo->u.loc;
8816
8817 if (REG_P (loc))
8818 var_reg_delete (set, loc, true);
8819 else
8820 var_mem_delete (set, loc, true);
8821
8822 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
8823 set->vars);
8824 }
8825 break;
8826
8827 case MO_ADJUST:
8828 set->stack_adjust += mo->u.adjust;
8829 break;
8830 }
8831 }
8832 }
8833
8834 /* Emit notes for the whole function. */
8835
8836 static void
8837 vt_emit_notes (void)
8838 {
8839 basic_block bb;
8840 dataflow_set cur;
8841
8842 gcc_assert (!htab_elements (changed_variables));
8843
8844 /* Free memory occupied by the out hash tables, as they aren't used
8845 anymore. */
8846 FOR_EACH_BB (bb)
8847 dataflow_set_clear (&VTI (bb)->out);
8848
8849 /* Enable emitting notes by functions (mainly by set_variable_part and
8850 delete_variable_part). */
8851 emit_notes = true;
8852
8853 if (MAY_HAVE_DEBUG_INSNS)
8854 dropped_values = htab_create (cselib_get_next_uid () * 2,
8855 variable_htab_hash, variable_htab_eq,
8856 variable_htab_free);
8857
8858 dataflow_set_init (&cur);
8859
8860 FOR_EACH_BB (bb)
8861 {
8862 /* Emit the notes for changes of variable locations between two
8863 subsequent basic blocks. */
8864 emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);
8865
8866 /* Emit the notes for the changes in the basic block itself. */
8867 emit_notes_in_bb (bb, &cur);
8868
8869 /* Free memory occupied by the in hash table, we won't need it
8870 again. */
8871 dataflow_set_clear (&VTI (bb)->in);
8872 }
8873 #ifdef ENABLE_CHECKING
8874 htab_traverse (shared_hash_htab (cur.vars),
8875 emit_notes_for_differences_1,
8876 shared_hash_htab (empty_shared_hash));
8877 #endif
8878 dataflow_set_destroy (&cur);
8879
8880 if (MAY_HAVE_DEBUG_INSNS)
8881 htab_delete (dropped_values);
8882
8883 emit_notes = false;
8884 }
8885
8886 /* If there is a declaration and offset associated with register/memory RTL,
8887 assign declaration to *DECLP and offset to *OFFSETP, and return true. */
8888
8889 static bool
8890 vt_get_decl_and_offset (rtx rtl, tree *declp, HOST_WIDE_INT *offsetp)
8891 {
8892 if (REG_P (rtl))
8893 {
8894 if (REG_ATTRS (rtl))
8895 {
8896 *declp = REG_EXPR (rtl);
8897 *offsetp = REG_OFFSET (rtl);
8898 return true;
8899 }
8900 }
8901 else if (MEM_P (rtl))
8902 {
8903 if (MEM_ATTRS (rtl))
8904 {
8905 *declp = MEM_EXPR (rtl);
8906 *offsetp = INT_MEM_OFFSET (rtl);
8907 return true;
8908 }
8909 }
8910 return false;
8911 }
8912
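/* Hedged usage sketch for the helper above (LOC is hypothetical;
   real callers pass DECL_INCOMING_RTL or DECL_RTL of a parameter,
   as vt_add_function_parameter below does):  */

static bool ATTRIBUTE_UNUSED
example_get_decl_and_offset (rtx loc)
{
  tree decl;
  HOST_WIDE_INT offset;

  if (!vt_get_decl_and_offset (loc, &decl, &offset))
    return false;	/* LOC carries no decl/offset attributes.  */
  /* DECL and OFFSET now identify which part of which variable LOC
     holds.  */
  return decl != NULL_TREE;
}
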
8913 /* Record the value for the ENTRY_VALUE of RTL as a global equivalence
8914 of VAL. */
8915
8916 static void
8917 record_entry_value (cselib_val *val, rtx rtl)
8918 {
8919 rtx ev = gen_rtx_ENTRY_VALUE (GET_MODE (rtl));
8920
8921 ENTRY_VALUE_EXP (ev) = rtl;
8922
8923 cselib_add_permanent_equiv (val, ev, get_insns ());
8924 }
8925
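/* E.g. for a parameter arriving in a register R, this records the
   permanent equivalence VAL == (entry_value R), which dwarf2out can
   later turn into DW_OP_GNU_entry_value expressions valid throughout
   the function (a sketch of the intent; the DWARF side lives in
   dwarf2out.c).  */
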
8926 /* Insert function parameter PARM into the IN and OUT sets of ENTRY_BLOCK. */
8927
8928 static void
8929 vt_add_function_parameter (tree parm)
8930 {
8931 rtx decl_rtl = DECL_RTL_IF_SET (parm);
8932 rtx incoming = DECL_INCOMING_RTL (parm);
8933 tree decl;
8934 enum machine_mode mode;
8935 HOST_WIDE_INT offset;
8936 dataflow_set *out;
8937 decl_or_value dv;
8938
8939 if (TREE_CODE (parm) != PARM_DECL)
8940 return;
8941
8942 if (!decl_rtl || !incoming)
8943 return;
8944
8945 if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
8946 return;
8947
8948 /* If there is a DRAP register, rewrite the incoming location of parameters
8949 passed on the stack into MEMs based on the argument pointer, as the DRAP
8950 register can be reused for other purposes and we do not track locations
8951 based on generic registers. But the prerequisite is that this argument
8952 pointer be also the virtual CFA pointer, see vt_initialize. */
8953 if (MEM_P (incoming)
8954 && stack_realign_drap
8955 && arg_pointer_rtx == cfa_base_rtx
8956 && (XEXP (incoming, 0) == crtl->args.internal_arg_pointer
8957 || (GET_CODE (XEXP (incoming, 0)) == PLUS
8958 && XEXP (XEXP (incoming, 0), 0)
8959 == crtl->args.internal_arg_pointer
8960 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
8961 {
8962 HOST_WIDE_INT off = -FIRST_PARM_OFFSET (current_function_decl);
8963 if (GET_CODE (XEXP (incoming, 0)) == PLUS)
8964 off += INTVAL (XEXP (XEXP (incoming, 0), 1));
8965 incoming
8966 = replace_equiv_address_nv (incoming,
8967 plus_constant (arg_pointer_rtx, off));
8968 }
8969
8970 #ifdef HAVE_window_save
8971 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
8972 If the target machine has an explicit window save instruction, the
8973 actual entry value is the corresponding OUTGOING_REGNO instead. */
8974 if (REG_P (incoming)
8975 && HARD_REGISTER_P (incoming)
8976 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
8977 {
8978 parm_reg_t *p
8979 = VEC_safe_push (parm_reg_t, gc, windowed_parm_regs, NULL);
8980 p->incoming = incoming;
8981 incoming
8982 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
8983 OUTGOING_REGNO (REGNO (incoming)), 0);
8984 p->outgoing = incoming;
8985 }
8986 else if (MEM_P (incoming)
8987 && REG_P (XEXP (incoming, 0))
8988 && HARD_REGISTER_P (XEXP (incoming, 0)))
8989 {
8990 rtx reg = XEXP (incoming, 0);
8991 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
8992 {
8993 parm_reg_t *p
8994 = VEC_safe_push (parm_reg_t, gc, windowed_parm_regs, NULL);
8995 p->incoming = reg;
8996 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
8997 p->outgoing = reg;
8998 incoming = replace_equiv_address_nv (incoming, reg);
8999 }
9000 }
9001 #endif
9002
9003 if (!vt_get_decl_and_offset (incoming, &decl, &offset))
9004 {
9005 if (REG_P (incoming) || MEM_P (incoming))
9006 {
9007 /* This means argument is passed by invisible reference. */
9008 offset = 0;
9009 decl = parm;
9010 incoming = gen_rtx_MEM (GET_MODE (decl_rtl), incoming);
9011 }
9012 else
9013 {
9014 if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
9015 return;
9016 offset += byte_lowpart_offset (GET_MODE (incoming),
9017 GET_MODE (decl_rtl));
9018 }
9019 }
9020
9021 if (!decl)
9022 return;
9023
9024 if (parm != decl)
9025 {
9026 /* Assume that DECL_RTL was a pseudo that got spilled to
9027 memory. The spill slot sharing code will force the
9028 memory to reference spill_slot_decl (%sfp), so we don't
9029 match above. That's ok, the pseudo must have referenced
9030 the entire parameter, so just reset OFFSET. */
9031 gcc_assert (decl == get_spill_slot_decl (false));
9032 offset = 0;
9033 }
9034
9035 if (!track_loc_p (incoming, parm, offset, false, &mode, &offset))
9036 return;
9037
9038 out = &VTI (ENTRY_BLOCK_PTR)->out;
9039
9040 dv = dv_from_decl (parm);
9041
9042 if (target_for_debug_bind (parm)
9043 /* We can't deal with these right now, because this kind of
9044 variable is single-part. ??? We could handle parallels
9045 that describe multiple locations for the same single
9046 value, but ATM we don't. */
9047 && GET_CODE (incoming) != PARALLEL)
9048 {
9049 cselib_val *val;
9050
9051 /* ??? We shouldn't ever hit this, but it may happen because
9052 arguments passed by invisible reference aren't dealt with
9053 above: incoming-rtl will have Pmode rather than the
9054 expected mode for the type. */
9055 if (offset)
9056 return;
9057
9058 val = cselib_lookup_from_insn (var_lowpart (mode, incoming), mode, true,
9059 VOIDmode, get_insns ());
9060
9061 /* ??? Float-typed values in memory are not handled by
9062 cselib. */
9063 if (val)
9064 {
9065 preserve_value (val);
9066 set_variable_part (out, val->val_rtx, dv, offset,
9067 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9068 dv = dv_from_value (val->val_rtx);
9069 }
9070 }
9071
9072 if (REG_P (incoming))
9073 {
9074 incoming = var_lowpart (mode, incoming);
9075 gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
9076 attrs_list_insert (&out->regs[REGNO (incoming)], dv, offset,
9077 incoming);
9078 set_variable_part (out, incoming, dv, offset,
9079 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9080 if (dv_is_value_p (dv))
9081 {
9082 record_entry_value (CSELIB_VAL_PTR (dv_as_value (dv)), incoming);
9083 if (TREE_CODE (TREE_TYPE (parm)) == REFERENCE_TYPE
9084 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))
9085 {
9086 enum machine_mode indmode
9087 = TYPE_MODE (TREE_TYPE (TREE_TYPE (parm)));
9088 rtx mem = gen_rtx_MEM (indmode, incoming);
9089 cselib_val *val = cselib_lookup_from_insn (mem, indmode, true,
9090 VOIDmode,
9091 get_insns ());
9092 if (val)
9093 {
9094 preserve_value (val);
9095 record_entry_value (val, mem);
9096 set_variable_part (out, mem, dv_from_value (val->val_rtx), 0,
9097 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9098 }
9099 }
9100 }
9101 }
9102 else if (MEM_P (incoming))
9103 {
9104 incoming = var_lowpart (mode, incoming);
9105 set_variable_part (out, incoming, dv, offset,
9106 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9107 }
9108 }
9109
9110 /* Insert function parameters into the IN and OUT sets of ENTRY_BLOCK. */
9111
9112 static void
9113 vt_add_function_parameters (void)
9114 {
9115 tree parm;
9116
9117 for (parm = DECL_ARGUMENTS (current_function_decl);
9118 parm; parm = DECL_CHAIN (parm))
9119 vt_add_function_parameter (parm);
9120
9121 if (DECL_HAS_VALUE_EXPR_P (DECL_RESULT (current_function_decl)))
9122 {
9123 tree vexpr = DECL_VALUE_EXPR (DECL_RESULT (current_function_decl));
9124
9125 if (TREE_CODE (vexpr) == INDIRECT_REF)
9126 vexpr = TREE_OPERAND (vexpr, 0);
9127
9128 if (TREE_CODE (vexpr) == PARM_DECL
9129 && DECL_ARTIFICIAL (vexpr)
9130 && !DECL_IGNORED_P (vexpr)
9131 && DECL_NAMELESS (vexpr))
9132 vt_add_function_parameter (vexpr);
9133 }
9134 }
9135
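/* The DECL_RESULT handling above covers functions returning in
   memory: the result's DECL_VALUE_EXPR is then of the form
   (*<retval_ptr>), and stripping the INDIRECT_REF exposes the
   artificial, nameless pointer PARM_DECL, which is then tracked like
   any other parameter.  */
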
9136 /* Return true if INSN in the prologue initializes hard_frame_pointer_rtx. */
9137
9138 static bool
9139 fp_setter (rtx insn)
9140 {
9141 rtx pat = PATTERN (insn);
9142 if (RTX_FRAME_RELATED_P (insn))
9143 {
9144 rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
9145 if (expr)
9146 pat = XEXP (expr, 0);
9147 }
9148 if (GET_CODE (pat) == SET)
9149 return SET_DEST (pat) == hard_frame_pointer_rtx;
9150 else if (GET_CODE (pat) == PARALLEL)
9151 {
9152 int i;
9153 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
9154 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
9155 && SET_DEST (XVECEXP (pat, 0, i)) == hard_frame_pointer_rtx)
9156 return true;
9157 }
9158 return false;
9159 }
9160
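/* E.g. a prologue insn whose pattern is (set (reg fp) (reg sp)), a
   PARALLEL containing such a SET, or a frame-related insn whose
   REG_FRAME_RELATED_EXPR note contains one, all count as setting the
   hard frame pointer.  */
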
9161 /* Initialize cfa_base_rtx, create a preserved VALUE for it and
9162 ensure it isn't flushed during cselib_reset_table.
9163 Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
9164 has been eliminated. */
9165
9166 static void
9167 vt_init_cfa_base (void)
9168 {
9169 cselib_val *val;
9170
9171 #ifdef FRAME_POINTER_CFA_OFFSET
9172 cfa_base_rtx = frame_pointer_rtx;
9173 cfa_base_offset = -FRAME_POINTER_CFA_OFFSET (current_function_decl);
9174 #else
9175 cfa_base_rtx = arg_pointer_rtx;
9176 cfa_base_offset = -ARG_POINTER_CFA_OFFSET (current_function_decl);
9177 #endif
9178 if (cfa_base_rtx == hard_frame_pointer_rtx
9179 || !fixed_regs[REGNO (cfa_base_rtx)])
9180 {
9181 cfa_base_rtx = NULL_RTX;
9182 return;
9183 }
9184 if (!MAY_HAVE_DEBUG_INSNS)
9185 return;
9186
9187 /* Tell alias analysis that cfa_base_rtx should share
9188 find_base_term value with stack pointer or hard frame pointer. */
9189 if (!frame_pointer_needed)
9190 vt_equate_reg_base_value (cfa_base_rtx, stack_pointer_rtx);
9191 else if (!crtl->stack_realign_tried)
9192 vt_equate_reg_base_value (cfa_base_rtx, hard_frame_pointer_rtx);
9193
9194 val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1,
9195 VOIDmode, get_insns ());
9196 preserve_value (val);
9197 cselib_preserve_cfa_base_value (val, REGNO (cfa_base_rtx));
9198 var_reg_decl_set (&VTI (ENTRY_BLOCK_PTR)->out, cfa_base_rtx,
9199 VAR_INIT_STATUS_INITIALIZED, dv_from_value (val->val_rtx),
9200 0, NULL_RTX, INSERT);
9201 }
9202
9203 /* Allocate and initialize the data structures for variable tracking
9204 and parse the RTL to get the micro operations. */
9205
9206 static bool
9207 vt_initialize (void)
9208 {
9209 basic_block bb, prologue_bb = single_succ (ENTRY_BLOCK_PTR);
9210 HOST_WIDE_INT fp_cfa_offset = -1;
9211
9212 alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));
9213
9214 attrs_pool = create_alloc_pool ("attrs_def pool",
9215 sizeof (struct attrs_def), 1024);
9216 var_pool = create_alloc_pool ("variable_def pool",
9217 sizeof (struct variable_def)
9218 + (MAX_VAR_PARTS - 1)
9219 * sizeof (((variable)NULL)->var_part[0]), 64);
9220 loc_chain_pool = create_alloc_pool ("location_chain_def pool",
9221 sizeof (struct location_chain_def),
9222 1024);
9223 shared_hash_pool = create_alloc_pool ("shared_hash_def pool",
9224 sizeof (struct shared_hash_def), 256);
9225 empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
9226 empty_shared_hash->refcount = 1;
9227 empty_shared_hash->htab
9228 = htab_create (1, variable_htab_hash, variable_htab_eq,
9229 variable_htab_free);
9230 changed_variables = htab_create (10, variable_htab_hash, variable_htab_eq,
9231 variable_htab_free);
9232
9233 /* Init the IN and OUT sets. */
9234 FOR_ALL_BB (bb)
9235 {
9236 VTI (bb)->visited = false;
9237 VTI (bb)->flooded = false;
9238 dataflow_set_init (&VTI (bb)->in);
9239 dataflow_set_init (&VTI (bb)->out);
9240 VTI (bb)->permp = NULL;
9241 }
9242
9243 if (MAY_HAVE_DEBUG_INSNS)
9244 {
9245 cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
9246 scratch_regs = BITMAP_ALLOC (NULL);
9247 valvar_pool = create_alloc_pool ("small variable_def pool",
9248 sizeof (struct variable_def), 256);
9249 preserved_values = VEC_alloc (rtx, heap, 256);
9250 }
9251 else
9252 {
9253 scratch_regs = NULL;
9254 valvar_pool = NULL;
9255 }
9256
9257 /* In order to factor out the adjustments made to the stack pointer or to
9258 the hard frame pointer and thus be able to use DW_OP_fbreg operations
9259 instead of individual location lists, we're going to rewrite MEMs based
9260 on them into MEMs based on the CFA by de-eliminating stack_pointer_rtx
9261 or hard_frame_pointer_rtx to the virtual CFA pointer frame_pointer_rtx
9262 resp. arg_pointer_rtx. We can do this either when there is no frame
9263 pointer in the function and stack adjustments are consistent for all
9264 basic blocks or when there is a frame pointer and no stack realignment.
9265 But we first have to check that frame_pointer_rtx resp. arg_pointer_rtx
9266 has been eliminated. */
9267 if (!frame_pointer_needed)
9268 {
9269 rtx reg, elim;
9270
9271 if (!vt_stack_adjustments ())
9272 return false;
9273
9274 #ifdef FRAME_POINTER_CFA_OFFSET
9275 reg = frame_pointer_rtx;
9276 #else
9277 reg = arg_pointer_rtx;
9278 #endif
9279 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
9280 if (elim != reg)
9281 {
9282 if (GET_CODE (elim) == PLUS)
9283 elim = XEXP (elim, 0);
9284 if (elim == stack_pointer_rtx)
9285 vt_init_cfa_base ();
9286 }
9287 }
9288 else if (!crtl->stack_realign_tried)
9289 {
9290 rtx reg, elim;
9291
9292 #ifdef FRAME_POINTER_CFA_OFFSET
9293 reg = frame_pointer_rtx;
9294 fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
9295 #else
9296 reg = arg_pointer_rtx;
9297 fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
9298 #endif
9299 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
9300 if (elim != reg)
9301 {
9302 if (GET_CODE (elim) == PLUS)
9303 {
9304 fp_cfa_offset -= INTVAL (XEXP (elim, 1));
9305 elim = XEXP (elim, 0);
9306 }
9307 if (elim != hard_frame_pointer_rtx)
9308 fp_cfa_offset = -1;
9309 }
9310 else
9311 fp_cfa_offset = -1;
9312 }
9313
9314 /* If the stack is realigned and a DRAP register is used, we're going to
9315 rewrite MEMs based on it representing incoming locations of parameters
9316 passed on the stack into MEMs based on the argument pointer. Although
9317 we aren't going to rewrite other MEMs, we still need to initialize the
9318 virtual CFA pointer in order to ensure that the argument pointer will
9319 be seen as a constant throughout the function.
9320
9321 ??? This doesn't work if FRAME_POINTER_CFA_OFFSET is defined. */
9322 else if (stack_realign_drap)
9323 {
9324 rtx reg, elim;
9325
9326 #ifdef FRAME_POINTER_CFA_OFFSET
9327 reg = frame_pointer_rtx;
9328 #else
9329 reg = arg_pointer_rtx;
9330 #endif
9331 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
9332 if (elim != reg)
9333 {
9334 if (GET_CODE (elim) == PLUS)
9335 elim = XEXP (elim, 0);
9336 if (elim == hard_frame_pointer_rtx)
9337 vt_init_cfa_base ();
9338 }
9339 }
9340
9341 hard_frame_pointer_adjustment = -1;
9342
9343 vt_add_function_parameters ();
9344
9345 FOR_EACH_BB (bb)
9346 {
9347 rtx insn;
9348 HOST_WIDE_INT pre, post = 0;
9349 basic_block first_bb, last_bb;
9350
9351 if (MAY_HAVE_DEBUG_INSNS)
9352 {
9353 cselib_record_sets_hook = add_with_sets;
9354 if (dump_file && (dump_flags & TDF_DETAILS))
9355 fprintf (dump_file, "first value: %i\n",
9356 cselib_get_next_uid ());
9357 }
9358
9359 first_bb = bb;
9360 for (;;)
9361 {
9362 edge e;
9363 if (bb->next_bb == EXIT_BLOCK_PTR
9364 || ! single_pred_p (bb->next_bb))
9365 break;
9366 e = find_edge (bb, bb->next_bb);
9367 if (! e || (e->flags & EDGE_FALLTHRU) == 0)
9368 break;
9369 bb = bb->next_bb;
9370 }
9371 last_bb = bb;
9372
9373 /* Add the micro-operations to the vector. */
9374 FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
9375 {
9376 HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
9377 VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;
9378 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
9379 insn = NEXT_INSN (insn))
9380 {
9381 if (INSN_P (insn))
9382 {
9383 if (!frame_pointer_needed)
9384 {
9385 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
9386 if (pre)
9387 {
9388 micro_operation mo;
9389 mo.type = MO_ADJUST;
9390 mo.u.adjust = pre;
9391 mo.insn = insn;
9392 if (dump_file && (dump_flags & TDF_DETAILS))
9393 log_op_type (PATTERN (insn), bb, insn,
9394 MO_ADJUST, dump_file);
9395 VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
9396 &mo);
9397 VTI (bb)->out.stack_adjust += pre;
9398 }
9399 }
9400
9401 cselib_hook_called = false;
9402 adjust_insn (bb, insn);
9403 if (MAY_HAVE_DEBUG_INSNS)
9404 {
9405 if (CALL_P (insn))
9406 prepare_call_arguments (bb, insn);
9407 cselib_process_insn (insn);
9408 if (dump_file && (dump_flags & TDF_DETAILS))
9409 {
9410 print_rtl_single (dump_file, insn);
9411 dump_cselib_table (dump_file);
9412 }
9413 }
9414 if (!cselib_hook_called)
9415 add_with_sets (insn, 0, 0);
9416 cancel_changes (0);
9417
9418 if (!frame_pointer_needed && post)
9419 {
9420 micro_operation mo;
9421 mo.type = MO_ADJUST;
9422 mo.u.adjust = post;
9423 mo.insn = insn;
9424 if (dump_file && (dump_flags & TDF_DETAILS))
9425 log_op_type (PATTERN (insn), bb, insn,
9426 MO_ADJUST, dump_file);
9427 VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
9428 &mo);
9429 VTI (bb)->out.stack_adjust += post;
9430 }
9431
9432 if (bb == prologue_bb
9433 && fp_cfa_offset != -1
9434 && hard_frame_pointer_adjustment == -1
9435 && RTX_FRAME_RELATED_P (insn)
9436 && fp_setter (insn))
9437 {
9438 vt_init_cfa_base ();
9439 hard_frame_pointer_adjustment = fp_cfa_offset;
9440 }
9441 }
9442 }
9443 gcc_assert (offset == VTI (bb)->out.stack_adjust);
9444 }
9445
9446 bb = last_bb;
9447
9448 if (MAY_HAVE_DEBUG_INSNS)
9449 {
9450 cselib_preserve_only_values ();
9451 cselib_reset_table (cselib_get_next_uid ());
9452 cselib_record_sets_hook = NULL;
9453 }
9454 }
9455
9456 hard_frame_pointer_adjustment = -1;
9457 VTI (ENTRY_BLOCK_PTR)->flooded = true;
9458 cfa_base_rtx = NULL_RTX;
9459 return true;
9460 }
9461
9462 /* This is *not* reset after each function. It gives each
9463 NOTE_INSN_DELETED_DEBUG_LABEL in the entire compilation
9464 a unique label number. */
9465
9466 static int debug_label_num = 1;
9467
9468 /* Get rid of all debug insns from the insn stream. */
9469
9470 static void
9471 delete_debug_insns (void)
9472 {
9473 basic_block bb;
9474 rtx insn, next;
9475
9476 if (!MAY_HAVE_DEBUG_INSNS)
9477 return;
9478
9479 FOR_EACH_BB (bb)
9480 {
9481 FOR_BB_INSNS_SAFE (bb, insn, next)
9482 if (DEBUG_INSN_P (insn))
9483 {
9484 tree decl = INSN_VAR_LOCATION_DECL (insn);
9485 if (TREE_CODE (decl) == LABEL_DECL
9486 && DECL_NAME (decl)
9487 && !DECL_RTL_SET_P (decl))
9488 {
9489 PUT_CODE (insn, NOTE);
9490 NOTE_KIND (insn) = NOTE_INSN_DELETED_DEBUG_LABEL;
9491 NOTE_DELETED_LABEL_NAME (insn)
9492 = IDENTIFIER_POINTER (DECL_NAME (decl));
9493 SET_DECL_RTL (decl, insn);
9494 CODE_LABEL_NUMBER (insn) = debug_label_num++;
9495 }
9496 else
9497 delete_insn (insn);
9498 }
9499 }
9500 }
9501
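/* E.g. a debug bind for a named LABEL_DECL that never got an actual
   label is recycled in place into a NOTE_INSN_DELETED_DEBUG_LABEL
   carrying the label's name and a fresh number, so that debug info
   can still give the deleted label an address.  */
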
9502 /* Run a fast, BB-local-only version of var tracking, to take care of
9503 information that we don't do global analysis on, so that not all
9504 such information is lost. If SKIPPED holds, we're skipping the global
9505 pass entirely, so we should try to use information it would have
9506 handled as well. */
9507
9508 static void
9509 vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
9510 {
9511 /* ??? Just skip it all for now. */
9512 delete_debug_insns ();
9513 }
9514
9515 /* Free the data structures needed for variable tracking. */
9516
9517 static void
9518 vt_finalize (void)
9519 {
9520 basic_block bb;
9521
9522 FOR_EACH_BB (bb)
9523 {
9524 VEC_free (micro_operation, heap, VTI (bb)->mos);
9525 }
9526
9527 FOR_ALL_BB (bb)
9528 {
9529 dataflow_set_destroy (&VTI (bb)->in);
9530 dataflow_set_destroy (&VTI (bb)->out);
9531 if (VTI (bb)->permp)
9532 {
9533 dataflow_set_destroy (VTI (bb)->permp);
9534 XDELETE (VTI (bb)->permp);
9535 }
9536 }
9537 free_aux_for_blocks ();
9538 htab_delete (empty_shared_hash->htab);
9539 htab_delete (changed_variables);
9540 free_alloc_pool (attrs_pool);
9541 free_alloc_pool (var_pool);
9542 free_alloc_pool (loc_chain_pool);
9543 free_alloc_pool (shared_hash_pool);
9544
9545 if (MAY_HAVE_DEBUG_INSNS)
9546 {
9547 free_alloc_pool (valvar_pool);
9548 VEC_free (rtx, heap, preserved_values);
9549 cselib_finish ();
9550 BITMAP_FREE (scratch_regs);
9551 scratch_regs = NULL;
9552 }
9553
9554 #ifdef HAVE_window_save
9555 VEC_free (parm_reg_t, gc, windowed_parm_regs);
9556 #endif
9557
9558 if (vui_vec)
9559 XDELETEVEC (vui_vec);
9560 vui_vec = NULL;
9561 vui_allocated = 0;
9562 }
9563
9564 /* The entry point to variable tracking pass. */
9565
9566 static inline unsigned int
9567 variable_tracking_main_1 (void)
9568 {
9569 bool success;
9570
9571 if (flag_var_tracking_assignments < 0)
9572 {
9573 delete_debug_insns ();
9574 return 0;
9575 }
9576
9577 if (n_basic_blocks > 500 && n_edges / n_basic_blocks >= 20)
9578 {
9579 vt_debug_insns_local (true);
9580 return 0;
9581 }
9582
9583 mark_dfs_back_edges ();
9584 if (!vt_initialize ())
9585 {
9586 vt_finalize ();
9587 vt_debug_insns_local (true);
9588 return 0;
9589 }
9590
9591 success = vt_find_locations ();
9592
9593 if (!success && flag_var_tracking_assignments > 0)
9594 {
9595 vt_finalize ();
9596
9597 delete_debug_insns ();
9598
9599 /* This is later restored by our caller. */
9600 flag_var_tracking_assignments = 0;
9601
9602 success = vt_initialize ();
9603 gcc_assert (success);
9604
9605 success = vt_find_locations ();
9606 }
9607
9608 if (!success)
9609 {
9610 vt_finalize ();
9611 vt_debug_insns_local (false);
9612 return 0;
9613 }
9614
9615 if (dump_file && (dump_flags & TDF_DETAILS))
9616 {
9617 dump_dataflow_sets ();
9618 dump_flow_info (dump_file, dump_flags);
9619 }
9620
9621 timevar_push (TV_VAR_TRACKING_EMIT);
9622 vt_emit_notes ();
9623 timevar_pop (TV_VAR_TRACKING_EMIT);
9624
9625 vt_finalize ();
9626 vt_debug_insns_local (false);
9627 return 0;
9628 }
9629
9630 unsigned int
9631 variable_tracking_main (void)
9632 {
9633 unsigned int ret;
9634 int save = flag_var_tracking_assignments;
9635
9636 ret = variable_tracking_main_1 ();
9637
9638 flag_var_tracking_assignments = save;
9639
9640 return ret;
9641 }
9642 \f
9643 static bool
9644 gate_handle_var_tracking (void)
9645 {
9646 return (flag_var_tracking && !targetm.delay_vartrack);
9647 }
9648
9649
9650
9651 struct rtl_opt_pass pass_variable_tracking =
9652 {
9653 {
9654 RTL_PASS,
9655 "vartrack", /* name */
9656 gate_handle_var_tracking, /* gate */
9657 variable_tracking_main, /* execute */
9658 NULL, /* sub */
9659 NULL, /* next */
9660 0, /* static_pass_number */
9661 TV_VAR_TRACKING, /* tv_id */
9662 0, /* properties_required */
9663 0, /* properties_provided */
9664 0, /* properties_destroyed */
9665 0, /* todo_flags_start */
9666 TODO_verify_rtl_sharing /* todo_flags_finish */
9667 }
9668 };