1 /* Variable tracking routines for the GNU compiler.
2 Copyright (C) 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010, 2011, 2012
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* This file contains the variable tracking pass. It computes where
22 variables are located (which registers or where in memory) at each position
23 in the instruction stream and emits notes describing the locations.
24 Debug information (DWARF2 location lists) is finally generated from
25 these notes.
26 With this debug information, it is possible to show variables
27 even when debugging optimized code.
28
29 How does the variable tracking pass work?
30
31 First, it scans RTL code for uses, stores and clobbers (register/memory
32 references in instructions), for call insns and for stack adjustments
33 separately for each basic block, and saves them in an array of micro
34 operations.
35 The micro operations of one instruction are ordered so that
36 pre-modifying stack adjustment < use < use with no var < call insn
37 < clobber < set < post-modifying stack adjustment
38
39 Then, a forward dataflow analysis is performed to find out how locations
40 of variables change through code and to propagate the variable locations
41 along the control flow graph.
42 The IN set for basic block BB is computed as a union of OUT sets of BB's
43 predecessors, the OUT set for BB is copied from the IN set for BB and
44 is changed according to micro operations in BB.
45
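Written as dataflow equations (an illustration of the scheme just
described, not code from this file):

IN[BB]  = union of OUT[P] over all predecessors P of BB
OUT[BB] = result of applying BB's micro operations, in order, to IN[BB]
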
46 The IN and OUT sets for basic blocks consist of a current stack adjustment
47 (used for adjusting offsets of variables addressed relative to the stack
48 pointer), the table of structures describing the locations of parts of a
49 variable, and, for each physical register, a linked list of the variable
50 parts stored in that register. The linked list is a list of triplets
51 (reg, decl, offset), where decl is REG_EXPR (reg) and offset is
52 REG_OFFSET (reg). The linked list is used to delete the
53 appropriate variable parts efficiently when we set or clobber
54 the register.
55
56 There may be more than one variable part in a register. The linked lists
57 should be pretty short, so they are a good data structure here.
58 For example, in the following code the register allocator may assign the
59 same register to variables A and B, so both of them are live in that
60 register during CODE:
61
62 if (cond)
63 set A;
64 else
65 set B;
66 CODE;
67 if (cond)
68 use A;
69 else
70 use B;
71
72 Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
73 are emitted at appropriate positions in the RTL code. Each such note
74 describes the location of one variable at the point in the instruction
75 stream where the note is. There is no need to emit a note for each variable
76 before each instruction; we only emit these notes where the location of a
77 variable changes (this means that we also emit notes for changes between
78 the OUT set of the previous block and the IN set of the current block).
79
80 The notes consist of two parts:
81 1. the declaration (from REG_EXPR or MEM_EXPR)
82 2. the location of a variable - it is either a simple register/memory
83 reference (for simple variables, for example int),
84 or a parallel of register/memory references (for large variables
85 which consist of several parts, for example long long).
86
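Schematically (the exact RTL printing varies by target and dump
flavor), the note for an int variable x held in register ax might
look like:

(note NOTE_INSN_VAR_LOCATION (var_location x (reg:SI ax)))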
87 */
88
89 #include "config.h"
90 #include "system.h"
91 #include "coretypes.h"
92 #include "tm.h"
93 #include "rtl.h"
94 #include "tree.h"
95 #include "tm_p.h"
96 #include "hard-reg-set.h"
97 #include "basic-block.h"
98 #include "flags.h"
99 #include "insn-config.h"
100 #include "reload.h"
101 #include "sbitmap.h"
102 #include "alloc-pool.h"
103 #include "fibheap.h"
104 #include "hashtab.h"
105 #include "regs.h"
106 #include "expr.h"
107 #include "timevar.h"
108 #include "tree-pass.h"
109 #include "tree-flow.h"
110 #include "cselib.h"
111 #include "target.h"
112 #include "params.h"
113 #include "diagnostic.h"
114 #include "tree-pretty-print.h"
115 #include "pointer-set.h"
116 #include "recog.h"
117 #include "tm_p.h"
118 #include "alias.h"
119
120 /* var-tracking.c assumes that tree code with the same value as VALUE rtx code
121 has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
122 Currently the value is the same as IDENTIFIER_NODE, which has such
123 a property. If this compile-time assertion ever fails, make sure that
124 the new tree code that equals (int) VALUE has the same property. */
125 extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1];
126
127 /* Type of micro operation. */
128 enum micro_operation_type
129 {
130 MO_USE, /* Use location (REG or MEM). */
131 MO_USE_NO_VAR, /* Use location which is not associated with a variable,
132 or whose variable is not trackable. */
133 MO_VAL_USE, /* Use location which is associated with a value. */
134 MO_VAL_LOC, /* Use location which appears in a debug insn. */
135 MO_VAL_SET, /* Set location associated with a value. */
136 MO_SET, /* Set location. */
137 MO_COPY, /* Copy the same portion of a variable from one
138 location to another. */
139 MO_CLOBBER, /* Clobber location. */
140 MO_CALL, /* Call insn. */
141 MO_ADJUST /* Adjust stack pointer. */
142
143 };
144
145 static const char * const ATTRIBUTE_UNUSED
146 micro_operation_type_name[] = {
147 "MO_USE",
148 "MO_USE_NO_VAR",
149 "MO_VAL_USE",
150 "MO_VAL_LOC",
151 "MO_VAL_SET",
152 "MO_SET",
153 "MO_COPY",
154 "MO_CLOBBER",
155 "MO_CALL",
156 "MO_ADJUST"
157 };
158
159 /* Where shall the note be emitted? BEFORE or AFTER the instruction.
160 Notes emitted as AFTER_CALL are to take effect during the call,
161 rather than after the call. */
162 enum emit_note_where
163 {
164 EMIT_NOTE_BEFORE_INSN,
165 EMIT_NOTE_AFTER_INSN,
166 EMIT_NOTE_AFTER_CALL_INSN
167 };
168
169 /* Structure holding information about a micro operation. */
170 typedef struct micro_operation_def
171 {
172 /* Type of micro operation. */
173 enum micro_operation_type type;
174
175 /* The instruction which the micro operation is in, for MO_USE,
176 MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
177 instruction or note in the original flow (before any var-tracking
178 notes are inserted, to simplify emission of notes), for MO_SET
179 and MO_CLOBBER. */
180 rtx insn;
181
182 union {
183 /* Location. For MO_SET and MO_COPY, this is the SET that
184 performs the assignment, if known, otherwise it is the target
185 of the assignment. For MO_VAL_USE and MO_VAL_SET, it is a
186 CONCAT of the VALUE and the LOC associated with it. For
187 MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
188 associated with it. */
189 rtx loc;
190
191 /* Stack adjustment. */
192 HOST_WIDE_INT adjust;
193 } u;
194 } micro_operation;
195
196 DEF_VEC_O(micro_operation);
197 DEF_VEC_ALLOC_O(micro_operation,heap);
198
199 /* A declaration of a variable, or an RTL value being handled like a
200 declaration. */
201 typedef void *decl_or_value;
202
203 /* Structure for passing extra parameters to the function
204 emit_note_insn_var_location. */
205 typedef struct emit_note_data_def
206 {
207 /* The instruction which the note will be emitted before/after. */
208 rtx insn;
209
210 /* Where will the note be emitted (before/after the insn)? */
211 enum emit_note_where where;
212
213 /* The variables and values active at this point. */
214 htab_t vars;
215 } emit_note_data;
216
217 /* Description of the location of a part of a variable. The content of a
218 physical register is described by a chain of these structures.
219 The chains are pretty short (usually 1 or 2 elements) and thus
220 a chain is a good data structure here. */
221 typedef struct attrs_def
222 {
223 /* Pointer to next member of the list. */
224 struct attrs_def *next;
225
226 /* The rtx of the register. */
227 rtx loc;
228
229 /* The declaration corresponding to LOC. */
230 decl_or_value dv;
231
232 /* Offset from start of DECL. */
233 HOST_WIDE_INT offset;
234 } *attrs;
235
236 /* Structure holding a refcounted hash table. If refcount > 1,
237 it must be unshared before being modified. */
238 typedef struct shared_hash_def
239 {
240 /* Reference count. */
241 int refcount;
242
243 /* Actual hash table. */
244 htab_t htab;
245 } *shared_hash;
246
247 /* Structure holding the IN or OUT set for a basic block. */
248 typedef struct dataflow_set_def
249 {
250 /* Adjustment of stack offset. */
251 HOST_WIDE_INT stack_adjust;
252
253 /* Attributes for registers (lists of attrs). */
254 attrs regs[FIRST_PSEUDO_REGISTER];
255
256 /* Variable locations. */
257 shared_hash vars;
258
259 /* The vars hash table that is currently being traversed. */
260 shared_hash traversed_vars;
261 } dataflow_set;
262
263 /* The structure (one for each basic block) containing the information
264 needed for variable tracking. */
265 typedef struct variable_tracking_info_def
266 {
267 /* The vector of micro operations. */
268 VEC(micro_operation, heap) *mos;
269
270 /* The IN and OUT set for dataflow analysis. */
271 dataflow_set in;
272 dataflow_set out;
273
274 /* The permanent-in dataflow set for this block. This is used to
275 hold values for which we had to compute entry values. ??? This
276 should probably be dynamically allocated, to avoid using more
277 memory in non-debug builds. */
278 dataflow_set *permp;
279
280 /* Has the block been visited in DFS? */
281 bool visited;
282
283 /* Has the block been flooded in VTA? */
284 bool flooded;
285
286 } *variable_tracking_info;
287
288 /* Structure for chaining the locations. */
289 typedef struct location_chain_def
290 {
291 /* Next element in the chain. */
292 struct location_chain_def *next;
293
294 /* The location (REG, MEM or VALUE). */
295 rtx loc;
296
297 /* The "value" stored in this location. */
298 rtx set_src;
299
300 /* Initialized? */
301 enum var_init_status init;
302 } *location_chain;
303
304 /* A vector of loc_exp_dep holds the active dependencies of a one-part
305 DV on VALUEs, i.e., the VALUEs expanded so as to form the current
306 location of DV. Each entry is also part of VALUE' s linked-list of
307 backlinks back to DV. */
308 typedef struct loc_exp_dep_s
309 {
310 /* The dependent DV. */
311 decl_or_value dv;
312 /* The dependency VALUE or DEBUG_EXPR. */
313 rtx value;
314 /* The next entry in VALUE's backlinks list. */
315 struct loc_exp_dep_s *next;
316 /* A pointer to the pointer to this entry (head or prev's next) in
317 the doubly-linked list. */
318 struct loc_exp_dep_s **pprev;
319 } loc_exp_dep;
320
321 DEF_VEC_O (loc_exp_dep);
322
323 /* This data structure holds information about the depth of a variable
324 expansion. */
325 typedef struct expand_depth_struct
326 {
327 /* This measures the complexity of the expanded expression. It
328 grows by one for each level of expansion that adds more than one
329 operand. */
330 int complexity;
331 /* This counts the number of ENTRY_VALUE expressions in an
332 expansion. We want to minimize their use. */
333 int entryvals;
334 } expand_depth;
335
336 /* This data structure is allocated for one-part variables at the time
337 of emitting notes. */
338 struct onepart_aux
339 {
340 /* Doubly-linked list of dependent DVs. These are DVs whose cur_loc
341 computation used the expansion of this variable, and that ought
342 to be notified should this variable change. If the DV's cur_loc
343 expanded to NULL, all components of the loc list are regarded as
344 active, so that any changes in them give us a chance to get a
345 location. Otherwise, only components of the loc that expanded to
346 non-NULL are regarded as active dependencies. */
347 loc_exp_dep *backlinks;
348 /* This holds the LOC that was expanded into cur_loc. We need only
349 mark a one-part variable as changed if the FROM loc is removed,
350 or if it has no known location and a loc is added, or if it gets
351 a change notification from any of its active dependencies. */
352 rtx from;
353 /* The depth of the cur_loc expression. */
354 expand_depth depth;
355 /* Dependencies actively used when expanding FROM into cur_loc. */
356 VEC (loc_exp_dep, none) deps;
357 };
358
359 /* Structure describing one part of a variable. */
360 typedef struct variable_part_def
361 {
362 /* Chain of locations of the part. */
363 location_chain loc_chain;
364
365 /* Location which was last emitted to location list. */
366 rtx cur_loc;
367
368 union variable_aux
369 {
370 /* The offset in the variable, if !var->onepart. */
371 HOST_WIDE_INT offset;
372
373 /* Pointer to auxiliary data, if var->onepart and emit_notes. */
374 struct onepart_aux *onepaux;
375 } aux;
376 } variable_part;
377
378 /* Maximum number of location parts. */
379 #define MAX_VAR_PARTS 16
380
381 /* Enumeration type used to discriminate various types of one-part
382 variables. */
383 typedef enum onepart_enum
384 {
385 /* Not a one-part variable. */
386 NOT_ONEPART = 0,
387 /* A one-part DECL that is not a DEBUG_EXPR_DECL. */
388 ONEPART_VDECL = 1,
389 /* A DEBUG_EXPR_DECL. */
390 ONEPART_DEXPR = 2,
391 /* A VALUE. */
392 ONEPART_VALUE = 3
393 } onepart_enum_t;
394
395 /* Structure describing where the variable is located. */
396 typedef struct variable_def
397 {
398 /* The declaration of the variable, or an RTL value being handled
399 like a declaration. */
400 decl_or_value dv;
401
402 /* Reference count. */
403 int refcount;
404
405 /* Number of variable parts. */
406 char n_var_parts;
407
408 /* What type of DV this is, according to enum onepart_enum. */
409 ENUM_BITFIELD (onepart_enum) onepart : CHAR_BIT;
410
411 /* True if this variable_def struct is currently in the
412 changed_variables hash table. */
413 bool in_changed_variables;
414
415 /* The variable parts. */
416 variable_part var_part[1];
417 } *variable;
418 typedef const struct variable_def *const_variable;
419
420 /* Pointer to the BB's information specific to the variable tracking pass. */
421 #define VTI(BB) ((variable_tracking_info) (BB)->aux)
422
423 /* Macro to access MEM_OFFSET as a HOST_WIDE_INT. Evaluates MEM twice. */
424 #define INT_MEM_OFFSET(mem) (MEM_OFFSET_KNOWN_P (mem) ? MEM_OFFSET (mem) : 0)
425
426 #if ENABLE_CHECKING && (GCC_VERSION >= 2007)
427
428 /* Access VAR's Ith part's offset, checking that it's not a one-part
429 variable. */
430 #define VAR_PART_OFFSET(var, i) __extension__ \
431 (*({ variable const __v = (var); \
432 gcc_checking_assert (!__v->onepart); \
433 &__v->var_part[(i)].aux.offset; }))
434
435 /* Access VAR's one-part auxiliary data, checking that it is a
436 one-part variable. */
437 #define VAR_LOC_1PAUX(var) __extension__ \
438 (*({ variable const __v = (var); \
439 gcc_checking_assert (__v->onepart); \
440 &__v->var_part[0].aux.onepaux; }))
441
442 #else
443 #define VAR_PART_OFFSET(var, i) ((var)->var_part[(i)].aux.offset)
444 #define VAR_LOC_1PAUX(var) ((var)->var_part[0].aux.onepaux)
445 #endif
446
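/* For example, VAR_PART_OFFSET (var, 0) reads (or writes, as an lvalue)
   the offset of VAR's first part, and with checking enabled it asserts
   that VAR is not a one-part variable. */
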
447 /* These are accessor macros for the one-part auxiliary data. When
448 convenient for users, they're guarded by tests that the data was
449 allocated. */
450 #define VAR_LOC_DEP_LST(var) (VAR_LOC_1PAUX (var) \
451 ? VAR_LOC_1PAUX (var)->backlinks \
452 : NULL)
453 #define VAR_LOC_DEP_LSTP(var) (VAR_LOC_1PAUX (var) \
454 ? &VAR_LOC_1PAUX (var)->backlinks \
455 : NULL)
456 #define VAR_LOC_FROM(var) (VAR_LOC_1PAUX (var)->from)
457 #define VAR_LOC_DEPTH(var) (VAR_LOC_1PAUX (var)->depth)
458 #define VAR_LOC_DEP_VEC(var) (VAR_LOC_1PAUX (var) \
459 ? &VAR_LOC_1PAUX (var)->deps \
460 : NULL)
461
462 /* Alloc pool for struct attrs_def. */
463 static alloc_pool attrs_pool;
464
465 /* Alloc pool for struct variable_def with MAX_VAR_PARTS entries. */
466 static alloc_pool var_pool;
467
468 /* Alloc pool for struct variable_def with a single var_part entry. */
469 static alloc_pool valvar_pool;
470
471 /* Alloc pool for struct location_chain_def. */
472 static alloc_pool loc_chain_pool;
473
474 /* Alloc pool for struct shared_hash_def. */
475 static alloc_pool shared_hash_pool;
476
477 /* Alloc pool for struct loc_exp_dep_s for NOT_ONEPART variables. */
478 static alloc_pool loc_exp_dep_pool;
479
480 /* Changed variables, notes will be emitted for them. */
481 static htab_t changed_variables;
482
483 /* Shall notes be emitted? */
484 static bool emit_notes;
485
486 /* Values whose dynamic location lists have gone empty, but whose
487 cselib location lists are still usable. Use this to hold the
488 current location, the backlinks, etc., during emit_notes. */
489 static htab_t dropped_values;
490
491 /* Empty shared hashtable. */
492 static shared_hash empty_shared_hash;
493
494 /* Scratch register bitmap used by cselib_expand_value_rtx. */
495 static bitmap scratch_regs = NULL;
496
497 #ifdef HAVE_window_save
498 typedef struct GTY(()) parm_reg {
499 rtx outgoing;
500 rtx incoming;
501 } parm_reg_t;
502
503 DEF_VEC_O(parm_reg_t);
504 DEF_VEC_ALLOC_O(parm_reg_t, gc);
505
506 /* Vector of windowed parameter registers, if any. */
507 static VEC(parm_reg_t, gc) *windowed_parm_regs = NULL;
508 #endif
509
510 /* Variable used to tell whether cselib_process_insn called our hook. */
511 static bool cselib_hook_called;
512
513 /* Local function prototypes. */
514 static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
515 HOST_WIDE_INT *);
516 static void insn_stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
517 HOST_WIDE_INT *);
518 static bool vt_stack_adjustments (void);
519 static hashval_t variable_htab_hash (const void *);
520 static int variable_htab_eq (const void *, const void *);
521 static void variable_htab_free (void *);
522
523 static void init_attrs_list_set (attrs *);
524 static void attrs_list_clear (attrs *);
525 static attrs attrs_list_member (attrs, decl_or_value, HOST_WIDE_INT);
526 static void attrs_list_insert (attrs *, decl_or_value, HOST_WIDE_INT, rtx);
527 static void attrs_list_copy (attrs *, attrs);
528 static void attrs_list_union (attrs *, attrs);
529
530 static void **unshare_variable (dataflow_set *set, void **slot, variable var,
531 enum var_init_status);
532 static void vars_copy (htab_t, htab_t);
533 static tree var_debug_decl (tree);
534 static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
535 static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
536 enum var_init_status, rtx);
537 static void var_reg_delete (dataflow_set *, rtx, bool);
538 static void var_regno_delete (dataflow_set *, int);
539 static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
540 static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
541 enum var_init_status, rtx);
542 static void var_mem_delete (dataflow_set *, rtx, bool);
543
544 static void dataflow_set_init (dataflow_set *);
545 static void dataflow_set_clear (dataflow_set *);
546 static void dataflow_set_copy (dataflow_set *, dataflow_set *);
547 static int variable_union_info_cmp_pos (const void *, const void *);
548 static void dataflow_set_union (dataflow_set *, dataflow_set *);
549 static location_chain find_loc_in_1pdv (rtx, variable, htab_t);
550 static bool canon_value_cmp (rtx, rtx);
551 static int loc_cmp (rtx, rtx);
552 static bool variable_part_different_p (variable_part *, variable_part *);
553 static bool onepart_variable_different_p (variable, variable);
554 static bool variable_different_p (variable, variable);
555 static bool dataflow_set_different (dataflow_set *, dataflow_set *);
556 static void dataflow_set_destroy (dataflow_set *);
557
558 static bool contains_symbol_ref (rtx);
559 static bool track_expr_p (tree, bool);
560 static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
561 static int add_uses (rtx *, void *);
562 static void add_uses_1 (rtx *, void *);
563 static void add_stores (rtx, const_rtx, void *);
564 static bool compute_bb_dataflow (basic_block);
565 static bool vt_find_locations (void);
566
567 static void dump_attrs_list (attrs);
568 static int dump_var_slot (void **, void *);
569 static void dump_var (variable);
570 static void dump_vars (htab_t);
571 static void dump_dataflow_set (dataflow_set *);
572 static void dump_dataflow_sets (void);
573
574 static void set_dv_changed (decl_or_value, bool);
575 static void variable_was_changed (variable, dataflow_set *);
576 static void **set_slot_part (dataflow_set *, rtx, void **,
577 decl_or_value, HOST_WIDE_INT,
578 enum var_init_status, rtx);
579 static void set_variable_part (dataflow_set *, rtx,
580 decl_or_value, HOST_WIDE_INT,
581 enum var_init_status, rtx, enum insert_option);
582 static void **clobber_slot_part (dataflow_set *, rtx,
583 void **, HOST_WIDE_INT, rtx);
584 static void clobber_variable_part (dataflow_set *, rtx,
585 decl_or_value, HOST_WIDE_INT, rtx);
586 static void **delete_slot_part (dataflow_set *, rtx, void **, HOST_WIDE_INT);
587 static void delete_variable_part (dataflow_set *, rtx,
588 decl_or_value, HOST_WIDE_INT);
589 static int emit_note_insn_var_location (void **, void *);
590 static void emit_notes_for_changes (rtx, enum emit_note_where, shared_hash);
591 static int emit_notes_for_differences_1 (void **, void *);
592 static int emit_notes_for_differences_2 (void **, void *);
593 static void emit_notes_for_differences (rtx, dataflow_set *, dataflow_set *);
594 static void emit_notes_in_bb (basic_block, dataflow_set *);
595 static void vt_emit_notes (void);
596
597 static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
598 static void vt_add_function_parameters (void);
599 static bool vt_initialize (void);
600 static void vt_finalize (void);
601
602 /* Given a SET, calculate the amount of stack adjustment it contains,
603 both PRE- and POST-modifying the stack pointer.
604 This function is similar to stack_adjust_offset. */
605
606 static void
607 stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
608 HOST_WIDE_INT *post)
609 {
610 rtx src = SET_SRC (pattern);
611 rtx dest = SET_DEST (pattern);
612 enum rtx_code code;
613
614 if (dest == stack_pointer_rtx)
615 {
616 /* (set (reg sp) (plus (reg sp) (const_int))) */
617 code = GET_CODE (src);
618 if (! (code == PLUS || code == MINUS)
619 || XEXP (src, 0) != stack_pointer_rtx
620 || !CONST_INT_P (XEXP (src, 1)))
621 return;
622
623 if (code == MINUS)
624 *post += INTVAL (XEXP (src, 1));
625 else
626 *post -= INTVAL (XEXP (src, 1));
627 }
628 else if (MEM_P (dest))
629 {
630 /* (set (mem (pre_dec (reg sp))) (foo)) */
631 src = XEXP (dest, 0);
632 code = GET_CODE (src);
633
634 switch (code)
635 {
636 case PRE_MODIFY:
637 case POST_MODIFY:
638 if (XEXP (src, 0) == stack_pointer_rtx)
639 {
640 rtx val = XEXP (XEXP (src, 1), 1);
641 /* We handle only adjustments by constant amount. */
642 gcc_assert (GET_CODE (XEXP (src, 1)) == PLUS &&
643 CONST_INT_P (val));
644
645 if (code == PRE_MODIFY)
646 *pre -= INTVAL (val);
647 else
648 *post -= INTVAL (val);
649 break;
650 }
651 return;
652
653 case PRE_DEC:
654 if (XEXP (src, 0) == stack_pointer_rtx)
655 {
656 *pre += GET_MODE_SIZE (GET_MODE (dest));
657 break;
658 }
659 return;
660
661 case POST_DEC:
662 if (XEXP (src, 0) == stack_pointer_rtx)
663 {
664 *post += GET_MODE_SIZE (GET_MODE (dest));
665 break;
666 }
667 return;
668
669 case PRE_INC:
670 if (XEXP (src, 0) == stack_pointer_rtx)
671 {
672 *pre -= GET_MODE_SIZE (GET_MODE (dest));
673 break;
674 }
675 return;
676
677 case POST_INC:
678 if (XEXP (src, 0) == stack_pointer_rtx)
679 {
680 *post -= GET_MODE_SIZE (GET_MODE (dest));
681 break;
682 }
683 return;
684
685 default:
686 return;
687 }
688 }
689 }
690
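/* For instance (a sketch of the conventions above, not an exhaustive
   list): on a 64-bit target,
     (set (reg sp) (plus (reg sp) (const_int -16)))  -> *post += 16
     (set (mem:DI (pre_dec (reg sp))) (reg x))       -> *pre  += 8
   i.e. moving the stack pointer down counts as a positive adjustment.  */
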
691 /* Given an INSN, calculate the amount of stack adjustment it contains,
692 both PRE- and POST-modifying the stack pointer. */
693
694 static void
695 insn_stack_adjust_offset_pre_post (rtx insn, HOST_WIDE_INT *pre,
696 HOST_WIDE_INT *post)
697 {
698 rtx pattern;
699
700 *pre = 0;
701 *post = 0;
702
703 pattern = PATTERN (insn);
704 if (RTX_FRAME_RELATED_P (insn))
705 {
706 rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
707 if (expr)
708 pattern = XEXP (expr, 0);
709 }
710
711 if (GET_CODE (pattern) == SET)
712 stack_adjust_offset_pre_post (pattern, pre, post);
713 else if (GET_CODE (pattern) == PARALLEL
714 || GET_CODE (pattern) == SEQUENCE)
715 {
716 int i;
717
718 /* There may be stack adjustments inside compound insns. Search
719 for them. */
720 for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
721 if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
722 stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
723 }
724 }
725
726 /* Compute stack adjustments for all blocks by traversing the DFS tree.
727 Return true when the adjustments on all incoming edges are consistent.
728 Heavily borrowed from pre_and_rev_post_order_compute. */
729
730 static bool
731 vt_stack_adjustments (void)
732 {
733 edge_iterator *stack;
734 int sp;
735
736 /* Initialize entry block. */
737 VTI (ENTRY_BLOCK_PTR)->visited = true;
738 VTI (ENTRY_BLOCK_PTR)->in.stack_adjust = INCOMING_FRAME_SP_OFFSET;
739 VTI (ENTRY_BLOCK_PTR)->out.stack_adjust = INCOMING_FRAME_SP_OFFSET;
740
741 /* Allocate stack for back-tracking up CFG. */
742 stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
743 sp = 0;
744
745 /* Push the first edge on to the stack. */
746 stack[sp++] = ei_start (ENTRY_BLOCK_PTR->succs);
747
748 while (sp)
749 {
750 edge_iterator ei;
751 basic_block src;
752 basic_block dest;
753
754 /* Look at the edge on the top of the stack. */
755 ei = stack[sp - 1];
756 src = ei_edge (ei)->src;
757 dest = ei_edge (ei)->dest;
758
759 /* Check if the edge destination has been visited yet. */
760 if (!VTI (dest)->visited)
761 {
762 rtx insn;
763 HOST_WIDE_INT pre, post, offset;
764 VTI (dest)->visited = true;
765 VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust;
766
767 if (dest != EXIT_BLOCK_PTR)
768 for (insn = BB_HEAD (dest);
769 insn != NEXT_INSN (BB_END (dest));
770 insn = NEXT_INSN (insn))
771 if (INSN_P (insn))
772 {
773 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
774 offset += pre + post;
775 }
776
777 VTI (dest)->out.stack_adjust = offset;
778
779 if (EDGE_COUNT (dest->succs) > 0)
780 /* Since the DEST node has been visited for the first
781 time, check its successors. */
782 stack[sp++] = ei_start (dest->succs);
783 }
784 else
785 {
786 /* Check whether the adjustments on the edges are the same. */
787 if (VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
788 {
789 free (stack);
790 return false;
791 }
792
793 if (! ei_one_before_end_p (ei))
794 /* Go to the next edge. */
795 ei_next (&stack[sp - 1]);
796 else
797 /* Return to previous level if there are no more edges. */
798 sp--;
799 }
800 }
801
802 free (stack);
803 return true;
804 }
805
806 /* arg_pointer_rtx or frame_pointer_rtx, if stack_pointer_rtx or
807 hard_frame_pointer_rtx is being mapped to it, and the offset to add. */
808 static rtx cfa_base_rtx;
809 static HOST_WIDE_INT cfa_base_offset;
810
811 /* Compute a CFA-based value for an ADJUSTMENT made to stack_pointer_rtx
812 or hard_frame_pointer_rtx. */
813
814 static inline rtx
815 compute_cfa_pointer (HOST_WIDE_INT adjustment)
816 {
817 return plus_constant (Pmode, cfa_base_rtx, adjustment + cfa_base_offset);
818 }
819
820 /* Adjustment for hard_frame_pointer_rtx to cfa base reg,
821 or -1 if the replacement shouldn't be done. */
822 static HOST_WIDE_INT hard_frame_pointer_adjustment = -1;
823
824 /* Data for adjust_mems callback. */
825
826 struct adjust_mem_data
827 {
828 bool store;
829 enum machine_mode mem_mode;
830 HOST_WIDE_INT stack_adjust;
831 rtx side_effects;
832 };
833
834 /* Helper for adjust_mems. Return 1 if *loc is unsuitable for
835 transformation of wider-mode arithmetic to a narrower mode,
836 -1 if it is suitable and subexpressions shouldn't be
837 traversed and 0 if it is suitable and subexpressions should
838 be traversed. Called through for_each_rtx. */
839
840 static int
841 use_narrower_mode_test (rtx *loc, void *data)
842 {
843 rtx subreg = (rtx) data;
844
845 if (CONSTANT_P (*loc))
846 return -1;
847 switch (GET_CODE (*loc))
848 {
849 case REG:
850 if (cselib_lookup (*loc, GET_MODE (SUBREG_REG (subreg)), 0, VOIDmode))
851 return 1;
852 if (!validate_subreg (GET_MODE (subreg), GET_MODE (*loc),
853 *loc, subreg_lowpart_offset (GET_MODE (subreg),
854 GET_MODE (*loc))))
855 return 1;
856 return -1;
857 case PLUS:
858 case MINUS:
859 case MULT:
860 return 0;
861 case ASHIFT:
862 if (for_each_rtx (&XEXP (*loc, 0), use_narrower_mode_test, data))
863 return 1;
864 else
865 return -1;
866 default:
867 return 1;
868 }
869 }
870
871 /* Transform X into narrower mode MODE from wider mode WMODE. */
872
873 static rtx
874 use_narrower_mode (rtx x, enum machine_mode mode, enum machine_mode wmode)
875 {
876 rtx op0, op1;
877 if (CONSTANT_P (x))
878 return lowpart_subreg (mode, x, wmode);
879 switch (GET_CODE (x))
880 {
881 case REG:
882 return lowpart_subreg (mode, x, wmode);
883 case PLUS:
884 case MINUS:
885 case MULT:
886 op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
887 op1 = use_narrower_mode (XEXP (x, 1), mode, wmode);
888 return simplify_gen_binary (GET_CODE (x), mode, op0, op1);
889 case ASHIFT:
890 op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
891 return simplify_gen_binary (ASHIFT, mode, op0, XEXP (x, 1));
892 default:
893 gcc_unreachable ();
894 }
895 }
896
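/* For example (an illustration, assuming a little-endian target),
   narrowing (plus:DI (reg:DI x) (const_int 8)) to SImode yields
   (plus:SI (subreg:SI (reg:DI x) 0) (const_int 8)).  */
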
897 /* Helper function for adjusting used MEMs. */
898
899 static rtx
900 adjust_mems (rtx loc, const_rtx old_rtx, void *data)
901 {
902 struct adjust_mem_data *amd = (struct adjust_mem_data *) data;
903 rtx mem, addr = loc, tem;
904 enum machine_mode mem_mode_save;
905 bool store_save;
906 switch (GET_CODE (loc))
907 {
908 case REG:
909 /* Don't do any sp or fp replacements outside of MEM addresses
910 on the LHS. */
911 if (amd->mem_mode == VOIDmode && amd->store)
912 return loc;
913 if (loc == stack_pointer_rtx
914 && !frame_pointer_needed
915 && cfa_base_rtx)
916 return compute_cfa_pointer (amd->stack_adjust);
917 else if (loc == hard_frame_pointer_rtx
918 && frame_pointer_needed
919 && hard_frame_pointer_adjustment != -1
920 && cfa_base_rtx)
921 return compute_cfa_pointer (hard_frame_pointer_adjustment);
922 gcc_checking_assert (loc != virtual_incoming_args_rtx);
923 return loc;
924 case MEM:
925 mem = loc;
926 if (!amd->store)
927 {
928 mem = targetm.delegitimize_address (mem);
929 if (mem != loc && !MEM_P (mem))
930 return simplify_replace_fn_rtx (mem, old_rtx, adjust_mems, data);
931 }
932
933 addr = XEXP (mem, 0);
934 mem_mode_save = amd->mem_mode;
935 amd->mem_mode = GET_MODE (mem);
936 store_save = amd->store;
937 amd->store = false;
938 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
939 amd->store = store_save;
940 amd->mem_mode = mem_mode_save;
941 if (mem == loc)
942 addr = targetm.delegitimize_address (addr);
943 if (addr != XEXP (mem, 0))
944 mem = replace_equiv_address_nv (mem, addr);
945 if (!amd->store)
946 mem = avoid_constant_pool_reference (mem);
947 return mem;
948 case PRE_INC:
949 case PRE_DEC:
950 addr = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
951 GEN_INT (GET_CODE (loc) == PRE_INC
952 ? GET_MODE_SIZE (amd->mem_mode)
953 : -GET_MODE_SIZE (amd->mem_mode)));
954 case POST_INC:
955 case POST_DEC:
956 if (addr == loc)
957 addr = XEXP (loc, 0);
958 gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode);
959 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
960 tem = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
961 GEN_INT ((GET_CODE (loc) == PRE_INC
962 || GET_CODE (loc) == POST_INC)
963 ? GET_MODE_SIZE (amd->mem_mode)
964 : -GET_MODE_SIZE (amd->mem_mode)));
965 amd->side_effects = alloc_EXPR_LIST (0,
966 gen_rtx_SET (VOIDmode,
967 XEXP (loc, 0),
968 tem),
969 amd->side_effects);
970 return addr;
971 case PRE_MODIFY:
972 addr = XEXP (loc, 1);
973 case POST_MODIFY:
974 if (addr == loc)
975 addr = XEXP (loc, 0);
976 gcc_assert (amd->mem_mode != VOIDmode);
977 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
978 amd->side_effects = alloc_EXPR_LIST (0,
979 gen_rtx_SET (VOIDmode,
980 XEXP (loc, 0),
981 XEXP (loc, 1)),
982 amd->side_effects);
983 return addr;
984 case SUBREG:
985 /* First try without delegitimization of whole MEMs and
986 avoid_constant_pool_reference, which is more likely to succeed. */
987 store_save = amd->store;
988 amd->store = true;
989 addr = simplify_replace_fn_rtx (SUBREG_REG (loc), old_rtx, adjust_mems,
990 data);
991 amd->store = store_save;
992 mem = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
993 if (mem == SUBREG_REG (loc))
994 {
995 tem = loc;
996 goto finish_subreg;
997 }
998 tem = simplify_gen_subreg (GET_MODE (loc), mem,
999 GET_MODE (SUBREG_REG (loc)),
1000 SUBREG_BYTE (loc));
1001 if (tem)
1002 goto finish_subreg;
1003 tem = simplify_gen_subreg (GET_MODE (loc), addr,
1004 GET_MODE (SUBREG_REG (loc)),
1005 SUBREG_BYTE (loc));
1006 if (tem == NULL_RTX)
1007 tem = gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc));
1008 finish_subreg:
1009 if (MAY_HAVE_DEBUG_INSNS
1010 && GET_CODE (tem) == SUBREG
1011 && (GET_CODE (SUBREG_REG (tem)) == PLUS
1012 || GET_CODE (SUBREG_REG (tem)) == MINUS
1013 || GET_CODE (SUBREG_REG (tem)) == MULT
1014 || GET_CODE (SUBREG_REG (tem)) == ASHIFT)
1015 && GET_MODE_CLASS (GET_MODE (tem)) == MODE_INT
1016 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_INT
1017 && GET_MODE_SIZE (GET_MODE (tem))
1018 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (tem)))
1019 && subreg_lowpart_p (tem)
1020 && !for_each_rtx (&SUBREG_REG (tem), use_narrower_mode_test, tem))
1021 return use_narrower_mode (SUBREG_REG (tem), GET_MODE (tem),
1022 GET_MODE (SUBREG_REG (tem)));
1023 return tem;
1024 case ASM_OPERANDS:
1025 /* Don't do any replacements in the second and following
1026 ASM_OPERANDS of an inline-asm with multiple sets.
1027 ASM_OPERANDS_INPUT_VEC, ASM_OPERANDS_INPUT_CONSTRAINT_VEC
1028 and ASM_OPERANDS_LABEL_VEC need to be equal among
1029 all the ASM_OPERANDS in the insn, and adjust_insn will
1030 fix this up. */
1031 if (ASM_OPERANDS_OUTPUT_IDX (loc) != 0)
1032 return loc;
1033 break;
1034 default:
1035 break;
1036 }
1037 return NULL_RTX;
1038 }
1039
1040 /* Helper function for replacement of uses. */
1041
1042 static void
1043 adjust_mem_uses (rtx *x, void *data)
1044 {
1045 rtx new_x = simplify_replace_fn_rtx (*x, NULL_RTX, adjust_mems, data);
1046 if (new_x != *x)
1047 validate_change (NULL_RTX, x, new_x, true);
1048 }
1049
1050 /* Helper function for replacement of stores. */
1051
1052 static void
1053 adjust_mem_stores (rtx loc, const_rtx expr, void *data)
1054 {
1055 if (MEM_P (loc))
1056 {
1057 rtx new_dest = simplify_replace_fn_rtx (SET_DEST (expr), NULL_RTX,
1058 adjust_mems, data);
1059 if (new_dest != SET_DEST (expr))
1060 {
1061 rtx xexpr = CONST_CAST_RTX (expr);
1062 validate_change (NULL_RTX, &SET_DEST (xexpr), new_dest, true);
1063 }
1064 }
1065 }
1066
1067 /* Simplify INSN. Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
1068 replace them with their value in the insn and add the side-effects
1069 as other sets to the insn. */
1070
1071 static void
1072 adjust_insn (basic_block bb, rtx insn)
1073 {
1074 struct adjust_mem_data amd;
1075 rtx set;
1076
1077 #ifdef HAVE_window_save
1078 /* If the target machine has an explicit window save instruction, the
1079 transformation OUTGOING_REGNO -> INCOMING_REGNO is done there. */
1080 if (RTX_FRAME_RELATED_P (insn)
1081 && find_reg_note (insn, REG_CFA_WINDOW_SAVE, NULL_RTX))
1082 {
1083 unsigned int i, nregs = VEC_length(parm_reg_t, windowed_parm_regs);
1084 rtx rtl = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nregs * 2));
1085 parm_reg_t *p;
1086
1087 FOR_EACH_VEC_ELT (parm_reg_t, windowed_parm_regs, i, p)
1088 {
1089 XVECEXP (rtl, 0, i * 2)
1090 = gen_rtx_SET (VOIDmode, p->incoming, p->outgoing);
1091 /* Do not clobber the attached DECL, but only the REG. */
1092 XVECEXP (rtl, 0, i * 2 + 1)
1093 = gen_rtx_CLOBBER (GET_MODE (p->outgoing),
1094 gen_raw_REG (GET_MODE (p->outgoing),
1095 REGNO (p->outgoing)));
1096 }
1097
1098 validate_change (NULL_RTX, &PATTERN (insn), rtl, true);
1099 return;
1100 }
1101 #endif
1102
1103 amd.mem_mode = VOIDmode;
1104 amd.stack_adjust = -VTI (bb)->out.stack_adjust;
1105 amd.side_effects = NULL_RTX;
1106
1107 amd.store = true;
1108 note_stores (PATTERN (insn), adjust_mem_stores, &amd);
1109
1110 amd.store = false;
1111 if (GET_CODE (PATTERN (insn)) == PARALLEL
1112 && asm_noperands (PATTERN (insn)) > 0
1113 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1114 {
1115 rtx body, set0;
1116 int i;
1117
1118 /* An inline-asm with multiple sets is a tiny bit more complicated,
1119 because the three vectors in ASM_OPERANDS need to be shared among
1120 all ASM_OPERANDS in the instruction. adjust_mems will
1121 not touch ASM_OPERANDS other than the first one; the asm_noperands
1122 test above needs to be done before that (otherwise it would fail),
1123 and afterwards this code fixes it up. */
1124 note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
1125 body = PATTERN (insn);
1126 set0 = XVECEXP (body, 0, 0);
1127 gcc_checking_assert (GET_CODE (set0) == SET
1128 && GET_CODE (SET_SRC (set0)) == ASM_OPERANDS
1129 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set0)) == 0);
1130 for (i = 1; i < XVECLEN (body, 0); i++)
1131 if (GET_CODE (XVECEXP (body, 0, i)) != SET)
1132 break;
1133 else
1134 {
1135 set = XVECEXP (body, 0, i);
1136 gcc_checking_assert (GET_CODE (SET_SRC (set)) == ASM_OPERANDS
1137 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set))
1138 == i);
1139 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (set))
1140 != ASM_OPERANDS_INPUT_VEC (SET_SRC (set0))
1141 || ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set))
1142 != ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0))
1143 || ASM_OPERANDS_LABEL_VEC (SET_SRC (set))
1144 != ASM_OPERANDS_LABEL_VEC (SET_SRC (set0)))
1145 {
1146 rtx newsrc = shallow_copy_rtx (SET_SRC (set));
1147 ASM_OPERANDS_INPUT_VEC (newsrc)
1148 = ASM_OPERANDS_INPUT_VEC (SET_SRC (set0));
1149 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (newsrc)
1150 = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0));
1151 ASM_OPERANDS_LABEL_VEC (newsrc)
1152 = ASM_OPERANDS_LABEL_VEC (SET_SRC (set0));
1153 validate_change (NULL_RTX, &SET_SRC (set), newsrc, true);
1154 }
1155 }
1156 }
1157 else
1158 note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
1159
1160 /* For read-only MEMs containing some constant, prefer those
1161 constants. */
1162 set = single_set (insn);
1163 if (set && MEM_P (SET_SRC (set)) && MEM_READONLY_P (SET_SRC (set)))
1164 {
1165 rtx note = find_reg_equal_equiv_note (insn);
1166
1167 if (note && CONSTANT_P (XEXP (note, 0)))
1168 validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true);
1169 }
1170
1171 if (amd.side_effects)
1172 {
1173 rtx *pat, new_pat, s;
1174 int i, oldn, newn;
1175
1176 pat = &PATTERN (insn);
1177 if (GET_CODE (*pat) == COND_EXEC)
1178 pat = &COND_EXEC_CODE (*pat);
1179 if (GET_CODE (*pat) == PARALLEL)
1180 oldn = XVECLEN (*pat, 0);
1181 else
1182 oldn = 1;
1183 for (s = amd.side_effects, newn = 0; s; newn++)
1184 s = XEXP (s, 1);
1185 new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn));
1186 if (GET_CODE (*pat) == PARALLEL)
1187 for (i = 0; i < oldn; i++)
1188 XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i);
1189 else
1190 XVECEXP (new_pat, 0, 0) = *pat;
1191 for (s = amd.side_effects, i = oldn; i < oldn + newn; i++, s = XEXP (s, 1))
1192 XVECEXP (new_pat, 0, i) = XEXP (s, 0);
1193 free_EXPR_LIST_list (&amd.side_effects);
1194 validate_change (NULL_RTX, pat, new_pat, true);
1195 }
1196 }
1197
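/* For instance (a sketch of the transformation described above),
   a push such as
     (set (mem:DI (pre_dec:DI (reg sp))) (reg x))
   is rewritten, for the purposes of this pass only, into
     (parallel [(set (mem:DI (plus:DI (reg sp) (const_int -8))) (reg x))
                (set (reg sp) (plus:DI (reg sp) (const_int -8)))])  */
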
1198 /* Return true if a decl_or_value DV is a DECL or NULL. */
1199 static inline bool
1200 dv_is_decl_p (decl_or_value dv)
1201 {
1202 return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE;
1203 }
1204
1205 /* Return true if a decl_or_value is a VALUE rtl. */
1206 static inline bool
1207 dv_is_value_p (decl_or_value dv)
1208 {
1209 return dv && !dv_is_decl_p (dv);
1210 }
1211
1212 /* Return the decl in the decl_or_value. */
1213 static inline tree
1214 dv_as_decl (decl_or_value dv)
1215 {
1216 gcc_checking_assert (dv_is_decl_p (dv));
1217 return (tree) dv;
1218 }
1219
1220 /* Return the value in the decl_or_value. */
1221 static inline rtx
1222 dv_as_value (decl_or_value dv)
1223 {
1224 gcc_checking_assert (dv_is_value_p (dv));
1225 return (rtx) dv;
1226 }
1227
1228 /* Return the DEBUG_EXPR of a DEBUG_EXPR_DECL or the VALUE in DV. */
1229 static inline rtx
1230 dv_as_rtx (decl_or_value dv)
1231 {
1232 tree decl;
1233
1234 if (dv_is_value_p (dv))
1235 return dv_as_value (dv);
1236
1237 decl = dv_as_decl (dv);
1238
1239 gcc_checking_assert (TREE_CODE (decl) == DEBUG_EXPR_DECL);
1240 return DECL_RTL_KNOWN_SET (decl);
1241 }
1242
1243 /* Return the opaque pointer in the decl_or_value. */
1244 static inline void *
1245 dv_as_opaque (decl_or_value dv)
1246 {
1247 return dv;
1248 }
1249
1250 /* Return nonzero if a decl_or_value must not have more than one
1251 variable part. The returned value discriminates among various
1252 kinds of one-part DVs according to enum onepart_enum. */
1253 static inline onepart_enum_t
1254 dv_onepart_p (decl_or_value dv)
1255 {
1256 tree decl;
1257
1258 if (!MAY_HAVE_DEBUG_INSNS)
1259 return NOT_ONEPART;
1260
1261 if (dv_is_value_p (dv))
1262 return ONEPART_VALUE;
1263
1264 decl = dv_as_decl (dv);
1265
1266 if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
1267 return ONEPART_DEXPR;
1268
1269 if (target_for_debug_bind (decl) != NULL_TREE)
1270 return ONEPART_VDECL;
1271
1272 return NOT_ONEPART;
1273 }
1274
1275 /* Return the variable pool to be used for a dv of type ONEPART. */
1276 static inline alloc_pool
1277 onepart_pool (onepart_enum_t onepart)
1278 {
1279 return onepart ? valvar_pool : var_pool;
1280 }
1281
1282 /* Build a decl_or_value out of a decl. */
1283 static inline decl_or_value
1284 dv_from_decl (tree decl)
1285 {
1286 decl_or_value dv;
1287 dv = decl;
1288 gcc_checking_assert (dv_is_decl_p (dv));
1289 return dv;
1290 }
1291
1292 /* Build a decl_or_value out of a value. */
1293 static inline decl_or_value
1294 dv_from_value (rtx value)
1295 {
1296 decl_or_value dv;
1297 dv = value;
1298 gcc_checking_assert (dv_is_value_p (dv));
1299 return dv;
1300 }
1301
1302 /* Return a value or the decl of a debug_expr as a decl_or_value. */
1303 static inline decl_or_value
1304 dv_from_rtx (rtx x)
1305 {
1306 decl_or_value dv;
1307
1308 switch (GET_CODE (x))
1309 {
1310 case DEBUG_EXPR:
1311 dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x));
1312 gcc_checking_assert (DECL_RTL_KNOWN_SET (DEBUG_EXPR_TREE_DECL (x)) == x);
1313 break;
1314
1315 case VALUE:
1316 dv = dv_from_value (x);
1317 break;
1318
1319 default:
1320 gcc_unreachable ();
1321 }
1322
1323 return dv;
1324 }
1325
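/* A decl_or_value is used like a tagged pointer. A minimal usage
   sketch (process_value and process_decl are hypothetical helpers,
   not functions in this file):

     decl_or_value dv = dv_from_rtx (x);
     if (dv_is_value_p (dv))
       process_value (dv_as_value (dv));   /+ x was a VALUE +/
     else
       process_decl (dv_as_decl (dv));     /+ x was a DEBUG_EXPR's decl +/

   (inner comments written with + instead of * to keep this block valid).  */
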
1326 extern void debug_dv (decl_or_value dv);
1327
1328 DEBUG_FUNCTION void
1329 debug_dv (decl_or_value dv)
1330 {
1331 if (dv_is_value_p (dv))
1332 debug_rtx (dv_as_value (dv));
1333 else
1334 debug_generic_stmt (dv_as_decl (dv));
1335 }
1336
1337 typedef unsigned int dvuid;
1338
1339 /* Return the uid of DV. */
1340
1341 static inline dvuid
1342 dv_uid (decl_or_value dv)
1343 {
1344 if (dv_is_value_p (dv))
1345 return CSELIB_VAL_PTR (dv_as_value (dv))->uid;
1346 else
1347 return DECL_UID (dv_as_decl (dv));
1348 }
1349
1350 /* Compute the hash from the uid. */
1351
1352 static inline hashval_t
1353 dv_uid2hash (dvuid uid)
1354 {
1355 return uid;
1356 }
1357
1358 /* The hash function for a decl_or_value, for use in the hash tables. */
1359
1360 static inline hashval_t
1361 dv_htab_hash (decl_or_value dv)
1362 {
1363 return dv_uid2hash (dv_uid (dv));
1364 }
1365
1366 /* The hash function for variable_htab, computes the hash value
1367 from the declaration of variable X. */
1368
1369 static hashval_t
1370 variable_htab_hash (const void *x)
1371 {
1372 const_variable const v = (const_variable) x;
1373
1374 return dv_htab_hash (v->dv);
1375 }
1376
1377 /* Compare the declaration of variable X with declaration Y. */
1378
1379 static int
1380 variable_htab_eq (const void *x, const void *y)
1381 {
1382 const_variable const v = (const_variable) x;
1383 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
1384
1385 return (dv_as_opaque (v->dv) == dv_as_opaque (dv));
1386 }
1387
1388 static void loc_exp_dep_clear (variable var);
1389
1390 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
1391
1392 static void
1393 variable_htab_free (void *elem)
1394 {
1395 int i;
1396 variable var = (variable) elem;
1397 location_chain node, next;
1398
1399 gcc_checking_assert (var->refcount > 0);
1400
1401 var->refcount--;
1402 if (var->refcount > 0)
1403 return;
1404
1405 for (i = 0; i < var->n_var_parts; i++)
1406 {
1407 for (node = var->var_part[i].loc_chain; node; node = next)
1408 {
1409 next = node->next;
1410 pool_free (loc_chain_pool, node);
1411 }
1412 var->var_part[i].loc_chain = NULL;
1413 }
1414 if (var->onepart && VAR_LOC_1PAUX (var))
1415 {
1416 loc_exp_dep_clear (var);
1417 if (VAR_LOC_DEP_LST (var))
1418 VAR_LOC_DEP_LST (var)->pprev = NULL;
1419 XDELETE (VAR_LOC_1PAUX (var));
1420 /* These may be reused across functions, so reset
1421 e.g. NO_LOC_P. */
1422 if (var->onepart == ONEPART_DEXPR)
1423 set_dv_changed (var->dv, true);
1424 }
1425 pool_free (onepart_pool (var->onepart), var);
1426 }
1427
1428 /* Initialize the set (array) SET of attrs to empty lists. */
1429
1430 static void
1431 init_attrs_list_set (attrs *set)
1432 {
1433 int i;
1434
1435 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1436 set[i] = NULL;
1437 }
1438
1439 /* Make the list *LISTP empty. */
1440
1441 static void
1442 attrs_list_clear (attrs *listp)
1443 {
1444 attrs list, next;
1445
1446 for (list = *listp; list; list = next)
1447 {
1448 next = list->next;
1449 pool_free (attrs_pool, list);
1450 }
1451 *listp = NULL;
1452 }
1453
1454 /* Return the element of LIST whose DV and OFFSET match, or NULL if none. */
1455
1456 static attrs
1457 attrs_list_member (attrs list, decl_or_value dv, HOST_WIDE_INT offset)
1458 {
1459 for (; list; list = list->next)
1460 if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
1461 return list;
1462 return NULL;
1463 }
1464
1465 /* Insert the triplet DV, OFFSET, LOC at the head of the list *LISTP. */
1466
1467 static void
1468 attrs_list_insert (attrs *listp, decl_or_value dv,
1469 HOST_WIDE_INT offset, rtx loc)
1470 {
1471 attrs list;
1472
1473 list = (attrs) pool_alloc (attrs_pool);
1474 list->loc = loc;
1475 list->dv = dv;
1476 list->offset = offset;
1477 list->next = *listp;
1478 *listp = list;
1479 }
1480
1481 /* Copy all nodes from SRC and create a list *DSTP of the copies. */
1482
1483 static void
1484 attrs_list_copy (attrs *dstp, attrs src)
1485 {
1486 attrs n;
1487
1488 attrs_list_clear (dstp);
1489 for (; src; src = src->next)
1490 {
1491 n = (attrs) pool_alloc (attrs_pool);
1492 n->loc = src->loc;
1493 n->dv = src->dv;
1494 n->offset = src->offset;
1495 n->next = *dstp;
1496 *dstp = n;
1497 }
1498 }
1499
1500 /* Add all nodes from SRC which are not in *DSTP to *DSTP. */
1501
1502 static void
1503 attrs_list_union (attrs *dstp, attrs src)
1504 {
1505 for (; src; src = src->next)
1506 {
1507 if (!attrs_list_member (*dstp, src->dv, src->offset))
1508 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1509 }
1510 }
1511
1512 /* Combine nodes that are not onepart nodes from SRC and SRC2 into
1513 *DSTP. */
1514
1515 static void
1516 attrs_list_mpdv_union (attrs *dstp, attrs src, attrs src2)
1517 {
1518 gcc_assert (!*dstp);
1519 for (; src; src = src->next)
1520 {
1521 if (!dv_onepart_p (src->dv))
1522 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1523 }
1524 for (src = src2; src; src = src->next)
1525 {
1526 if (!dv_onepart_p (src->dv)
1527 && !attrs_list_member (*dstp, src->dv, src->offset))
1528 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1529 }
1530 }
1531
1532 /* Shared hashtable support. */
1533
1534 /* Return true if VARS is shared. */
1535
1536 static inline bool
1537 shared_hash_shared (shared_hash vars)
1538 {
1539 return vars->refcount > 1;
1540 }
1541
1542 /* Return the hash table for VARS. */
1543
1544 static inline htab_t
1545 shared_hash_htab (shared_hash vars)
1546 {
1547 return vars->htab;
1548 }
1549
1550 /* Return true if VAR is shared, possibly because VARS itself is shared. */
1551
1552 static inline bool
1553 shared_var_p (variable var, shared_hash vars)
1554 {
1555 /* Don't count an entry in the changed_variables table as a duplicate. */
1556 return ((var->refcount > 1 + (int) var->in_changed_variables)
1557 || shared_hash_shared (vars));
1558 }
1559
1560 /* Copy variables into a new hash table. */
1561
1562 static shared_hash
1563 shared_hash_unshare (shared_hash vars)
1564 {
1565 shared_hash new_vars = (shared_hash) pool_alloc (shared_hash_pool);
1566 gcc_assert (vars->refcount > 1);
1567 new_vars->refcount = 1;
1568 new_vars->htab
1569 = htab_create (htab_elements (vars->htab) + 3, variable_htab_hash,
1570 variable_htab_eq, variable_htab_free);
1571 vars_copy (new_vars->htab, vars->htab);
1572 vars->refcount--;
1573 return new_vars;
1574 }
1575
1576 /* Increment reference counter on VARS and return it. */
1577
1578 static inline shared_hash
1579 shared_hash_copy (shared_hash vars)
1580 {
1581 vars->refcount++;
1582 return vars;
1583 }
1584
1585 /* Decrement reference counter and destroy hash table if not shared
1586 anymore. */
1587
1588 static void
1589 shared_hash_destroy (shared_hash vars)
1590 {
1591 gcc_checking_assert (vars->refcount > 0);
1592 if (--vars->refcount == 0)
1593 {
1594 htab_delete (vars->htab);
1595 pool_free (shared_hash_pool, vars);
1596 }
1597 }
1598
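/* A minimal sketch of the copy-on-write discipline implemented by the
   functions above (an illustration, not code from this file):

     shared_hash vars = shared_hash_copy (other);  -- share: refcount++
     ...
     if (shared_hash_shared (vars))                -- refcount > 1?
       vars = shared_hash_unshare (vars);          -- get a private copy
     ... modify shared_hash_htab (vars) ...
     shared_hash_destroy (vars);                   -- refcount--, free at 0  */
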
1599 /* Unshare *PVARS if shared and return slot for DV. If INS is
1600 INSERT, insert it if not already present. */
1601
1602 static inline void **
1603 shared_hash_find_slot_unshare_1 (shared_hash *pvars, decl_or_value dv,
1604 hashval_t dvhash, enum insert_option ins)
1605 {
1606 if (shared_hash_shared (*pvars))
1607 *pvars = shared_hash_unshare (*pvars);
1608 return htab_find_slot_with_hash (shared_hash_htab (*pvars), dv, dvhash, ins);
1609 }
1610
1611 static inline void **
1612 shared_hash_find_slot_unshare (shared_hash *pvars, decl_or_value dv,
1613 enum insert_option ins)
1614 {
1615 return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
1616 }
1617
1618 /* Return slot for DV, if it is already present in the hash table.
1619 If it is not present, insert it only if VARS is not shared, otherwise
1620 return NULL. */
1621
1622 static inline void **
1623 shared_hash_find_slot_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1624 {
1625 return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
1626 shared_hash_shared (vars)
1627 ? NO_INSERT : INSERT);
1628 }
1629
1630 static inline void **
1631 shared_hash_find_slot (shared_hash vars, decl_or_value dv)
1632 {
1633 return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
1634 }
1635
1636 /* Return slot for DV only if it is already present in the hash table. */
1637
1638 static inline void **
1639 shared_hash_find_slot_noinsert_1 (shared_hash vars, decl_or_value dv,
1640 hashval_t dvhash)
1641 {
1642 return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
1643 NO_INSERT);
1644 }
1645
1646 static inline void **
1647 shared_hash_find_slot_noinsert (shared_hash vars, decl_or_value dv)
1648 {
1649 return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
1650 }
1651
1652 /* Return variable for DV or NULL if not already present in the hash
1653 table. */
1654
1655 static inline variable
1656 shared_hash_find_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1657 {
1658 return (variable) htab_find_with_hash (shared_hash_htab (vars), dv, dvhash);
1659 }
1660
1661 static inline variable
1662 shared_hash_find (shared_hash vars, decl_or_value dv)
1663 {
1664 return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
1665 }
1666
1667 /* Return true if TVAL is better than CVAL as a canonical value. We
1668 choose lowest-numbered VALUEs, using the RTX address as a
1669 tie-breaker. The idea is to arrange them into a star topology,
1670 such that all of them are at most one step away from the canonical
1671 value, and the canonical value has backlinks to all of them, in
1672 addition to all the actual locations. We don't enforce this
1673 topology throughout the entire dataflow analysis, though.
1674 */
1675
1676 static inline bool
1677 canon_value_cmp (rtx tval, rtx cval)
1678 {
1679 return !cval
1680 || CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
1681 }
1682
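/* For example (an illustration of the star topology): among VALUEs
   with uids 7, 3 and 9, the VALUE with uid 3 is canonical; 7 and 9
   each record one location pointing at 3, and 3 has backlinks to
   both of them in addition to its actual locations.  */
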
1683 static bool dst_can_be_shared;
1684
1685 /* Return a copy of the variable VAR and insert it into the dataflow set SET. */
1686
1687 static void **
1688 unshare_variable (dataflow_set *set, void **slot, variable var,
1689 enum var_init_status initialized)
1690 {
1691 variable new_var;
1692 int i;
1693
1694 new_var = (variable) pool_alloc (onepart_pool (var->onepart));
1695 new_var->dv = var->dv;
1696 new_var->refcount = 1;
1697 var->refcount--;
1698 new_var->n_var_parts = var->n_var_parts;
1699 new_var->onepart = var->onepart;
1700 new_var->in_changed_variables = false;
1701
1702 if (! flag_var_tracking_uninit)
1703 initialized = VAR_INIT_STATUS_INITIALIZED;
1704
1705 for (i = 0; i < var->n_var_parts; i++)
1706 {
1707 location_chain node;
1708 location_chain *nextp;
1709
1710 if (i == 0 && var->onepart)
1711 {
1712 /* One-part auxiliary data is only used while emitting
1713 notes, so propagate it to the new variable in the active
1714 dataflow set. If we're not emitting notes, this will be
1715 a no-op. */
1716 gcc_checking_assert (!VAR_LOC_1PAUX (var) || emit_notes);
1717 VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (var);
1718 VAR_LOC_1PAUX (var) = NULL;
1719 }
1720 else
1721 VAR_PART_OFFSET (new_var, i) = VAR_PART_OFFSET (var, i);
1722 nextp = &new_var->var_part[i].loc_chain;
1723 for (node = var->var_part[i].loc_chain; node; node = node->next)
1724 {
1725 location_chain new_lc;
1726
1727 new_lc = (location_chain) pool_alloc (loc_chain_pool);
1728 new_lc->next = NULL;
1729 if (node->init > initialized)
1730 new_lc->init = node->init;
1731 else
1732 new_lc->init = initialized;
1733 if (node->set_src && !(MEM_P (node->set_src)))
1734 new_lc->set_src = node->set_src;
1735 else
1736 new_lc->set_src = NULL;
1737 new_lc->loc = node->loc;
1738
1739 *nextp = new_lc;
1740 nextp = &new_lc->next;
1741 }
1742
1743 new_var->var_part[i].cur_loc = var->var_part[i].cur_loc;
1744 }
1745
1746 dst_can_be_shared = false;
1747 if (shared_hash_shared (set->vars))
1748 slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
1749 else if (set->traversed_vars && set->vars != set->traversed_vars)
1750 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
1751 *slot = new_var;
1752 if (var->in_changed_variables)
1753 {
1754 void **cslot
1755 = htab_find_slot_with_hash (changed_variables, var->dv,
1756 dv_htab_hash (var->dv), NO_INSERT);
1757 gcc_assert (*cslot == (void *) var);
1758 var->in_changed_variables = false;
1759 variable_htab_free (var);
1760 *cslot = new_var;
1761 new_var->in_changed_variables = true;
1762 }
1763 return slot;
1764 }
1765
1766 /* Copy all variables from hash table SRC to hash table DST. */
1767
1768 static void
1769 vars_copy (htab_t dst, htab_t src)
1770 {
1771 htab_iterator hi;
1772 variable var;
1773
1774 FOR_EACH_HTAB_ELEMENT (src, var, variable, hi)
1775 {
1776 void **dstp;
1777 var->refcount++;
1778 dstp = htab_find_slot_with_hash (dst, var->dv,
1779 dv_htab_hash (var->dv),
1780 INSERT);
1781 *dstp = var;
1782 }
1783 }
1784
1785 /* Map a decl to its main debug decl. */
1786
1787 static inline tree
1788 var_debug_decl (tree decl)
1789 {
1790 if (decl && DECL_P (decl)
1791 && DECL_DEBUG_EXPR_IS_FROM (decl))
1792 {
1793 tree debugdecl = DECL_DEBUG_EXPR (decl);
1794 if (debugdecl && DECL_P (debugdecl))
1795 decl = debugdecl;
1796 }
1797
1798 return decl;
1799 }
1800
1801 /* Set the register LOC to contain DV, OFFSET. */
1802
1803 static void
1804 var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1805 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1806 enum insert_option iopt)
1807 {
1808 attrs node;
1809 bool decl_p = dv_is_decl_p (dv);
1810
1811 if (decl_p)
1812 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1813
1814 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1815 if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
1816 && node->offset == offset)
1817 break;
1818 if (!node)
1819 attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
1820 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1821 }
1822
1823 /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
1824
1825 static void
1826 var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1827 rtx set_src)
1828 {
1829 tree decl = REG_EXPR (loc);
1830 HOST_WIDE_INT offset = REG_OFFSET (loc);
1831
1832 var_reg_decl_set (set, loc, initialized,
1833 dv_from_decl (decl), offset, set_src, INSERT);
1834 }
1835
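/* Return the recorded initialization status of location LOC of the
   variable or value DV in dataflow set SET, or VAR_INIT_STATUS_UNKNOWN
   if none is recorded.  When flag_var_tracking_uninit is clear,
   everything is considered VAR_INIT_STATUS_INITIALIZED.  */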
1836 static enum var_init_status
1837 get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
1838 {
1839 variable var;
1840 int i;
1841 enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;
1842
1843 if (! flag_var_tracking_uninit)
1844 return VAR_INIT_STATUS_INITIALIZED;
1845
1846 var = shared_hash_find (set->vars, dv);
1847 if (var)
1848 {
1849 for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
1850 {
1851 location_chain nextp;
1852 for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
1853 if (rtx_equal_p (nextp->loc, loc))
1854 {
1855 ret_val = nextp->init;
1856 break;
1857 }
1858 }
1859 }
1860
1861 return ret_val;
1862 }
1863
1864 /* Delete current content of register LOC in dataflow set SET and set
1865 the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
1866 MODIFY is true, any other live copies of the same variable part are
1867 also deleted from the dataflow set, otherwise the variable part is
1868 assumed to be copied from another location holding the same
1869 part. */
1870
1871 static void
1872 var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1873 enum var_init_status initialized, rtx set_src)
1874 {
1875 tree decl = REG_EXPR (loc);
1876 HOST_WIDE_INT offset = REG_OFFSET (loc);
1877 attrs node, next;
1878 attrs *nextp;
1879
1880 decl = var_debug_decl (decl);
1881
1882 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1883 initialized = get_init_value (set, loc, dv_from_decl (decl));
1884
1885 nextp = &set->regs[REGNO (loc)];
1886 for (node = *nextp; node; node = next)
1887 {
1888 next = node->next;
1889 if (dv_as_opaque (node->dv) != decl || node->offset != offset)
1890 {
1891 delete_variable_part (set, node->loc, node->dv, node->offset);
1892 pool_free (attrs_pool, node);
1893 *nextp = next;
1894 }
1895 else
1896 {
1897 node->loc = loc;
1898 nextp = &node->next;
1899 }
1900 }
1901 if (modify)
1902 clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
1903 var_reg_set (set, loc, initialized, set_src);
1904 }
1905
1906 /* Delete the association of register LOC in dataflow set SET with any
1907 variables that aren't onepart. If CLOBBER is true, also delete any
1908 other live copies of the same variable part, and delete the
1909 association with onepart dvs too. */
1910
1911 static void
1912 var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
1913 {
1914 attrs *nextp = &set->regs[REGNO (loc)];
1915 attrs node, next;
1916
1917 if (clobber)
1918 {
1919 tree decl = REG_EXPR (loc);
1920 HOST_WIDE_INT offset = REG_OFFSET (loc);
1921
1922 decl = var_debug_decl (decl);
1923
1924 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
1925 }
1926
1927 for (node = *nextp; node; node = next)
1928 {
1929 next = node->next;
1930 if (clobber || !dv_onepart_p (node->dv))
1931 {
1932 delete_variable_part (set, node->loc, node->dv, node->offset);
1933 pool_free (attrs_pool, node);
1934 *nextp = next;
1935 }
1936 else
1937 nextp = &node->next;
1938 }
1939 }
1940
1941 /* Delete content of register with number REGNO in dataflow set SET. */
1942
1943 static void
1944 var_regno_delete (dataflow_set *set, int regno)
1945 {
1946 attrs *reg = &set->regs[regno];
1947 attrs node, next;
1948
1949 for (node = *reg; node; node = next)
1950 {
1951 next = node->next;
1952 delete_variable_part (set, node->loc, node->dv, node->offset);
1953 pool_free (attrs_pool, node);
1954 }
1955 *reg = NULL;
1956 }
1957
1958 /* Hold parameters for the hashtab traversal function
1959 drop_overlapping_mem_locs, see below. */
1960
1961 struct overlapping_mems
1962 {
1963 dataflow_set *set;
1964 rtx loc, addr;
1965 };
1966
1967 /* Remove all MEMs that overlap with COMS->LOC from the location list
1968 of a hash table entry for a value. COMS->ADDR must be a
1969 canonicalized form of COMS->LOC's address, and COMS->LOC must be
1970 canonicalized itself. */
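/* For illustration (with hypothetical RTL): if COMS->LOC is a store
   destination such as (mem:SI (reg/f:SI 7 sp)), then any value whose
   location list contains a MEM that canon_true_dependence reports as
   possibly overlapping it loses that MEM below, since the store may
   have changed the contents at that address.  */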
1971
1972 static int
1973 drop_overlapping_mem_locs (void **slot, void *data)
1974 {
1975 struct overlapping_mems *coms = (struct overlapping_mems *)data;
1976 dataflow_set *set = coms->set;
1977 rtx mloc = coms->loc, addr = coms->addr;
1978 variable var = (variable) *slot;
1979
1980 if (var->onepart == ONEPART_VALUE)
1981 {
1982 location_chain loc, *locp;
1983 bool changed = false;
1984 rtx cur_loc;
1985
1986 gcc_assert (var->n_var_parts == 1);
1987
1988 if (shared_var_p (var, set->vars))
1989 {
1990 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
1991 if (GET_CODE (loc->loc) == MEM
1992 && canon_true_dependence (mloc, GET_MODE (mloc), addr,
1993 loc->loc, NULL))
1994 break;
1995
1996 if (!loc)
1997 return 1;
1998
1999 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
2000 var = (variable)*slot;
2001 gcc_assert (var->n_var_parts == 1);
2002 }
2003
2004 if (VAR_LOC_1PAUX (var))
2005 cur_loc = VAR_LOC_FROM (var);
2006 else
2007 cur_loc = var->var_part[0].cur_loc;
2008
2009 for (locp = &var->var_part[0].loc_chain, loc = *locp;
2010 loc; loc = *locp)
2011 {
2012 if (GET_CODE (loc->loc) != MEM
2013 || !canon_true_dependence (mloc, GET_MODE (mloc), addr,
2014 loc->loc, NULL))
2015 {
2016 locp = &loc->next;
2017 continue;
2018 }
2019
2020 *locp = loc->next;
2021 /* If we have deleted the location that was last emitted,
2022 we have to emit a new location, so add the variable to the
2023 set of changed variables. */
2024 if (cur_loc == loc->loc)
2025 {
2026 changed = true;
2027 var->var_part[0].cur_loc = NULL;
2028 if (VAR_LOC_1PAUX (var))
2029 VAR_LOC_FROM (var) = NULL;
2030 }
2031 pool_free (loc_chain_pool, loc);
2032 }
2033
2034 if (!var->var_part[0].loc_chain)
2035 {
2036 var->n_var_parts--;
2037 changed = true;
2038 }
2039 if (changed)
2040 variable_was_changed (var, set);
2041 }
2042
2043 return 1;
2044 }
2045
2046 /* Remove from SET all VALUE bindings to MEMs that overlap with LOC. */
2047
2048 static void
2049 clobber_overlapping_mems (dataflow_set *set, rtx loc)
2050 {
2051 struct overlapping_mems coms;
2052
2053 coms.set = set;
2054 coms.loc = canon_rtx (loc);
2055 coms.addr = canon_rtx (get_addr (XEXP (loc, 0)));
2056
2057 set->traversed_vars = set->vars;
2058 htab_traverse (shared_hash_htab (set->vars),
2059 drop_overlapping_mem_locs, &coms);
2060 set->traversed_vars = NULL;
2061 }
2062
2063 /* Set the location of DV, OFFSET as the MEM LOC. */
2064
2065 static void
2066 var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2067 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
2068 enum insert_option iopt)
2069 {
2070 if (dv_is_decl_p (dv))
2071 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
2072
2073 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
2074 }
2075
2076 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
2077 SET to LOC.
2078 Adjust the address first if it is stack pointer based. */
2079
2080 static void
2081 var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2082 rtx set_src)
2083 {
2084 tree decl = MEM_EXPR (loc);
2085 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2086
2087 var_mem_decl_set (set, loc, initialized,
2088 dv_from_decl (decl), offset, set_src, INSERT);
2089 }
2090
2091 /* Delete and set the location part of variable MEM_EXPR (LOC) in
2092 dataflow set SET to LOC. If MODIFY is true, any other live copies
2093 of the same variable part are also deleted from the dataflow set,
2094 otherwise the variable part is assumed to be copied from another
2095 location holding the same part.
2096 Adjust the address first if it is stack pointer based. */
2097
2098 static void
2099 var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
2100 enum var_init_status initialized, rtx set_src)
2101 {
2102 tree decl = MEM_EXPR (loc);
2103 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2104
2105 clobber_overlapping_mems (set, loc);
2106 decl = var_debug_decl (decl);
2107
2108 if (initialized == VAR_INIT_STATUS_UNKNOWN)
2109 initialized = get_init_value (set, loc, dv_from_decl (decl));
2110
2111 if (modify)
2112 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
2113 var_mem_set (set, loc, initialized, set_src);
2114 }
2115
2116 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
2117 true, also delete any other live copies of the same variable part.
2118 Adjust the address first if it is stack pointer based. */
2119
2120 static void
2121 var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
2122 {
2123 tree decl = MEM_EXPR (loc);
2124 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2125
2126 clobber_overlapping_mems (set, loc);
2127 decl = var_debug_decl (decl);
2128 if (clobber)
2129 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
2130 delete_variable_part (set, loc, dv_from_decl (decl), offset);
2131 }
2132
2133 /* Return true if LOC should not be expanded for location expressions,
2134 or used in them. */
2135
2136 static inline bool
2137 unsuitable_loc (rtx loc)
2138 {
2139 switch (GET_CODE (loc))
2140 {
2141 case PC:
2142 case SCRATCH:
2143 case CC0:
2144 case ASM_INPUT:
2145 case ASM_OPERANDS:
2146 return true;
2147
2148 default:
2149 return false;
2150 }
2151 }
2152
2153 /* Bind VAL to LOC in SET. If MODIFIED, detach LOC from any values
2154 bound to it. */
2155
2156 static inline void
2157 val_bind (dataflow_set *set, rtx val, rtx loc, bool modified)
2158 {
2159 if (REG_P (loc))
2160 {
2161 if (modified)
2162 var_regno_delete (set, REGNO (loc));
2163 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2164 dv_from_value (val), 0, NULL_RTX, INSERT);
2165 }
2166 else if (MEM_P (loc))
2167 {
2168 struct elt_loc_list *l = CSELIB_VAL_PTR (val)->locs;
2169
2170 if (modified)
2171 clobber_overlapping_mems (set, loc);
2172
2173 if (l && GET_CODE (l->loc) == VALUE)
2174 l = canonical_cselib_val (CSELIB_VAL_PTR (l->loc))->locs;
2175
2176 /* If this MEM is a global constant, we don't need it in the
2177 dynamic tables. ??? We should test this before emitting the
2178 micro-op in the first place. */
2179 while (l)
2180 if (GET_CODE (l->loc) == MEM && XEXP (l->loc, 0) == XEXP (loc, 0))
2181 break;
2182 else
2183 l = l->next;
2184
2185 if (!l)
2186 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2187 dv_from_value (val), 0, NULL_RTX, INSERT);
2188 }
2189 else
2190 {
2191 /* Other kinds of equivalences are necessarily static, at least
2192 so long as we do not perform substitutions while merging
2193 expressions. */
2194 gcc_unreachable ();
2195 set_variable_part (set, loc, dv_from_value (val), 0,
2196 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2197 }
2198 }
2199
2200 /* Bind a value to a location it was just stored in. If MODIFIED
2201 holds, assume the location was modified, detaching it from any
2202 values bound to it. */
2203
2204 static void
2205 val_store (dataflow_set *set, rtx val, rtx loc, rtx insn, bool modified)
2206 {
2207 cselib_val *v = CSELIB_VAL_PTR (val);
2208
2209 gcc_assert (cselib_preserved_value_p (v));
2210
2211 if (dump_file)
2212 {
2213 fprintf (dump_file, "%i: ", insn ? INSN_UID (insn) : 0);
2214 print_inline_rtx (dump_file, loc, 0);
2215 fprintf (dump_file, " evaluates to ");
2216 print_inline_rtx (dump_file, val, 0);
2217 if (v->locs)
2218 {
2219 struct elt_loc_list *l;
2220 for (l = v->locs; l; l = l->next)
2221 {
2222 fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
2223 print_inline_rtx (dump_file, l->loc, 0);
2224 }
2225 }
2226 fprintf (dump_file, "\n");
2227 }
2228
2229 gcc_checking_assert (!unsuitable_loc (loc));
2230
2231 val_bind (set, val, loc, modified);
2232 }
2233
2234 /* Reset this node, detaching all its equivalences.  Remaining
2235 equivalences are redirected to the set's canonical value. */
2236
2237 static void
2238 val_reset (dataflow_set *set, decl_or_value dv)
2239 {
2240 variable var = shared_hash_find (set->vars, dv);
2241 location_chain node;
2242 rtx cval;
2243
2244 if (!var || !var->n_var_parts)
2245 return;
2246
2247 gcc_assert (var->n_var_parts == 1);
2248
2249 cval = NULL;
2250 for (node = var->var_part[0].loc_chain; node; node = node->next)
2251 if (GET_CODE (node->loc) == VALUE
2252 && canon_value_cmp (node->loc, cval))
2253 cval = node->loc;
2254
2255 for (node = var->var_part[0].loc_chain; node; node = node->next)
2256 if (GET_CODE (node->loc) == VALUE && cval != node->loc)
2257 {
2258 /* Redirect the equivalence link to the new canonical
2259 value, or simply remove it if it would point at
2260 itself. */
2261 if (cval)
2262 set_variable_part (set, cval, dv_from_value (node->loc),
2263 0, node->init, node->set_src, NO_INSERT);
2264 delete_variable_part (set, dv_as_value (dv),
2265 dv_from_value (node->loc), 0);
2266 }
2267
2268 if (cval)
2269 {
2270 decl_or_value cdv = dv_from_value (cval);
2271
2272 /* Keep the remaining values connected, accumulating links
2273 in the canonical value. */
2274 for (node = var->var_part[0].loc_chain; node; node = node->next)
2275 {
2276 if (node->loc == cval)
2277 continue;
2278 else if (GET_CODE (node->loc) == REG)
2279 var_reg_decl_set (set, node->loc, node->init, cdv, 0,
2280 node->set_src, NO_INSERT);
2281 else if (GET_CODE (node->loc) == MEM)
2282 var_mem_decl_set (set, node->loc, node->init, cdv, 0,
2283 node->set_src, NO_INSERT);
2284 else
2285 set_variable_part (set, node->loc, cdv, 0,
2286 node->init, node->set_src, NO_INSERT);
2287 }
2288 }
2289
2290 /* We remove this last, to make sure that the canonical value is not
2291 removed to the point of requiring reinsertion. */
2292 if (cval)
2293 delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);
2294
2295 clobber_variable_part (set, NULL, dv, 0, NULL);
2296 }
2297
2298 /* Find the values in a given location LOC and map VAL to another
2299 value, if it is unique, or add LOC as one of the locations
2300 holding VAL. */
2301
2302 static void
2303 val_resolve (dataflow_set *set, rtx val, rtx loc, rtx insn)
2304 {
2305 decl_or_value dv = dv_from_value (val);
2306
2307 if (dump_file && (dump_flags & TDF_DETAILS))
2308 {
2309 if (insn)
2310 fprintf (dump_file, "%i: ", INSN_UID (insn));
2311 else
2312 fprintf (dump_file, "head: ");
2313 print_inline_rtx (dump_file, val, 0);
2314 fputs (" is at ", dump_file);
2315 print_inline_rtx (dump_file, loc, 0);
2316 fputc ('\n', dump_file);
2317 }
2318
2319 val_reset (set, dv);
2320
2321 gcc_checking_assert (!unsuitable_loc (loc));
2322
2323 if (REG_P (loc))
2324 {
2325 attrs node, found = NULL;
2326
2327 for (node = set->regs[REGNO (loc)]; node; node = node->next)
2328 if (dv_is_value_p (node->dv)
2329 && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
2330 {
2331 found = node;
2332
2333 /* Map incoming equivalences. ??? Wouldn't it be nice if
2334 we just started sharing the location lists? Maybe a
2335 circular list ending at the value itself or some
2336 such. */
2337 set_variable_part (set, dv_as_value (node->dv),
2338 dv_from_value (val), node->offset,
2339 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2340 set_variable_part (set, val, node->dv, node->offset,
2341 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2342 }
2343
2344 /* If we didn't find any equivalence, we need to remember that
2345 this value is held in the named register. */
2346 if (found)
2347 return;
2348 }
2349 /* ??? Attempt to find and merge equivalent MEMs or other
2350 expressions too. */
2351
2352 val_bind (set, val, loc, false);
2353 }
2354
2355 /* Initialize dataflow set SET to be empty; its variable table
2356 starts out sharing empty_shared_hash. */
2357
2358 static void
2359 dataflow_set_init (dataflow_set *set)
2360 {
2361 init_attrs_list_set (set->regs);
2362 set->vars = shared_hash_copy (empty_shared_hash);
2363 set->stack_adjust = 0;
2364 set->traversed_vars = NULL;
2365 }
2366
2367 /* Delete the contents of dataflow set SET. */
2368
2369 static void
2370 dataflow_set_clear (dataflow_set *set)
2371 {
2372 int i;
2373
2374 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2375 attrs_list_clear (&set->regs[i]);
2376
2377 shared_hash_destroy (set->vars);
2378 set->vars = shared_hash_copy (empty_shared_hash);
2379 }
2380
2381 /* Copy the contents of dataflow set SRC to DST. */
2382
2383 static void
2384 dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
2385 {
2386 int i;
2387
2388 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2389 attrs_list_copy (&dst->regs[i], src->regs[i]);
2390
2391 shared_hash_destroy (dst->vars);
2392 dst->vars = shared_hash_copy (src->vars);
2393 dst->stack_adjust = src->stack_adjust;
2394 }
2395
2396 /* Information for merging lists of locations for a given offset
2397 of a variable. */
2398 struct variable_union_info
2399 {
2400 /* Node of the location chain. */
2401 location_chain lc;
2402
2403 /* The sum of positions in the input chains. */
2404 int pos;
2405
2406 /* The position in the chain of DST dataflow set. */
2407 int pos_dst;
2408 };
2409
2410 /* Buffer for location list sorting and its allocated size. */
2411 static struct variable_union_info *vui_vec;
2412 static int vui_allocated;
2413
2414 /* Compare function for qsort, order the structures by POS element. */
2415
2416 static int
2417 variable_union_info_cmp_pos (const void *n1, const void *n2)
2418 {
2419 const struct variable_union_info *const i1 =
2420 (const struct variable_union_info *) n1;
2421 const struct variable_union_info *const i2 =
2422 (const struct variable_union_info *) n2;
2423
2424 if (i1->pos != i2->pos)
2425 return i1->pos - i2->pos;
2426
2427 return (i1->pos_dst - i2->pos_dst);
2428 }
2429
2430 /* Compute the union of the location parts of variable SRC and of
2431 the same variable in dataflow set SET. Compute a "sorted" union of
2432 the location chains for common offsets, i.e. the locations of a
2433 variable part are sorted by a priority, where the priority is the
2434 sum of the positions in the two chains (if a location is only in
2435 one chain, its position in the other is defined to be larger than
2436 the length of the chains). When we update the location parts, the
2437 newest location is at the beginning of the chain, so the described
2438 "sorted" union keeps the newest locations at the beginning. */
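/* A small worked example of the priorities, with hypothetical
   locations X, Y and Z: if the SRC chain is (X Y) and the DST chain
   is (Y Z), then src_l = dst_l = 2 and the code below gives Y
   priority 1 + 0 = 1 (the sum of its positions in the two chains),
   X priority 0 + src_l + dst_l = 4 and Z priority
   1 + src_l + dst_l = 5, so the union comes out as (Y X Z).  */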
2439
2440 static int
2441 variable_union (variable src, dataflow_set *set)
2442 {
2443 variable dst;
2444 void **dstp;
2445 int i, j, k;
2446
2447 dstp = shared_hash_find_slot (set->vars, src->dv);
2448 if (!dstp || !*dstp)
2449 {
2450 src->refcount++;
2451
2452 dst_can_be_shared = false;
2453 if (!dstp)
2454 dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);
2455
2456 *dstp = src;
2457
2458 /* Continue traversing the hash table. */
2459 return 1;
2460 }
2461 else
2462 dst = (variable) *dstp;
2463
2464 gcc_assert (src->n_var_parts);
2465 gcc_checking_assert (src->onepart == dst->onepart);
2466
2467 /* We can combine one-part variables very efficiently, because their
2468 entries are in canonical order. */
2469 if (src->onepart)
2470 {
2471 location_chain *nodep, dnode, snode;
2472
2473 gcc_assert (src->n_var_parts == 1
2474 && dst->n_var_parts == 1);
2475
2476 snode = src->var_part[0].loc_chain;
2477 gcc_assert (snode);
2478
2479 restart_onepart_unshared:
2480 nodep = &dst->var_part[0].loc_chain;
2481 dnode = *nodep;
2482 gcc_assert (dnode);
2483
2484 while (snode)
2485 {
2486 int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;
2487
2488 if (r > 0)
2489 {
2490 location_chain nnode;
2491
2492 if (shared_var_p (dst, set->vars))
2493 {
2494 dstp = unshare_variable (set, dstp, dst,
2495 VAR_INIT_STATUS_INITIALIZED);
2496 dst = (variable)*dstp;
2497 goto restart_onepart_unshared;
2498 }
2499
2500 *nodep = nnode = (location_chain) pool_alloc (loc_chain_pool);
2501 nnode->loc = snode->loc;
2502 nnode->init = snode->init;
2503 if (!snode->set_src || MEM_P (snode->set_src))
2504 nnode->set_src = NULL;
2505 else
2506 nnode->set_src = snode->set_src;
2507 nnode->next = dnode;
2508 dnode = nnode;
2509 }
2510 else if (r == 0)
2511 gcc_checking_assert (rtx_equal_p (dnode->loc, snode->loc));
2512
2513 if (r >= 0)
2514 snode = snode->next;
2515
2516 nodep = &dnode->next;
2517 dnode = *nodep;
2518 }
2519
2520 return 1;
2521 }
2522
2523 gcc_checking_assert (!src->onepart);
2524
2525 /* Count the number of location parts, result is K. */
2526 for (i = 0, j = 0, k = 0;
2527 i < src->n_var_parts && j < dst->n_var_parts; k++)
2528 {
2529 if (VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
2530 {
2531 i++;
2532 j++;
2533 }
2534 else if (VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
2535 i++;
2536 else
2537 j++;
2538 }
2539 k += src->n_var_parts - i;
2540 k += dst->n_var_parts - j;
2541
2542 /* We track only variables whose size is <= MAX_VAR_PARTS bytes,
2543 so there are at most MAX_VAR_PARTS different offsets. */
2544 gcc_checking_assert (dst->onepart ? k == 1 : k <= MAX_VAR_PARTS);
2545
2546 if (dst->n_var_parts != k && shared_var_p (dst, set->vars))
2547 {
2548 dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
2549 dst = (variable)*dstp;
2550 }
2551
2552 i = src->n_var_parts - 1;
2553 j = dst->n_var_parts - 1;
2554 dst->n_var_parts = k;
2555
2556 for (k--; k >= 0; k--)
2557 {
2558 location_chain node, node2;
2559
2560 if (i >= 0 && j >= 0
2561 && VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
2562 {
2563 /* Compute the "sorted" union of the chains, i.e. the locations which
2564 are in both chains go first, they are sorted by the sum of
2565 positions in the chains. */
2566 int dst_l, src_l;
2567 int ii, jj, n;
2568 struct variable_union_info *vui;
2569
2570 /* If DST is shared compare the location chains.
2571 If they are different we will modify the chain in DST with
2572 high probability so make a copy of DST. */
2573 if (shared_var_p (dst, set->vars))
2574 {
2575 for (node = src->var_part[i].loc_chain,
2576 node2 = dst->var_part[j].loc_chain; node && node2;
2577 node = node->next, node2 = node2->next)
2578 {
2579 if (!((REG_P (node2->loc)
2580 && REG_P (node->loc)
2581 && REGNO (node2->loc) == REGNO (node->loc))
2582 || rtx_equal_p (node2->loc, node->loc)))
2583 {
2584 if (node2->init < node->init)
2585 node2->init = node->init;
2586 break;
2587 }
2588 }
2589 if (node || node2)
2590 {
2591 dstp = unshare_variable (set, dstp, dst,
2592 VAR_INIT_STATUS_UNKNOWN);
2593 dst = (variable)*dstp;
2594 }
2595 }
2596
2597 src_l = 0;
2598 for (node = src->var_part[i].loc_chain; node; node = node->next)
2599 src_l++;
2600 dst_l = 0;
2601 for (node = dst->var_part[j].loc_chain; node; node = node->next)
2602 dst_l++;
2603
2604 if (dst_l == 1)
2605 {
2606 /* The most common case, much simpler, no qsort is needed. */
2607 location_chain dstnode = dst->var_part[j].loc_chain;
2608 dst->var_part[k].loc_chain = dstnode;
2609 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
2610 node2 = dstnode;
2611 for (node = src->var_part[i].loc_chain; node; node = node->next)
2612 if (!((REG_P (dstnode->loc)
2613 && REG_P (node->loc)
2614 && REGNO (dstnode->loc) == REGNO (node->loc))
2615 || rtx_equal_p (dstnode->loc, node->loc)))
2616 {
2617 location_chain new_node;
2618
2619 /* Copy the location from SRC. */
2620 new_node = (location_chain) pool_alloc (loc_chain_pool);
2621 new_node->loc = node->loc;
2622 new_node->init = node->init;
2623 if (!node->set_src || MEM_P (node->set_src))
2624 new_node->set_src = NULL;
2625 else
2626 new_node->set_src = node->set_src;
2627 node2->next = new_node;
2628 node2 = new_node;
2629 }
2630 node2->next = NULL;
2631 }
2632 else
2633 {
2634 if (src_l + dst_l > vui_allocated)
2635 {
2636 vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
2637 vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
2638 vui_allocated);
2639 }
2640 vui = vui_vec;
2641
2642 /* Fill in the locations from DST. */
2643 for (node = dst->var_part[j].loc_chain, jj = 0; node;
2644 node = node->next, jj++)
2645 {
2646 vui[jj].lc = node;
2647 vui[jj].pos_dst = jj;
2648
2649 /* Initialize POS above any sum of two valid positions; it is lowered below if the location is also found in SRC. */
2650 vui[jj].pos = jj + src_l + dst_l;
2651 }
2652
2653 /* Fill in the locations from SRC. */
2654 n = dst_l;
2655 for (node = src->var_part[i].loc_chain, ii = 0; node;
2656 node = node->next, ii++)
2657 {
2658 /* Find location from NODE. */
2659 for (jj = 0; jj < dst_l; jj++)
2660 {
2661 if ((REG_P (vui[jj].lc->loc)
2662 && REG_P (node->loc)
2663 && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
2664 || rtx_equal_p (vui[jj].lc->loc, node->loc))
2665 {
2666 vui[jj].pos = jj + ii;
2667 break;
2668 }
2669 }
2670 if (jj >= dst_l) /* The location has not been found. */
2671 {
2672 location_chain new_node;
2673
2674 /* Copy the location from SRC. */
2675 new_node = (location_chain) pool_alloc (loc_chain_pool);
2676 new_node->loc = node->loc;
2677 new_node->init = node->init;
2678 if (!node->set_src || MEM_P (node->set_src))
2679 new_node->set_src = NULL;
2680 else
2681 new_node->set_src = node->set_src;
2682 vui[n].lc = new_node;
2683 vui[n].pos_dst = src_l + dst_l;
2684 vui[n].pos = ii + src_l + dst_l;
2685 n++;
2686 }
2687 }
2688
2689 if (dst_l == 2)
2690 {
2691 /* A special case that is still very common. For dst_l == 2,
2692 all entries dst_l ... n-1 are already sorted, since for i >= dst_l
2693 vui[i].pos == i + src_l + dst_l. */
2694 if (vui[0].pos > vui[1].pos)
2695 {
2696 /* Order should be 1, 0, 2... */
2697 dst->var_part[k].loc_chain = vui[1].lc;
2698 vui[1].lc->next = vui[0].lc;
2699 if (n >= 3)
2700 {
2701 vui[0].lc->next = vui[2].lc;
2702 vui[n - 1].lc->next = NULL;
2703 }
2704 else
2705 vui[0].lc->next = NULL;
2706 ii = 3;
2707 }
2708 else
2709 {
2710 dst->var_part[k].loc_chain = vui[0].lc;
2711 if (n >= 3 && vui[2].pos < vui[1].pos)
2712 {
2713 /* Order should be 0, 2, 1, 3... */
2714 vui[0].lc->next = vui[2].lc;
2715 vui[2].lc->next = vui[1].lc;
2716 if (n >= 4)
2717 {
2718 vui[1].lc->next = vui[3].lc;
2719 vui[n - 1].lc->next = NULL;
2720 }
2721 else
2722 vui[1].lc->next = NULL;
2723 ii = 4;
2724 }
2725 else
2726 {
2727 /* Order should be 0, 1, 2... */
2728 ii = 1;
2729 vui[n - 1].lc->next = NULL;
2730 }
2731 }
2732 for (; ii < n; ii++)
2733 vui[ii - 1].lc->next = vui[ii].lc;
2734 }
2735 else
2736 {
2737 qsort (vui, n, sizeof (struct variable_union_info),
2738 variable_union_info_cmp_pos);
2739
2740 /* Reconnect the nodes in sorted order. */
2741 for (ii = 1; ii < n; ii++)
2742 vui[ii - 1].lc->next = vui[ii].lc;
2743 vui[n - 1].lc->next = NULL;
2744 dst->var_part[k].loc_chain = vui[0].lc;
2745 }
2746
2747 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
2748 }
2749 i--;
2750 j--;
2751 }
2752 else if ((i >= 0 && j >= 0
2753 && VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
2754 || i < 0)
2755 {
2756 dst->var_part[k] = dst->var_part[j];
2757 j--;
2758 }
2759 else if ((i >= 0 && j >= 0
2760 && VAR_PART_OFFSET (src, i) > VAR_PART_OFFSET (dst, j))
2761 || j < 0)
2762 {
2763 location_chain *nextp;
2764
2765 /* Copy the chain from SRC. */
2766 nextp = &dst->var_part[k].loc_chain;
2767 for (node = src->var_part[i].loc_chain; node; node = node->next)
2768 {
2769 location_chain new_lc;
2770
2771 new_lc = (location_chain) pool_alloc (loc_chain_pool);
2772 new_lc->next = NULL;
2773 new_lc->init = node->init;
2774 if (!node->set_src || MEM_P (node->set_src))
2775 new_lc->set_src = NULL;
2776 else
2777 new_lc->set_src = node->set_src;
2778 new_lc->loc = node->loc;
2779
2780 *nextp = new_lc;
2781 nextp = &new_lc->next;
2782 }
2783
2784 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (src, i);
2785 i--;
2786 }
2787 dst->var_part[k].cur_loc = NULL;
2788 }
2789
2790 if (flag_var_tracking_uninit)
2791 for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
2792 {
2793 location_chain node, node2;
2794 for (node = src->var_part[i].loc_chain; node; node = node->next)
2795 for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
2796 if (rtx_equal_p (node->loc, node2->loc))
2797 {
2798 if (node->init > node2->init)
2799 node2->init = node->init;
2800 }
2801 }
2802
2803 /* Continue traversing the hash table. */
2804 return 1;
2805 }
2806
2807 /* Compute union of dataflow sets SRC and DST and store it to DST. */
2808
2809 static void
2810 dataflow_set_union (dataflow_set *dst, dataflow_set *src)
2811 {
2812 int i;
2813
2814 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2815 attrs_list_union (&dst->regs[i], src->regs[i]);
2816
2817 if (dst->vars == empty_shared_hash)
2818 {
2819 shared_hash_destroy (dst->vars);
2820 dst->vars = shared_hash_copy (src->vars);
2821 }
2822 else
2823 {
2824 htab_iterator hi;
2825 variable var;
2826
2827 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (src->vars), var, variable, hi)
2828 variable_union (var, dst);
2829 }
2830 }
2831
2832 /* Whether the value is currently being expanded. */
2833 #define VALUE_RECURSED_INTO(x) \
2834 (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
2835
2836 /* Whether no expansion was found, recorded to avoid useless lookups.
2837 It must only be set when VALUE_CHANGED is clear. */
2838 #define NO_LOC_P(x) \
2839 (RTL_FLAG_CHECK2 ("NO_LOC_P", (x), VALUE, DEBUG_EXPR)->return_val)
2840
2841 /* Whether cur_loc in the value needs to be (re)computed. */
2842 #define VALUE_CHANGED(x) \
2843 (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
2844 /* Whether cur_loc in the decl needs to be (re)computed. */
2845 #define DECL_CHANGED(x) TREE_VISITED (x)
2846
2847 /* Record (if NEWV) that DV needs to have its cur_loc recomputed. For
2848 user DECLs, this means they're in changed_variables. Values and
2849 debug exprs may be left with this flag set if no user variable
2850 requires them to be evaluated. */
2851
2852 static inline void
2853 set_dv_changed (decl_or_value dv, bool newv)
2854 {
2855 switch (dv_onepart_p (dv))
2856 {
2857 case ONEPART_VALUE:
2858 if (newv)
2859 NO_LOC_P (dv_as_value (dv)) = false;
2860 VALUE_CHANGED (dv_as_value (dv)) = newv;
2861 break;
2862
2863 case ONEPART_DEXPR:
2864 if (newv)
2865 NO_LOC_P (DECL_RTL_KNOWN_SET (dv_as_decl (dv))) = false;
2866 /* Fall through... */
2867
2868 default:
2869 DECL_CHANGED (dv_as_decl (dv)) = newv;
2870 break;
2871 }
2872 }
2873
2874 /* Return true if DV needs to have its cur_loc recomputed. */
2875
2876 static inline bool
2877 dv_changed_p (decl_or_value dv)
2878 {
2879 return (dv_is_value_p (dv)
2880 ? VALUE_CHANGED (dv_as_value (dv))
2881 : DECL_CHANGED (dv_as_decl (dv)));
2882 }
2883
2884 /* Return a location list node whose loc is rtx_equal to LOC, in the
2885 location list of a one-part variable or value VAR, or in that of
2886 any values recursively mentioned in the location lists. VARS must
2887 be in star-canonical form. */
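/* Roughly, star-canonical form means each equivalence set has a
   single canonical VALUE whose location list carries the whole set,
   while every other value in the set lists only the canonical one.
   E.g. (hypothetically), if v1 is canonical for {v1, v2, v3}, the
   chains of v2 and v3 are just (v1), and v1's chain holds v2, v3 and
   whatever registers or MEMs the set occupies, so the recursion
   below is at most one level deep.  */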
2888
2889 static location_chain
2890 find_loc_in_1pdv (rtx loc, variable var, htab_t vars)
2891 {
2892 location_chain node;
2893 enum rtx_code loc_code;
2894
2895 if (!var)
2896 return NULL;
2897
2898 gcc_checking_assert (var->onepart);
2899
2900 if (!var->n_var_parts)
2901 return NULL;
2902
2903 gcc_checking_assert (loc != dv_as_opaque (var->dv));
2904
2905 loc_code = GET_CODE (loc);
2906 for (node = var->var_part[0].loc_chain; node; node = node->next)
2907 {
2908 decl_or_value dv;
2909 variable rvar;
2910
2911 if (GET_CODE (node->loc) != loc_code)
2912 {
2913 if (GET_CODE (node->loc) != VALUE)
2914 continue;
2915 }
2916 else if (loc == node->loc)
2917 return node;
2918 else if (loc_code != VALUE)
2919 {
2920 if (rtx_equal_p (loc, node->loc))
2921 return node;
2922 continue;
2923 }
2924
2925 /* Since we're in star-canonical form, we don't need to visit
2926 non-canonical nodes: one-part variables and non-canonical
2927 values would only point back to the canonical node. */
2928 if (dv_is_value_p (var->dv)
2929 && !canon_value_cmp (node->loc, dv_as_value (var->dv)))
2930 {
2931 /* Skip all subsequent VALUEs. */
2932 while (node->next && GET_CODE (node->next->loc) == VALUE)
2933 {
2934 node = node->next;
2935 gcc_checking_assert (!canon_value_cmp (node->loc,
2936 dv_as_value (var->dv)));
2937 if (loc == node->loc)
2938 return node;
2939 }
2940 continue;
2941 }
2942
2943 gcc_checking_assert (node == var->var_part[0].loc_chain);
2944 gcc_checking_assert (!node->next);
2945
2946 dv = dv_from_value (node->loc);
2947 rvar = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
2948 return find_loc_in_1pdv (loc, rvar, vars);
2949 }
2950
2951 /* ??? Gotta look in cselib_val locations too. */
2952
2953 return NULL;
2954 }
2955
2956 /* Hash table iteration argument passed to the variable_merge_over_* functions below. */
2957 struct dfset_merge
2958 {
2959 /* The set in which the merge is to be inserted. */
2960 dataflow_set *dst;
2961 /* The set that we're iterating in. */
2962 dataflow_set *cur;
2963 /* The set that may contain the other dv we are to merge with. */
2964 dataflow_set *src;
2965 /* Number of onepart dvs in src. */
2966 int src_onepart_cnt;
2967 };
2968
2969 /* Insert LOC in *NODEP, if it's not there yet. The list must be in
2970 loc_cmp order, and it is maintained as such. */
2971
2972 static void
2973 insert_into_intersection (location_chain *nodep, rtx loc,
2974 enum var_init_status status)
2975 {
2976 location_chain node;
2977 int r;
2978
2979 for (node = *nodep; node; nodep = &node->next, node = *nodep)
2980 if ((r = loc_cmp (node->loc, loc)) == 0)
2981 {
2982 node->init = MIN (node->init, status);
2983 return;
2984 }
2985 else if (r > 0)
2986 break;
2987
2988 node = (location_chain) pool_alloc (loc_chain_pool);
2989
2990 node->loc = loc;
2991 node->set_src = NULL;
2992 node->init = status;
2993 node->next = *nodep;
2994 *nodep = node;
2995 }
2996
2997 /* Insert in DEST the intersection of the locations present in both
2998 S1NODE and S2VAR, directly or indirectly. S1NODE is from a
2999 variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in
3000 DSM->dst. */
3001
3002 static void
3003 intersect_loc_chains (rtx val, location_chain *dest, struct dfset_merge *dsm,
3004 location_chain s1node, variable s2var)
3005 {
3006 dataflow_set *s1set = dsm->cur;
3007 dataflow_set *s2set = dsm->src;
3008 location_chain found;
3009
3010 if (s2var)
3011 {
3012 location_chain s2node;
3013
3014 gcc_checking_assert (s2var->onepart);
3015
3016 if (s2var->n_var_parts)
3017 {
3018 s2node = s2var->var_part[0].loc_chain;
3019
3020 for (; s1node && s2node;
3021 s1node = s1node->next, s2node = s2node->next)
3022 if (s1node->loc != s2node->loc)
3023 break;
3024 else if (s1node->loc == val)
3025 continue;
3026 else
3027 insert_into_intersection (dest, s1node->loc,
3028 MIN (s1node->init, s2node->init));
3029 }
3030 }
3031
3032 for (; s1node; s1node = s1node->next)
3033 {
3034 if (s1node->loc == val)
3035 continue;
3036
3037 if ((found = find_loc_in_1pdv (s1node->loc, s2var,
3038 shared_hash_htab (s2set->vars))))
3039 {
3040 insert_into_intersection (dest, s1node->loc,
3041 MIN (s1node->init, found->init));
3042 continue;
3043 }
3044
3045 if (GET_CODE (s1node->loc) == VALUE
3046 && !VALUE_RECURSED_INTO (s1node->loc))
3047 {
3048 decl_or_value dv = dv_from_value (s1node->loc);
3049 variable svar = shared_hash_find (s1set->vars, dv);
3050 if (svar)
3051 {
3052 if (svar->n_var_parts == 1)
3053 {
3054 VALUE_RECURSED_INTO (s1node->loc) = true;
3055 intersect_loc_chains (val, dest, dsm,
3056 svar->var_part[0].loc_chain,
3057 s2var);
3058 VALUE_RECURSED_INTO (s1node->loc) = false;
3059 }
3060 }
3061 }
3062
3063 /* ??? gotta look in cselib_val locations too. */
3064
3065 /* ??? if the location is equivalent to any location in src,
3066 searched recursively
3067
3068 add to dst the values needed to represent the equivalence
3069
3070 telling whether location S is equivalent to another dv's
3071 location list:
3072
3073 for each location D in the list
3074
3075 if S and D satisfy rtx_equal_p, then it is present
3076
3077 else if D is a value, recurse without cycles
3078
3079 else if S and D have the same CODE and MODE
3080
3081 for each operand oS and the corresponding oD
3082
3083 if oS and oD are not equivalent, then S and D are not equivalent
3084
3085 else if they are RTX vectors
3086
3087 if any vector oS element is not equivalent to its respective oD,
3088 then S and D are not equivalent
3089
3090 */
3091
3092
3093 }
3094 }
3095
3096 /* Return -1 if X should be before Y in a location list for a 1-part
3097 variable, 1 if Y should be before X, and 0 if they're equivalent
3098 and should not appear in the list. */
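/* The resulting total order, implemented below: registers first
   (ordered by REGNO), then MEMs (ordered by their addresses), then
   VALUEs (ordered by canon_value_cmp), then all remaining codes
   (ordered by GET_CODE and then operand by operand, with DEBUG_EXPRs
   compared by DEBUG_TEMP_UID), and ENTRY_VALUEs sorting after
   everything else.  */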
3099
3100 static int
3101 loc_cmp (rtx x, rtx y)
3102 {
3103 int i, j, r;
3104 RTX_CODE code = GET_CODE (x);
3105 const char *fmt;
3106
3107 if (x == y)
3108 return 0;
3109
3110 if (REG_P (x))
3111 {
3112 if (!REG_P (y))
3113 return -1;
3114 gcc_assert (GET_MODE (x) == GET_MODE (y));
3115 if (REGNO (x) == REGNO (y))
3116 return 0;
3117 else if (REGNO (x) < REGNO (y))
3118 return -1;
3119 else
3120 return 1;
3121 }
3122
3123 if (REG_P (y))
3124 return 1;
3125
3126 if (MEM_P (x))
3127 {
3128 if (!MEM_P (y))
3129 return -1;
3130 gcc_assert (GET_MODE (x) == GET_MODE (y));
3131 return loc_cmp (XEXP (x, 0), XEXP (y, 0));
3132 }
3133
3134 if (MEM_P (y))
3135 return 1;
3136
3137 if (GET_CODE (x) == VALUE)
3138 {
3139 if (GET_CODE (y) != VALUE)
3140 return -1;
3141 /* Don't assert the modes are the same, that is true only
3142 when not recursing. (subreg:QI (value:SI 1:1) 0)
3143 and (subreg:QI (value:DI 2:2) 0) can be compared,
3144 even when the modes are different. */
3145 if (canon_value_cmp (x, y))
3146 return -1;
3147 else
3148 return 1;
3149 }
3150
3151 if (GET_CODE (y) == VALUE)
3152 return 1;
3153
3154 /* Entry value is the least preferable kind of expression. */
3155 if (GET_CODE (x) == ENTRY_VALUE)
3156 {
3157 if (GET_CODE (y) != ENTRY_VALUE)
3158 return 1;
3159 gcc_assert (GET_MODE (x) == GET_MODE (y));
3160 return loc_cmp (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));
3161 }
3162
3163 if (GET_CODE (y) == ENTRY_VALUE)
3164 return -1;
3165
3166 if (GET_CODE (x) == GET_CODE (y))
3167 /* Compare operands below. */;
3168 else if (GET_CODE (x) < GET_CODE (y))
3169 return -1;
3170 else
3171 return 1;
3172
3173 gcc_assert (GET_MODE (x) == GET_MODE (y));
3174
3175 if (GET_CODE (x) == DEBUG_EXPR)
3176 {
3177 if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3178 < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)))
3179 return -1;
3180 gcc_checking_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3181 > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)));
3182 return 1;
3183 }
3184
3185 fmt = GET_RTX_FORMAT (code);
3186 for (i = 0; i < GET_RTX_LENGTH (code); i++)
3187 switch (fmt[i])
3188 {
3189 case 'w':
3190 if (XWINT (x, i) == XWINT (y, i))
3191 break;
3192 else if (XWINT (x, i) < XWINT (y, i))
3193 return -1;
3194 else
3195 return 1;
3196
3197 case 'n':
3198 case 'i':
3199 if (XINT (x, i) == XINT (y, i))
3200 break;
3201 else if (XINT (x, i) < XINT (y, i))
3202 return -1;
3203 else
3204 return 1;
3205
3206 case 'V':
3207 case 'E':
3208 /* Compare the vector length first. */
3209 if (XVECLEN (x, i) == XVECLEN (y, i))
3210 /* Compare the vector elements. */;
3211 else if (XVECLEN (x, i) < XVECLEN (y, i))
3212 return -1;
3213 else
3214 return 1;
3215
3216 for (j = 0; j < XVECLEN (x, i); j++)
3217 if ((r = loc_cmp (XVECEXP (x, i, j),
3218 XVECEXP (y, i, j))))
3219 return r;
3220 break;
3221
3222 case 'e':
3223 if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
3224 return r;
3225 break;
3226
3227 case 'S':
3228 case 's':
3229 if (XSTR (x, i) == XSTR (y, i))
3230 break;
3231 if (!XSTR (x, i))
3232 return -1;
3233 if (!XSTR (y, i))
3234 return 1;
3235 if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
3236 break;
3237 else if (r < 0)
3238 return -1;
3239 else
3240 return 1;
3241
3242 case 'u':
3243 /* These are just backpointers, so they don't matter. */
3244 break;
3245
3246 case '0':
3247 case 't':
3248 break;
3249
3250 /* It is believed that rtx's at this level will never
3251 contain anything but integers and other rtx's,
3252 except for within LABEL_REFs and SYMBOL_REFs. */
3253 default:
3254 gcc_unreachable ();
3255 }
3256
3257 return 0;
3258 }
3259
3260 #if ENABLE_CHECKING
3261 /* Check the order of entries in one-part variables. */
3262
3263 static int
3264 canonicalize_loc_order_check (void **slot, void *data ATTRIBUTE_UNUSED)
3265 {
3266 variable var = (variable) *slot;
3267 location_chain node, next;
3268
3269 #ifdef ENABLE_RTL_CHECKING
3270 int i;
3271 for (i = 0; i < var->n_var_parts; i++)
3272 gcc_assert (var->var_part[i].cur_loc == NULL);
3273 gcc_assert (!var->in_changed_variables);
3274 #endif
3275
3276 if (!var->onepart)
3277 return 1;
3278
3279 gcc_assert (var->n_var_parts == 1);
3280 node = var->var_part[0].loc_chain;
3281 gcc_assert (node);
3282
3283 while ((next = node->next))
3284 {
3285 gcc_assert (loc_cmp (node->loc, next->loc) < 0);
3286 node = next;
3287 }
3288
3289 return 1;
3290 }
3291 #endif
3292
3293 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
3294 more likely to be chosen as canonical for an equivalence set.
3295 Ensure less likely values can reach more likely neighbors, making
3296 the connections bidirectional. */
3297
3298 static int
3299 canonicalize_values_mark (void **slot, void *data)
3300 {
3301 dataflow_set *set = (dataflow_set *)data;
3302 variable var = (variable) *slot;
3303 decl_or_value dv = var->dv;
3304 rtx val;
3305 location_chain node;
3306
3307 if (!dv_is_value_p (dv))
3308 return 1;
3309
3310 gcc_checking_assert (var->n_var_parts == 1);
3311
3312 val = dv_as_value (dv);
3313
3314 for (node = var->var_part[0].loc_chain; node; node = node->next)
3315 if (GET_CODE (node->loc) == VALUE)
3316 {
3317 if (canon_value_cmp (node->loc, val))
3318 VALUE_RECURSED_INTO (val) = true;
3319 else
3320 {
3321 decl_or_value odv = dv_from_value (node->loc);
3322 void **oslot = shared_hash_find_slot_noinsert (set->vars, odv);
3323
3324 set_slot_part (set, val, oslot, odv, 0,
3325 node->init, NULL_RTX);
3326
3327 VALUE_RECURSED_INTO (node->loc) = true;
3328 }
3329 }
3330
3331 return 1;
3332 }
3333
3334 /* Remove redundant entries from equivalence lists in onepart
3335 variables, canonicalizing equivalence sets into star shapes. */
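/* E.g. (hypothetically), equivalence links v1 -> v2 -> v3 collected
   during dataflow become, after canonicalization, a star centered on
   the canonical value (say v3): v1 -> v3, v2 -> v3, with v3 listing
   v1, v2 and the set's other locations, so a lookup never has to
   follow more than one link.  */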
3336
3337 static int
3338 canonicalize_values_star (void **slot, void *data)
3339 {
3340 dataflow_set *set = (dataflow_set *)data;
3341 variable var = (variable) *slot;
3342 decl_or_value dv = var->dv;
3343 location_chain node;
3344 decl_or_value cdv;
3345 rtx val, cval;
3346 void **cslot;
3347 bool has_value;
3348 bool has_marks;
3349
3350 if (!var->onepart)
3351 return 1;
3352
3353 gcc_checking_assert (var->n_var_parts == 1);
3354
3355 if (dv_is_value_p (dv))
3356 {
3357 cval = dv_as_value (dv);
3358 if (!VALUE_RECURSED_INTO (cval))
3359 return 1;
3360 VALUE_RECURSED_INTO (cval) = false;
3361 }
3362 else
3363 cval = NULL_RTX;
3364
3365 restart:
3366 val = cval;
3367 has_value = false;
3368 has_marks = false;
3369
3370 gcc_assert (var->n_var_parts == 1);
3371
3372 for (node = var->var_part[0].loc_chain; node; node = node->next)
3373 if (GET_CODE (node->loc) == VALUE)
3374 {
3375 has_value = true;
3376 if (VALUE_RECURSED_INTO (node->loc))
3377 has_marks = true;
3378 if (canon_value_cmp (node->loc, cval))
3379 cval = node->loc;
3380 }
3381
3382 if (!has_value)
3383 return 1;
3384
3385 if (cval == val)
3386 {
3387 if (!has_marks || dv_is_decl_p (dv))
3388 return 1;
3389
3390 /* Keep it marked so that we revisit it, either after visiting a
3391 child node, or after visiting a new parent that might be
3392 discovered later. */
3393 VALUE_RECURSED_INTO (val) = true;
3394
3395 for (node = var->var_part[0].loc_chain; node; node = node->next)
3396 if (GET_CODE (node->loc) == VALUE
3397 && VALUE_RECURSED_INTO (node->loc))
3398 {
3399 cval = node->loc;
3400 restart_with_cval:
3401 VALUE_RECURSED_INTO (cval) = false;
3402 dv = dv_from_value (cval);
3403 slot = shared_hash_find_slot_noinsert (set->vars, dv);
3404 if (!slot)
3405 {
3406 gcc_assert (dv_is_decl_p (var->dv));
3407 /* The canonical value was reset and dropped.
3408 Remove it. */
3409 clobber_variable_part (set, NULL, var->dv, 0, NULL);
3410 return 1;
3411 }
3412 var = (variable)*slot;
3413 gcc_assert (dv_is_value_p (var->dv));
3414 if (var->n_var_parts == 0)
3415 return 1;
3416 gcc_assert (var->n_var_parts == 1);
3417 goto restart;
3418 }
3419
3420 VALUE_RECURSED_INTO (val) = false;
3421
3422 return 1;
3423 }
3424
3425 /* Push values to the canonical one. */
3426 cdv = dv_from_value (cval);
3427 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3428
3429 for (node = var->var_part[0].loc_chain; node; node = node->next)
3430 if (node->loc != cval)
3431 {
3432 cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
3433 node->init, NULL_RTX);
3434 if (GET_CODE (node->loc) == VALUE)
3435 {
3436 decl_or_value ndv = dv_from_value (node->loc);
3437
3438 set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
3439 NO_INSERT);
3440
3441 if (canon_value_cmp (node->loc, val))
3442 {
3443 /* If it could have been a local minimum, it's not any more,
3444 since it's now neighbor to cval, so it may have to push
3445 to it. Conversely, if it wouldn't have prevailed over
3446 val, then whatever mark it has is fine: if it was to
3447 push, it will now push to a more canonical node, but if
3448 it wasn't, then it has already pushed any values it might
3449 have to. */
3450 VALUE_RECURSED_INTO (node->loc) = true;
3451 /* Make sure we visit node->loc by ensuring cval is
3452 visited too. */
3453 VALUE_RECURSED_INTO (cval) = true;
3454 }
3455 else if (!VALUE_RECURSED_INTO (node->loc))
3456 /* If we have no need to "recurse" into this node, it's
3457 already "canonicalized", so drop the link to the old
3458 parent. */
3459 clobber_variable_part (set, cval, ndv, 0, NULL);
3460 }
3461 else if (GET_CODE (node->loc) == REG)
3462 {
3463 attrs list = set->regs[REGNO (node->loc)], *listp;
3464
3465 /* Change an existing attribute referring to dv so that it
3466 refers to cdv, removing any duplicate this might
3467 introduce, and checking that no previous duplicates
3468 existed, all in a single pass. */
3469
3470 while (list)
3471 {
3472 if (list->offset == 0
3473 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3474 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3475 break;
3476
3477 list = list->next;
3478 }
3479
3480 gcc_assert (list);
3481 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3482 {
3483 list->dv = cdv;
3484 for (listp = &list->next; (list = *listp); listp = &list->next)
3485 {
3486 if (list->offset)
3487 continue;
3488
3489 if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3490 {
3491 *listp = list->next;
3492 pool_free (attrs_pool, list);
3493 list = *listp;
3494 break;
3495 }
3496
3497 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
3498 }
3499 }
3500 else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3501 {
3502 for (listp = &list->next; (list = *listp); listp = &list->next)
3503 {
3504 if (list->offset)
3505 continue;
3506
3507 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3508 {
3509 *listp = list->next;
3510 pool_free (attrs_pool, list);
3511 list = *listp;
3512 break;
3513 }
3514
3515 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
3516 }
3517 }
3518 else
3519 gcc_unreachable ();
3520
3521 #if ENABLE_CHECKING
3522 while (list)
3523 {
3524 if (list->offset == 0
3525 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3526 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3527 gcc_unreachable ();
3528
3529 list = list->next;
3530 }
3531 #endif
3532 }
3533 }
3534
3535 if (val)
3536 set_slot_part (set, val, cslot, cdv, 0,
3537 VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
3538
3539 slot = clobber_slot_part (set, cval, slot, 0, NULL);
3540
3541 /* Variable may have been unshared. */
3542 var = (variable)*slot;
3543 gcc_checking_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
3544 && var->var_part[0].loc_chain->next == NULL);
3545
3546 if (VALUE_RECURSED_INTO (cval))
3547 goto restart_with_cval;
3548
3549 return 1;
3550 }
3551
3552 /* Bind one-part variables to the canonical value in an equivalence
3553 set. Not doing this causes dataflow convergence failure in rare
3554 circumstances, see PR42873. Unfortunately we can't do this
3555 efficiently as part of canonicalize_values_star, since we may not
3556 have determined or even seen the canonical value of a set when we
3557 get to a variable that references another member of the set. */
3558
3559 static int
3560 canonicalize_vars_star (void **slot, void *data)
3561 {
3562 dataflow_set *set = (dataflow_set *)data;
3563 variable var = (variable) *slot;
3564 decl_or_value dv = var->dv;
3565 location_chain node;
3566 rtx cval;
3567 decl_or_value cdv;
3568 void **cslot;
3569 variable cvar;
3570 location_chain cnode;
3571
3572 if (!var->onepart || var->onepart == ONEPART_VALUE)
3573 return 1;
3574
3575 gcc_assert (var->n_var_parts == 1);
3576
3577 node = var->var_part[0].loc_chain;
3578
3579 if (GET_CODE (node->loc) != VALUE)
3580 return 1;
3581
3582 gcc_assert (!node->next);
3583 cval = node->loc;
3584
3585 /* Push values to the canonical one. */
3586 cdv = dv_from_value (cval);
3587 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3588 if (!cslot)
3589 return 1;
3590 cvar = (variable)*cslot;
3591 gcc_assert (cvar->n_var_parts == 1);
3592
3593 cnode = cvar->var_part[0].loc_chain;
3594
3595 /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
3596 that are not more canonical than it. */
3597 if (GET_CODE (cnode->loc) != VALUE
3598 || !canon_value_cmp (cnode->loc, cval))
3599 return 1;
3600
3601 /* CVAL was found to be non-canonical. Change the variable to point
3602 to the canonical VALUE. */
3603 gcc_assert (!cnode->next);
3604 cval = cnode->loc;
3605
3606 slot = set_slot_part (set, cval, slot, dv, 0,
3607 node->init, node->set_src);
3608 clobber_slot_part (set, cval, slot, 0, node->set_src);
3609
3610 return 1;
3611 }
3612
3613 /* Combine variable or value in *S1SLOT (in DSM->cur) with the
3614 corresponding entry in DSM->src. Multi-part variables are combined
3615 with variable_union, whereas onepart dvs are combined with
3616 intersection. */
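/* The asymmetry, roughly: a multi-part variable keeps the "sorted"
   union of the two chains, as in variable_union above, whereas for a
   one-part dv a location survives only if it is present in both
   DSM->cur and DSM->src, either directly or through the value
   equivalences followed by find_loc_in_1pdv.  */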
3617
3618 static int
3619 variable_merge_over_cur (variable s1var, struct dfset_merge *dsm)
3620 {
3621 dataflow_set *dst = dsm->dst;
3622 void **dstslot;
3623 variable s2var, dvar = NULL;
3624 decl_or_value dv = s1var->dv;
3625 onepart_enum_t onepart = s1var->onepart;
3626 rtx val;
3627 hashval_t dvhash;
3628 location_chain node, *nodep;
3629
3630 /* If the incoming onepart variable has an empty location list, then
3631 the intersection will be just as empty. For other variables,
3632 it's always union. */
3633 gcc_checking_assert (s1var->n_var_parts
3634 && s1var->var_part[0].loc_chain);
3635
3636 if (!onepart)
3637 return variable_union (s1var, dst);
3638
3639 gcc_checking_assert (s1var->n_var_parts == 1);
3640
3641 dvhash = dv_htab_hash (dv);
3642 if (dv_is_value_p (dv))
3643 val = dv_as_value (dv);
3644 else
3645 val = NULL;
3646
3647 s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
3648 if (!s2var)
3649 {
3650 dst_can_be_shared = false;
3651 return 1;
3652 }
3653
3654 dsm->src_onepart_cnt--;
3655 gcc_assert (s2var->var_part[0].loc_chain
3656 && s2var->onepart == onepart
3657 && s2var->n_var_parts == 1);
3658
3659 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3660 if (dstslot)
3661 {
3662 dvar = (variable)*dstslot;
3663 gcc_assert (dvar->refcount == 1
3664 && dvar->onepart == onepart
3665 && dvar->n_var_parts == 1);
3666 nodep = &dvar->var_part[0].loc_chain;
3667 }
3668 else
3669 {
3670 nodep = &node;
3671 node = NULL;
3672 }
3673
3674 if (!dstslot && !onepart_variable_different_p (s1var, s2var))
3675 {
3676 dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
3677 dvhash, INSERT);
3678 *dstslot = dvar = s2var;
3679 dvar->refcount++;
3680 }
3681 else
3682 {
3683 dst_can_be_shared = false;
3684
3685 intersect_loc_chains (val, nodep, dsm,
3686 s1var->var_part[0].loc_chain, s2var);
3687
3688 if (!dstslot)
3689 {
3690 if (node)
3691 {
3692 dvar = (variable) pool_alloc (onepart_pool (onepart));
3693 dvar->dv = dv;
3694 dvar->refcount = 1;
3695 dvar->n_var_parts = 1;
3696 dvar->onepart = onepart;
3697 dvar->in_changed_variables = false;
3698 dvar->var_part[0].loc_chain = node;
3699 dvar->var_part[0].cur_loc = NULL;
3700 if (onepart)
3701 VAR_LOC_1PAUX (dvar) = NULL;
3702 else
3703 VAR_PART_OFFSET (dvar, 0) = 0;
3704
3705 dstslot
3706 = shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
3707 INSERT);
3708 gcc_assert (!*dstslot);
3709 *dstslot = dvar;
3710 }
3711 else
3712 return 1;
3713 }
3714 }
3715
3716 nodep = &dvar->var_part[0].loc_chain;
3717 while ((node = *nodep))
3718 {
3719 location_chain *nextp = &node->next;
3720
3721 if (GET_CODE (node->loc) == REG)
3722 {
3723 attrs list;
3724
3725 for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
3726 if (GET_MODE (node->loc) == GET_MODE (list->loc)
3727 && dv_is_value_p (list->dv))
3728 break;
3729
3730 if (!list)
3731 attrs_list_insert (&dst->regs[REGNO (node->loc)],
3732 dv, 0, node->loc);
3733 /* If this value became canonical for another value that had
3734 this register, we want to leave it alone. */
3735 else if (dv_as_value (list->dv) != val)
3736 {
3737 dstslot = set_slot_part (dst, dv_as_value (list->dv),
3738 dstslot, dv, 0,
3739 node->init, NULL_RTX);
3740 dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
3741
3742 /* Since nextp points into the removed node, we can't
3743 use it. The pointer to the next node moved to nodep.
3744 However, if the variable we're walking is unshared
3745 during our walk, we'll keep walking the location list
3746 of the previously-shared variable, in which case the
3747 node won't have been removed, and we'll want to skip
3748 it. That's why we test *nodep here. */
3749 if (*nodep != node)
3750 nextp = nodep;
3751 }
3752 }
3753 else
3754 /* Canonicalization puts registers first, so we don't have to
3755 walk it all. */
3756 break;
3757 nodep = nextp;
3758 }
3759
3760 if (dvar != (variable)*dstslot)
3761 dvar = (variable)*dstslot;
3762 nodep = &dvar->var_part[0].loc_chain;
3763
3764 if (val)
3765 {
3766 /* Mark all referenced nodes for canonicalization, and make sure
3767 we have mutual equivalence links. */
3768 VALUE_RECURSED_INTO (val) = true;
3769 for (node = *nodep; node; node = node->next)
3770 if (GET_CODE (node->loc) == VALUE)
3771 {
3772 VALUE_RECURSED_INTO (node->loc) = true;
3773 set_variable_part (dst, val, dv_from_value (node->loc), 0,
3774 node->init, NULL, INSERT);
3775 }
3776
3777 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3778 gcc_assert (*dstslot == dvar);
3779 canonicalize_values_star (dstslot, dst);
3780 gcc_checking_assert (dstslot
3781 == shared_hash_find_slot_noinsert_1 (dst->vars,
3782 dv, dvhash));
3783 dvar = (variable)*dstslot;
3784 }
3785 else
3786 {
3787 bool has_value = false, has_other = false;
3788
3789 /* If we have one value and anything else, we're going to
3790 canonicalize this, so make sure all values have an entry in
3791 the table and are marked for canonicalization. */
3792 for (node = *nodep; node; node = node->next)
3793 {
3794 if (GET_CODE (node->loc) == VALUE)
3795 {
3796 /* If this was marked during register canonicalization,
3797 we know we have to canonicalize values. */
3798 if (has_value)
3799 has_other = true;
3800 has_value = true;
3801 if (has_other)
3802 break;
3803 }
3804 else
3805 {
3806 has_other = true;
3807 if (has_value)
3808 break;
3809 }
3810 }
3811
3812 if (has_value && has_other)
3813 {
3814 for (node = *nodep; node; node = node->next)
3815 {
3816 if (GET_CODE (node->loc) == VALUE)
3817 {
3818 decl_or_value dv = dv_from_value (node->loc);
3819 void **slot = NULL;
3820
3821 if (shared_hash_shared (dst->vars))
3822 slot = shared_hash_find_slot_noinsert (dst->vars, dv);
3823 if (!slot)
3824 slot = shared_hash_find_slot_unshare (&dst->vars, dv,
3825 INSERT);
3826 if (!*slot)
3827 {
3828 variable var = (variable) pool_alloc (onepart_pool
3829 (ONEPART_VALUE));
3830 var->dv = dv;
3831 var->refcount = 1;
3832 var->n_var_parts = 1;
3833 var->onepart = ONEPART_VALUE;
3834 var->in_changed_variables = false;
3835 var->var_part[0].loc_chain = NULL;
3836 var->var_part[0].cur_loc = NULL;
3837 VAR_LOC_1PAUX (var) = NULL;
3838 *slot = var;
3839 }
3840
3841 VALUE_RECURSED_INTO (node->loc) = true;
3842 }
3843 }
3844
3845 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3846 gcc_assert (*dstslot == dvar);
3847 canonicalize_values_star (dstslot, dst);
3848 gcc_checking_assert (dstslot
3849 == shared_hash_find_slot_noinsert_1 (dst->vars,
3850 dv, dvhash));
3851 dvar = (variable)*dstslot;
3852 }
3853 }
3854
3855 if (!onepart_variable_different_p (dvar, s2var))
3856 {
3857 variable_htab_free (dvar);
3858 *dstslot = dvar = s2var;
3859 dvar->refcount++;
3860 }
3861 else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
3862 {
3863 variable_htab_free (dvar);
3864 *dstslot = dvar = s1var;
3865 dvar->refcount++;
3866 dst_can_be_shared = false;
3867 }
3868 else
3869 dst_can_be_shared = false;
3870
3871 return 1;
3872 }
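
/* An editorial aside, not GCC code: the tail of variable_merge_over_cur
   above uses a refcount-based sharing trick: if the freshly merged
   variable turns out to be identical to one of the inputs, the copy is
   discarded and the input is shared instead.  A minimal standalone
   sketch of that idiom (all names here are illustrative):

     #include <stdlib.h>

     struct node { int refcount; int payload; };

     static struct node *
     share_if_equal (struct node *fresh, struct node *existing)
     {
       if (fresh->payload == existing->payload)
         {
           free (fresh);            // drop the redundant copy
           existing->refcount++;    // and share the survivor
           return existing;
         }
       return fresh;                // otherwise keep the new node
     }
 */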
3873
3874 /* Copy S2VAR (in DSM->src) to DSM->dst if the variable is a
3875 multi-part variable. Unions of multi-part variables and
3876 intersections of one-part ones will be handled in
3877 variable_merge_over_cur(). */
3878
3879 static int
3880 variable_merge_over_src (variable s2var, struct dfset_merge *dsm)
3881 {
3882 dataflow_set *dst = dsm->dst;
3883 decl_or_value dv = s2var->dv;
3884
3885 if (!s2var->onepart)
3886 {
3887 void **dstp = shared_hash_find_slot (dst->vars, dv);
3888 *dstp = s2var;
3889 s2var->refcount++;
3890 return 1;
3891 }
3892
3893 dsm->src_onepart_cnt++;
3894 return 1;
3895 }
3896
3897 /* Combine dataflow set information from SRC2 into DST; the previous
3898    contents of DST form the other input of the merge and are destroyed.  */
3899
3900 static void
3901 dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
3902 {
3903 dataflow_set cur = *dst;
3904 dataflow_set *src1 = &cur;
3905 struct dfset_merge dsm;
3906 int i;
3907 size_t src1_elems, src2_elems;
3908 htab_iterator hi;
3909 variable var;
3910
3911 src1_elems = htab_elements (shared_hash_htab (src1->vars));
3912 src2_elems = htab_elements (shared_hash_htab (src2->vars));
3913 dataflow_set_init (dst);
3914 dst->stack_adjust = cur.stack_adjust;
3915 shared_hash_destroy (dst->vars);
3916 dst->vars = (shared_hash) pool_alloc (shared_hash_pool);
3917 dst->vars->refcount = 1;
3918 dst->vars->htab
3919 = htab_create (MAX (src1_elems, src2_elems), variable_htab_hash,
3920 variable_htab_eq, variable_htab_free);
3921
3922 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3923 attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]);
3924
3925 dsm.dst = dst;
3926 dsm.src = src2;
3927 dsm.cur = src1;
3928 dsm.src_onepart_cnt = 0;
3929
3930 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (dsm.src->vars), var, variable, hi)
3931 variable_merge_over_src (var, &dsm);
3932 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (dsm.cur->vars), var, variable, hi)
3933 variable_merge_over_cur (var, &dsm);
3934
3935 if (dsm.src_onepart_cnt)
3936 dst_can_be_shared = false;
3937
3938 dataflow_set_destroy (src1);
3939 }
3940
3941 /* Mark register equivalences. */
3942
3943 static void
3944 dataflow_set_equiv_regs (dataflow_set *set)
3945 {
3946 int i;
3947 attrs list, *listp;
3948
3949 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3950 {
3951 rtx canon[NUM_MACHINE_MODES];
3952
3953       /* If the list is empty or has a single entry, there is nothing
3954 	 to canonicalize.  */
3955 if (set->regs[i] == NULL || set->regs[i]->next == NULL)
3956 continue;
3957
3958 memset (canon, 0, sizeof (canon));
3959
3960 for (list = set->regs[i]; list; list = list->next)
3961 if (list->offset == 0 && dv_is_value_p (list->dv))
3962 {
3963 rtx val = dv_as_value (list->dv);
3964 rtx *cvalp = &canon[(int)GET_MODE (val)];
3965 rtx cval = *cvalp;
3966
3967 if (canon_value_cmp (val, cval))
3968 *cvalp = val;
3969 }
3970
3971 for (list = set->regs[i]; list; list = list->next)
3972 if (list->offset == 0 && dv_onepart_p (list->dv))
3973 {
3974 rtx cval = canon[(int)GET_MODE (list->loc)];
3975
3976 if (!cval)
3977 continue;
3978
3979 if (dv_is_value_p (list->dv))
3980 {
3981 rtx val = dv_as_value (list->dv);
3982
3983 if (val == cval)
3984 continue;
3985
3986 VALUE_RECURSED_INTO (val) = true;
3987 set_variable_part (set, val, dv_from_value (cval), 0,
3988 VAR_INIT_STATUS_INITIALIZED,
3989 NULL, NO_INSERT);
3990 }
3991
3992 VALUE_RECURSED_INTO (cval) = true;
3993 set_variable_part (set, cval, list->dv, 0,
3994 VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
3995 }
3996
3997 for (listp = &set->regs[i]; (list = *listp);
3998 listp = list ? &list->next : listp)
3999 if (list->offset == 0 && dv_onepart_p (list->dv))
4000 {
4001 rtx cval = canon[(int)GET_MODE (list->loc)];
4002 void **slot;
4003
4004 if (!cval)
4005 continue;
4006
4007 if (dv_is_value_p (list->dv))
4008 {
4009 rtx val = dv_as_value (list->dv);
4010 if (!VALUE_RECURSED_INTO (val))
4011 continue;
4012 }
4013
4014 slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
4015 canonicalize_values_star (slot, set);
4016 if (*listp != list)
4017 list = NULL;
4018 }
4019 }
4020 }
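
/* A simplified standalone model of the canonical-per-mode selection in
   dataflow_set_equiv_regs (illustrative only, not GCC code).  One pass
   picks the preferred representative for each mode, a second links the
   others to it, mirroring the canon[] array above:

     #define N_MODES 4

     struct val { int mode; int rank; struct val *equiv; };

     static void
     pick_canonical (struct val **vals, int n)
     {
       struct val *canon[N_MODES] = { 0 };
       int i;

       for (i = 0; i < n; i++)     // pass 1: best rank per mode wins
         if (!canon[vals[i]->mode]
             || vals[i]->rank > canon[vals[i]->mode]->rank)
           canon[vals[i]->mode] = vals[i];

       for (i = 0; i < n; i++)     // pass 2: link the rest to the winner
         if (vals[i] != canon[vals[i]->mode])
           vals[i]->equiv = canon[vals[i]->mode];
     }
 */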
4021
4022 /* Remove any redundant values in the location list of VAR, which must
4023 be unshared and 1-part. */
4024
4025 static void
4026 remove_duplicate_values (variable var)
4027 {
4028 location_chain node, *nodep;
4029
4030 gcc_assert (var->onepart);
4031 gcc_assert (var->n_var_parts == 1);
4032 gcc_assert (var->refcount == 1);
4033
4034 for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
4035 {
4036 if (GET_CODE (node->loc) == VALUE)
4037 {
4038 if (VALUE_RECURSED_INTO (node->loc))
4039 {
4040 /* Remove duplicate value node. */
4041 *nodep = node->next;
4042 pool_free (loc_chain_pool, node);
4043 continue;
4044 }
4045 else
4046 VALUE_RECURSED_INTO (node->loc) = true;
4047 }
4048 nodep = &node->next;
4049 }
4050
4051 for (node = var->var_part[0].loc_chain; node; node = node->next)
4052 if (GET_CODE (node->loc) == VALUE)
4053 {
4054 gcc_assert (VALUE_RECURSED_INTO (node->loc));
4055 VALUE_RECURSED_INTO (node->loc) = false;
4056 }
4057 }
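
/* remove_duplicate_values relies on a two-pass mark/unmark scheme so
   that the VALUE_RECURSED_INTO bits end up clean.  The same idiom on a
   plain singly linked list whose cells share value objects, as a
   standalone sketch (illustrative only, not GCC code):

     #include <stdlib.h>

     struct value { int seen; };
     struct cell { struct value *val; struct cell *next; };

     static void
     dedup (struct cell **headp)
     {
       struct cell **cp, *c;

       for (cp = headp; (c = *cp); )      // pass 1: drop repeated values
         if (c->val->seen)
           { *cp = c->next; free (c); }   // flag already set: duplicate
         else
           { c->val->seen = 1; cp = &c->next; }

       for (c = *headp; c; c = c->next)   // pass 2: clear the marks
         c->val->seen = 0;
     }
 */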
4058
4059
4060 /* Hash table iteration argument passed to the variable_post_merge_* functions.  */
4061 struct dfset_post_merge
4062 {
4063 /* The new input set for the current block. */
4064 dataflow_set *set;
4065 /* Pointer to the permanent input set for the current block, or
4066 NULL. */
4067 dataflow_set **permp;
4068 };
4069
4070 /* Create values for incoming expressions associated with one-part
4071    variables that don't yet have value numbers.  */
4072
4073 static int
4074 variable_post_merge_new_vals (void **slot, void *info)
4075 {
4076 struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
4077 dataflow_set *set = dfpm->set;
4078 variable var = (variable)*slot;
4079 location_chain node;
4080
4081 if (!var->onepart || !var->n_var_parts)
4082 return 1;
4083
4084 gcc_assert (var->n_var_parts == 1);
4085
4086 if (dv_is_decl_p (var->dv))
4087 {
4088 bool check_dupes = false;
4089
4090 restart:
4091 for (node = var->var_part[0].loc_chain; node; node = node->next)
4092 {
4093 if (GET_CODE (node->loc) == VALUE)
4094 gcc_assert (!VALUE_RECURSED_INTO (node->loc));
4095 else if (GET_CODE (node->loc) == REG)
4096 {
4097 attrs att, *attp, *curp = NULL;
4098
4099 if (var->refcount != 1)
4100 {
4101 slot = unshare_variable (set, slot, var,
4102 VAR_INIT_STATUS_INITIALIZED);
4103 var = (variable)*slot;
4104 goto restart;
4105 }
4106
4107 for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
4108 attp = &att->next)
4109 if (att->offset == 0
4110 && GET_MODE (att->loc) == GET_MODE (node->loc))
4111 {
4112 if (dv_is_value_p (att->dv))
4113 {
4114 rtx cval = dv_as_value (att->dv);
4115 node->loc = cval;
4116 check_dupes = true;
4117 break;
4118 }
4119 else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
4120 curp = attp;
4121 }
4122
4123 if (!curp)
4124 {
4125 curp = attp;
4126 while (*curp)
4127 if ((*curp)->offset == 0
4128 && GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
4129 && dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
4130 break;
4131 else
4132 curp = &(*curp)->next;
4133 gcc_assert (*curp);
4134 }
4135
4136 if (!att)
4137 {
4138 decl_or_value cdv;
4139 rtx cval;
4140
4141 if (!*dfpm->permp)
4142 {
4143 *dfpm->permp = XNEW (dataflow_set);
4144 dataflow_set_init (*dfpm->permp);
4145 }
4146
4147 for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
4148 att; att = att->next)
4149 if (GET_MODE (att->loc) == GET_MODE (node->loc))
4150 {
4151 gcc_assert (att->offset == 0
4152 && dv_is_value_p (att->dv));
4153 val_reset (set, att->dv);
4154 break;
4155 }
4156
4157 if (att)
4158 {
4159 cdv = att->dv;
4160 cval = dv_as_value (cdv);
4161 }
4162 else
4163 {
4164 		      /* Create a unique value to hold this register,
4165 			 one that ought to be found and reused in
4166 			 subsequent rounds.  */
4167 cselib_val *v;
4168 gcc_assert (!cselib_lookup (node->loc,
4169 GET_MODE (node->loc), 0,
4170 VOIDmode));
4171 v = cselib_lookup (node->loc, GET_MODE (node->loc), 1,
4172 VOIDmode);
4173 cselib_preserve_value (v);
4174 cselib_invalidate_rtx (node->loc);
4175 cval = v->val_rtx;
4176 cdv = dv_from_value (cval);
4177 if (dump_file)
4178 fprintf (dump_file,
4179 "Created new value %u:%u for reg %i\n",
4180 v->uid, v->hash, REGNO (node->loc));
4181 }
4182
4183 var_reg_decl_set (*dfpm->permp, node->loc,
4184 VAR_INIT_STATUS_INITIALIZED,
4185 cdv, 0, NULL, INSERT);
4186
4187 node->loc = cval;
4188 check_dupes = true;
4189 }
4190
4191 	      /* Remove the attribute referring to the decl, which now
4192 		 uses the value for the register, whether already existing
4193 		 or to be added when we bring the permanent set in.  */
4194 att = *curp;
4195 *curp = att->next;
4196 pool_free (attrs_pool, att);
4197 }
4198 }
4199
4200 if (check_dupes)
4201 remove_duplicate_values (var);
4202 }
4203
4204 return 1;
4205 }
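
/* The refcount test and "goto restart" above implement copy-on-write:
   a variable shared between dataflow sets is cloned before its
   location list is modified, and the walk starts over on the private
   copy.  A hedged standalone sketch of that shape (deep_copy is a
   hypothetical helper; none of these names are GCC's):

     struct var { int refcount; };

     extern struct var *deep_copy (struct var *);  // hypothetical

     static struct var *
     ensure_private (struct var **slot)
     {
       struct var *v = *slot;
       if (v->refcount > 1)          // shared: clone before mutating
         {
           struct var *copy = deep_copy (v);
           v->refcount--;
           copy->refcount = 1;
           *slot = v = copy;         // caller restarts its walk on V
         }
       return v;
     }
 */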
4206
4207 /* Reset values in the permanent set that are not associated with the
4208 chosen expression. */
4209
4210 static int
4211 variable_post_merge_perm_vals (void **pslot, void *info)
4212 {
4213 struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
4214 dataflow_set *set = dfpm->set;
4215 variable pvar = (variable)*pslot, var;
4216 location_chain pnode;
4217 decl_or_value dv;
4218 attrs att;
4219
4220 gcc_assert (dv_is_value_p (pvar->dv)
4221 && pvar->n_var_parts == 1);
4222 pnode = pvar->var_part[0].loc_chain;
4223 gcc_assert (pnode
4224 && !pnode->next
4225 && REG_P (pnode->loc));
4226
4227 dv = pvar->dv;
4228
4229 var = shared_hash_find (set->vars, dv);
4230 if (var)
4231 {
4232 /* Although variable_post_merge_new_vals may have made decls
4233 non-star-canonical, values that pre-existed in canonical form
4234 remain canonical, and newly-created values reference a single
4235 REG, so they are canonical as well. Since VAR has the
4236 location list for a VALUE, using find_loc_in_1pdv for it is
4237 fine, since VALUEs don't map back to DECLs. */
4238 if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
4239 return 1;
4240 val_reset (set, dv);
4241 }
4242
4243 for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
4244 if (att->offset == 0
4245 && GET_MODE (att->loc) == GET_MODE (pnode->loc)
4246 && dv_is_value_p (att->dv))
4247 break;
4248
4249 /* If there is a value associated with this register already, create
4250 an equivalence. */
4251 if (att && dv_as_value (att->dv) != dv_as_value (dv))
4252 {
4253 rtx cval = dv_as_value (att->dv);
4254 set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
4255 set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
4256 NULL, INSERT);
4257 }
4258 else if (!att)
4259 {
4260 attrs_list_insert (&set->regs[REGNO (pnode->loc)],
4261 dv, 0, pnode->loc);
4262 variable_union (pvar, set);
4263 }
4264
4265 return 1;
4266 }
4267
4268 /* Create values for incoming expressions in SET, import equivalences
4269    from the permanent set *PERMP, then canonicalize values and variables.  */
4270
4271 static void
4272 dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
4273 {
4274 struct dfset_post_merge dfpm;
4275
4276 dfpm.set = set;
4277 dfpm.permp = permp;
4278
4279 htab_traverse (shared_hash_htab (set->vars), variable_post_merge_new_vals,
4280 &dfpm);
4281 if (*permp)
4282 htab_traverse (shared_hash_htab ((*permp)->vars),
4283 variable_post_merge_perm_vals, &dfpm);
4284 htab_traverse (shared_hash_htab (set->vars), canonicalize_values_star, set);
4285 htab_traverse (shared_hash_htab (set->vars), canonicalize_vars_star, set);
4286 }
4287
4288 /* Return a node whose loc is a MEM that refers to EXPR in the
4289 location list of a one-part variable or value VAR, or in that of
4290 any values recursively mentioned in the location lists. */
4291
4292 static location_chain
4293 find_mem_expr_in_1pdv (tree expr, rtx val, htab_t vars)
4294 {
4295 location_chain node;
4296 decl_or_value dv;
4297 variable var;
4298 location_chain where = NULL;
4299
4300 if (!val)
4301 return NULL;
4302
4303 gcc_assert (GET_CODE (val) == VALUE
4304 && !VALUE_RECURSED_INTO (val));
4305
4306 dv = dv_from_value (val);
4307 var = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
4308
4309 if (!var)
4310 return NULL;
4311
4312 gcc_assert (var->onepart);
4313
4314 if (!var->n_var_parts)
4315 return NULL;
4316
4317 VALUE_RECURSED_INTO (val) = true;
4318
4319 for (node = var->var_part[0].loc_chain; node; node = node->next)
4320 if (MEM_P (node->loc)
4321 && MEM_EXPR (node->loc) == expr
4322 && INT_MEM_OFFSET (node->loc) == 0)
4323 {
4324 where = node;
4325 break;
4326 }
4327 else if (GET_CODE (node->loc) == VALUE
4328 && !VALUE_RECURSED_INTO (node->loc)
4329 && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
4330 break;
4331
4332 VALUE_RECURSED_INTO (val) = false;
4333
4334 return where;
4335 }
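
/* The VALUE_RECURSED_INTO flag above serves as a visited bit guarding
   the recursion against cycles among mutually referencing values: set
   on entry, cleared on exit, tested before descending.  A standalone
   sketch of that pattern (illustrative only, not GCC code):

     struct node
     {
       int visited, match;
       int n_kids;
       struct node **kids;
     };

     static struct node *
     dfs_find (struct node *v)
     {
       struct node *found = NULL;
       int i;

       if (v->visited)       // already on the current path: cut off
         return NULL;
       v->visited = 1;
       if (v->match)
         found = v;
       else
         for (i = 0; i < v->n_kids && !found; i++)
           found = dfs_find (v->kids[i]);
       v->visited = 0;       // restore the flag on the way out
       return found;
     }
 */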
4336
4337 /* Return TRUE if the value of MEM may vary across a call. */
4338
4339 static bool
4340 mem_dies_at_call (rtx mem)
4341 {
4342 tree expr = MEM_EXPR (mem);
4343 tree decl;
4344
4345 if (!expr)
4346 return true;
4347
4348 decl = get_base_address (expr);
4349
4350 if (!decl)
4351 return true;
4352
4353 if (!DECL_P (decl))
4354 return true;
4355
4356 return (may_be_aliased (decl)
4357 || (!TREE_READONLY (decl) && is_global_var (decl)));
4358 }
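
/* Concretely (an illustrative example, not taken from the sources):

     int g;                  // global, not read-only: dies at calls
     void f (void)
     {
       int a;                // local, address never taken: survives
       int b, *p = &b;       // address taken, may be aliased: dies
     }

   A MEM for G or B is treated as possibly changing across a call,
   while a MEM for A is not.  */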
4359
4360 /* Remove all MEMs from the location list of a hash table entry for a
4361 one-part variable, except those whose MEM attributes map back to
4362 the variable itself, directly or within a VALUE. */
4363
4364 static int
4365 dataflow_set_preserve_mem_locs (void **slot, void *data)
4366 {
4367 dataflow_set *set = (dataflow_set *) data;
4368 variable var = (variable) *slot;
4369
4370 if (var->onepart == ONEPART_VDECL || var->onepart == ONEPART_DEXPR)
4371 {
4372 tree decl = dv_as_decl (var->dv);
4373 location_chain loc, *locp;
4374 bool changed = false;
4375
4376 if (!var->n_var_parts)
4377 return 1;
4378
4379 gcc_assert (var->n_var_parts == 1);
4380
4381 if (shared_var_p (var, set->vars))
4382 {
4383 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4384 {
4385 	      /* We want to remove dying MEMs that don't refer to DECL.  */
4386 if (GET_CODE (loc->loc) == MEM
4387 && (MEM_EXPR (loc->loc) != decl
4388 || INT_MEM_OFFSET (loc->loc) != 0)
4389 && !mem_dies_at_call (loc->loc))
4390 break;
4391 	      /* We want to move the MEMs that do refer to DECL here.  */
4392 else if (GET_CODE (loc->loc) == VALUE
4393 && find_mem_expr_in_1pdv (decl, loc->loc,
4394 shared_hash_htab (set->vars)))
4395 break;
4396 }
4397
4398 if (!loc)
4399 return 1;
4400
4401 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4402 var = (variable)*slot;
4403 gcc_assert (var->n_var_parts == 1);
4404 }
4405
4406 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4407 loc; loc = *locp)
4408 {
4409 rtx old_loc = loc->loc;
4410 if (GET_CODE (old_loc) == VALUE)
4411 {
4412 location_chain mem_node
4413 = find_mem_expr_in_1pdv (decl, loc->loc,
4414 shared_hash_htab (set->vars));
4415
4416 	    /* ??? This picks up only one out of multiple MEMs that
4417 	       refer to the same variable.  Do we ever need to be
4418 	       concerned about dealing with more than one, or, given
4419 	       that they should all map to the same variable
4420 	       location, will their addresses have been merged so
4421 	       that they are regarded as equivalent?  */
4422 if (mem_node)
4423 {
4424 loc->loc = mem_node->loc;
4425 loc->set_src = mem_node->set_src;
4426 loc->init = MIN (loc->init, mem_node->init);
4427 }
4428 }
4429
4430 if (GET_CODE (loc->loc) != MEM
4431 || (MEM_EXPR (loc->loc) == decl
4432 && INT_MEM_OFFSET (loc->loc) == 0)
4433 || !mem_dies_at_call (loc->loc))
4434 {
4435 if (old_loc != loc->loc && emit_notes)
4436 {
4437 if (old_loc == var->var_part[0].cur_loc)
4438 {
4439 changed = true;
4440 var->var_part[0].cur_loc = NULL;
4441 }
4442 }
4443 locp = &loc->next;
4444 continue;
4445 }
4446
4447 if (emit_notes)
4448 {
4449 if (old_loc == var->var_part[0].cur_loc)
4450 {
4451 changed = true;
4452 var->var_part[0].cur_loc = NULL;
4453 }
4454 }
4455 *locp = loc->next;
4456 pool_free (loc_chain_pool, loc);
4457 }
4458
4459 if (!var->var_part[0].loc_chain)
4460 {
4461 var->n_var_parts--;
4462 changed = true;
4463 }
4464 if (changed)
4465 variable_was_changed (var, set);
4466 }
4467
4468 return 1;
4469 }
4470
4471 /* Remove all MEMs from the location list of a hash table entry for a
4472 value. */
4473
4474 static int
4475 dataflow_set_remove_mem_locs (void **slot, void *data)
4476 {
4477 dataflow_set *set = (dataflow_set *) data;
4478 variable var = (variable) *slot;
4479
4480 if (var->onepart == ONEPART_VALUE)
4481 {
4482 location_chain loc, *locp;
4483 bool changed = false;
4484 rtx cur_loc;
4485
4486 gcc_assert (var->n_var_parts == 1);
4487
4488 if (shared_var_p (var, set->vars))
4489 {
4490 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4491 if (GET_CODE (loc->loc) == MEM
4492 && mem_dies_at_call (loc->loc))
4493 break;
4494
4495 if (!loc)
4496 return 1;
4497
4498 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4499 var = (variable)*slot;
4500 gcc_assert (var->n_var_parts == 1);
4501 }
4502
4503 if (VAR_LOC_1PAUX (var))
4504 cur_loc = VAR_LOC_FROM (var);
4505 else
4506 cur_loc = var->var_part[0].cur_loc;
4507
4508 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4509 loc; loc = *locp)
4510 {
4511 if (GET_CODE (loc->loc) != MEM
4512 || !mem_dies_at_call (loc->loc))
4513 {
4514 locp = &loc->next;
4515 continue;
4516 }
4517
4518 *locp = loc->next;
4519 	  /* If we have deleted the location which was last emitted,
4520 	     we have to emit a new location, so add the variable to the
4521 	     set of changed variables.  */
4522 if (cur_loc == loc->loc)
4523 {
4524 changed = true;
4525 var->var_part[0].cur_loc = NULL;
4526 if (VAR_LOC_1PAUX (var))
4527 VAR_LOC_FROM (var) = NULL;
4528 }
4529 pool_free (loc_chain_pool, loc);
4530 }
4531
4532 if (!var->var_part[0].loc_chain)
4533 {
4534 var->n_var_parts--;
4535 changed = true;
4536 }
4537 if (changed)
4538 variable_was_changed (var, set);
4539 }
4540
4541 return 1;
4542 }
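
/* Both traversers above must invalidate the cached cur_loc when the
   node it points to is unlinked, and flag the variable as changed so
   a fresh location note is emitted.  A standalone sketch of that
   pattern (illustrative names, not GCC's):

     #include <stdlib.h>

     struct loc { struct loc *next; };
     struct part { struct loc *chain; struct loc *cached; };

     static int
     drop_loc (struct part *p, struct loc **lp)
     {
       struct loc *l = *lp;
       int changed = 0;

       if (p->cached == l)   // last-emitted location is going away
         {
           p->cached = NULL;
           changed = 1;      // caller must schedule a new note
         }
       *lp = l->next;        // unlink without a "previous" pointer
       free (l);
       return changed;
     }
 */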
4543
4544 /* Remove all variable-location information about call-clobbered
4545 registers, as well as associations between MEMs and VALUEs. */
4546
4547 static void
4548 dataflow_set_clear_at_call (dataflow_set *set)
4549 {
4550 int r;
4551
4552 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
4553 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, r))
4554 var_regno_delete (set, r);
4555
4556 if (MAY_HAVE_DEBUG_INSNS)
4557 {
4558 set->traversed_vars = set->vars;
4559 htab_traverse (shared_hash_htab (set->vars),
4560 dataflow_set_preserve_mem_locs, set);
4561 set->traversed_vars = set->vars;
4562 htab_traverse (shared_hash_htab (set->vars), dataflow_set_remove_mem_locs,
4563 set);
4564 set->traversed_vars = NULL;
4565 }
4566 }
4567
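/* Return true if the location chain of VP1 contains a location with
   no counterpart in VP2's chain.  Note the check is one-sided; the
   caller (variable_different_p) tests both directions.  */
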
4568 static bool
4569 variable_part_different_p (variable_part *vp1, variable_part *vp2)
4570 {
4571 location_chain lc1, lc2;
4572
4573 for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
4574 {
4575 for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
4576 {
4577 if (REG_P (lc1->loc) && REG_P (lc2->loc))
4578 {
4579 if (REGNO (lc1->loc) == REGNO (lc2->loc))
4580 break;
4581 }
4582 if (rtx_equal_p (lc1->loc, lc2->loc))
4583 break;
4584 }
4585 if (!lc2)
4586 return true;
4587 }
4588 return false;
4589 }
4590
4591 /* Return true if one-part variables VAR1 and VAR2 are different.
4592 They must be in canonical order. */
4593
4594 static bool
4595 onepart_variable_different_p (variable var1, variable var2)
4596 {
4597 location_chain lc1, lc2;
4598
4599 if (var1 == var2)
4600 return false;
4601
4602 gcc_assert (var1->n_var_parts == 1
4603 && var2->n_var_parts == 1);
4604
4605 lc1 = var1->var_part[0].loc_chain;
4606 lc2 = var2->var_part[0].loc_chain;
4607
4608 gcc_assert (lc1 && lc2);
4609
4610 while (lc1 && lc2)
4611 {
4612 if (loc_cmp (lc1->loc, lc2->loc))
4613 return true;
4614 lc1 = lc1->next;
4615 lc2 = lc2->next;
4616 }
4617
4618 return lc1 != lc2;
4619 }
4620
4621 /* Return true if variables VAR1 and VAR2 are different. */
4622
4623 static bool
4624 variable_different_p (variable var1, variable var2)
4625 {
4626 int i;
4627
4628 if (var1 == var2)
4629 return false;
4630
4631 if (var1->onepart != var2->onepart)
4632 return true;
4633
4634 if (var1->n_var_parts != var2->n_var_parts)
4635 return true;
4636
4637 if (var1->onepart && var1->n_var_parts)
4638 {
4639 gcc_checking_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv)
4640 && var1->n_var_parts == 1);
4641 /* One-part values have locations in a canonical order. */
4642 return onepart_variable_different_p (var1, var2);
4643 }
4644
4645 for (i = 0; i < var1->n_var_parts; i++)
4646 {
4647 if (VAR_PART_OFFSET (var1, i) != VAR_PART_OFFSET (var2, i))
4648 return true;
4649 if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
4650 return true;
4651 if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
4652 return true;
4653 }
4654 return false;
4655 }
4656
4657 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
4658
4659 static bool
4660 dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
4661 {
4662 htab_iterator hi;
4663 variable var1;
4664
4665 if (old_set->vars == new_set->vars)
4666 return false;
4667
4668 if (htab_elements (shared_hash_htab (old_set->vars))
4669 != htab_elements (shared_hash_htab (new_set->vars)))
4670 return true;
4671
4672 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (old_set->vars), var1, variable, hi)
4673 {
4674 htab_t htab = shared_hash_htab (new_set->vars);
4675 variable var2 = (variable) htab_find_with_hash (htab, var1->dv,
4676 dv_htab_hash (var1->dv));
4677 if (!var2)
4678 {
4679 if (dump_file && (dump_flags & TDF_DETAILS))
4680 {
4681 fprintf (dump_file, "dataflow difference found: removal of:\n");
4682 dump_var (var1);
4683 }
4684 return true;
4685 }
4686
4687 if (variable_different_p (var1, var2))
4688 {
4689 if (dump_file && (dump_flags & TDF_DETAILS))
4690 {
4691 fprintf (dump_file, "dataflow difference found: "
4692 "old and new follow:\n");
4693 dump_var (var1);
4694 dump_var (var2);
4695 }
4696 return true;
4697 }
4698 }
4699
4700 /* No need to traverse the second hashtab, if both have the same number
4701 of elements and the second one had all entries found in the first one,
4702 then it can't have any extra entries. */
4703 return false;
4704 }
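
/* The early size comparison above is what makes one-way containment
   sufficient: |A| == |B| together with A being a subset of B implies
   A == B.  The same argument on integer arrays, as a standalone
   sketch (illustrative only, not GCC code):

     #include <stdlib.h>

     static int
     cmp_int (const void *x, const void *y)
     {
       return *(const int *) x - *(const int *) y;
     }

     // A and B hold NA and NB distinct keys; B is sorted.
     static int
     sets_differ (const int *a, int na, const int *b, int nb)
     {
       int i;

       if (na != nb)
         return 1;
       for (i = 0; i < na; i++)
         if (!bsearch (&a[i], b, nb, sizeof *b, cmp_int))
           return 1;
       return 0;   // equal sizes plus containment: the sets are equal
     }
 */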
4705
4706 /* Free the contents of dataflow set SET. */
4707
4708 static void
4709 dataflow_set_destroy (dataflow_set *set)
4710 {
4711 int i;
4712
4713 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4714 attrs_list_clear (&set->regs[i]);
4715
4716 shared_hash_destroy (set->vars);
4717 set->vars = NULL;
4718 }
4719
4720 /* Return true if RTL X contains a SYMBOL_REF. */
4721
4722 static bool
4723 contains_symbol_ref (rtx x)
4724 {
4725 const char *fmt;
4726 RTX_CODE code;
4727 int i;
4728
4729 if (!x)
4730 return false;
4731
4732 code = GET_CODE (x);
4733 if (code == SYMBOL_REF)
4734 return true;
4735
4736 fmt = GET_RTX_FORMAT (code);
4737 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4738 {
4739 if (fmt[i] == 'e')
4740 {
4741 if (contains_symbol_ref (XEXP (x, i)))
4742 return true;
4743 }
4744 else if (fmt[i] == 'E')
4745 {
4746 int j;
4747 for (j = 0; j < XVECLEN (x, i); j++)
4748 if (contains_symbol_ref (XVECEXP (x, i, j)))
4749 return true;
4750 }
4751 }
4752
4753 return false;
4754 }
4755
4756 /* Shall EXPR be tracked? */
4757
4758 static bool
4759 track_expr_p (tree expr, bool need_rtl)
4760 {
4761 rtx decl_rtl;
4762 tree realdecl;
4763
4764 if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
4765 return DECL_RTL_SET_P (expr);
4766
4767   /* If EXPR is not a parameter or a variable, do not track it.  */
4768 if (TREE_CODE (expr) != VAR_DECL && TREE_CODE (expr) != PARM_DECL)
4769 return 0;
4770
4771 /* It also must have a name... */
4772 if (!DECL_NAME (expr) && need_rtl)
4773 return 0;
4774
4775   /* ... and an RTL assigned to it.  */
4776 decl_rtl = DECL_RTL_IF_SET (expr);
4777 if (!decl_rtl && need_rtl)
4778 return 0;
4779
4780 /* If this expression is really a debug alias of some other declaration, we
4781 don't need to track this expression if the ultimate declaration is
4782 ignored. */
4783 realdecl = expr;
4784 if (DECL_DEBUG_EXPR_IS_FROM (realdecl))
4785 {
4786 realdecl = DECL_DEBUG_EXPR (realdecl);
4787 if (realdecl == NULL_TREE)
4788 realdecl = expr;
4789 else if (!DECL_P (realdecl))
4790 {
4791 if (handled_component_p (realdecl))
4792 {
4793 HOST_WIDE_INT bitsize, bitpos, maxsize;
4794 tree innerdecl
4795 = get_ref_base_and_extent (realdecl, &bitpos, &bitsize,
4796 &maxsize);
4797 if (!DECL_P (innerdecl)
4798 || DECL_IGNORED_P (innerdecl)
4799 || TREE_STATIC (innerdecl)
4800 || bitsize <= 0
4801 || bitpos + bitsize > 256
4802 || bitsize != maxsize)
4803 return 0;
4804 else
4805 realdecl = expr;
4806 }
4807 else
4808 return 0;
4809 }
4810 }
4811
4812   /* Do not track EXPR if its REALDECL should be ignored for debugging
4813      purposes.  */
4814 if (DECL_IGNORED_P (realdecl))
4815 return 0;
4816
4817 /* Do not track global variables until we are able to emit correct location
4818 list for them. */
4819 if (TREE_STATIC (realdecl))
4820 return 0;
4821
4822   /* When EXPR is a DECL for an alias of some variable (see the example
4823      below), the TREE_STATIC flag is not used.  Disable tracking of all
4824      DECLs whose DECL_RTL contains a SYMBOL_REF.
4825
4826 Example:
4827 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
4828 char **_dl_argv;
4829 */
4830 if (decl_rtl && MEM_P (decl_rtl)
4831 && contains_symbol_ref (XEXP (decl_rtl, 0)))
4832 return 0;
4833
4834   /* If the RTL is a memory, it should not be very large (because that
4835      would be an array or a struct).  */
4836 if (decl_rtl && MEM_P (decl_rtl))
4837 {
4838 /* Do not track structures and arrays. */
4839 if (GET_MODE (decl_rtl) == BLKmode
4840 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
4841 return 0;
4842 if (MEM_SIZE_KNOWN_P (decl_rtl)
4843 && MEM_SIZE (decl_rtl) > MAX_VAR_PARTS)
4844 return 0;
4845 }
4846
4847 DECL_CHANGED (expr) = 0;
4848 DECL_CHANGED (realdecl) = 0;
4849 return 1;
4850 }
4851
4852 /* Determine whether a given LOC refers to the same variable part as
4853 EXPR+OFFSET. */
4854
4855 static bool
4856 same_variable_part_p (rtx loc, tree expr, HOST_WIDE_INT offset)
4857 {
4858 tree expr2;
4859 HOST_WIDE_INT offset2;
4860
4861 if (! DECL_P (expr))
4862 return false;
4863
4864 if (REG_P (loc))
4865 {
4866 expr2 = REG_EXPR (loc);
4867 offset2 = REG_OFFSET (loc);
4868 }
4869 else if (MEM_P (loc))
4870 {
4871 expr2 = MEM_EXPR (loc);
4872 offset2 = INT_MEM_OFFSET (loc);
4873 }
4874 else
4875 return false;
4876
4877 if (! expr2 || ! DECL_P (expr2))
4878 return false;
4879
4880 expr = var_debug_decl (expr);
4881 expr2 = var_debug_decl (expr2);
4882
4883 return (expr == expr2 && offset == offset2);
4884 }
4885
4886 /* LOC is a REG or MEM that we would like to track if possible.
4887 If EXPR is null, we don't know what expression LOC refers to,
4888 otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
4889 LOC is an lvalue register.
4890
4891 Return true if EXPR is nonnull and if LOC, or some lowpart of it,
4892 is something we can track. When returning true, store the mode of
4893 the lowpart we can track in *MODE_OUT (if nonnull) and its offset
4894 from EXPR in *OFFSET_OUT (if nonnull). */
4895
4896 static bool
4897 track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p,
4898 enum machine_mode *mode_out, HOST_WIDE_INT *offset_out)
4899 {
4900 enum machine_mode mode;
4901
4902 if (expr == NULL || !track_expr_p (expr, true))
4903 return false;
4904
4905 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
4906 whole subreg, but only the old inner part is really relevant. */
4907 mode = GET_MODE (loc);
4908 if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
4909 {
4910 enum machine_mode pseudo_mode;
4911
4912 pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
4913 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (pseudo_mode))
4914 {
4915 offset += byte_lowpart_offset (pseudo_mode, mode);
4916 mode = pseudo_mode;
4917 }
4918 }
4919
4920 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
4921 Do the same if we are storing to a register and EXPR occupies
4922 the whole of register LOC; in that case, the whole of EXPR is
4923 being changed. We exclude complex modes from the second case
4924 because the real and imaginary parts are represented as separate
4925 pseudo registers, even if the whole complex value fits into one
4926 hard register. */
4927 if ((GET_MODE_SIZE (mode) > GET_MODE_SIZE (DECL_MODE (expr))
4928 || (store_reg_p
4929 && !COMPLEX_MODE_P (DECL_MODE (expr))
4930 && hard_regno_nregs[REGNO (loc)][DECL_MODE (expr)] == 1))
4931 && offset + byte_lowpart_offset (DECL_MODE (expr), mode) == 0)
4932 {
4933 mode = DECL_MODE (expr);
4934 offset = 0;
4935 }
4936
4937 if (offset < 0 || offset >= MAX_VAR_PARTS)
4938 return false;
4939
4940 if (mode_out)
4941 *mode_out = mode;
4942 if (offset_out)
4943 *offset_out = offset;
4944 return true;
4945 }
4946
4947 /* Return the MODE lowpart of LOC, or null if LOC is not something we
4948 want to track. When returning nonnull, make sure that the attributes
4949 on the returned value are updated. */
4950
4951 static rtx
4952 var_lowpart (enum machine_mode mode, rtx loc)
4953 {
4954 unsigned int offset, reg_offset, regno;
4955
4956 if (!REG_P (loc) && !MEM_P (loc))
4957 return NULL;
4958
4959 if (GET_MODE (loc) == mode)
4960 return loc;
4961
4962 offset = byte_lowpart_offset (mode, GET_MODE (loc));
4963
4964 if (MEM_P (loc))
4965 return adjust_address_nv (loc, mode, offset);
4966
4967 reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
4968 regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
4969 reg_offset, mode);
4970 return gen_rtx_REG_offset (loc, mode, regno, offset);
4971 }
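
/* For instance (illustrative; the exact numbers depend on the target):
   taking the SImode lowpart of a DImode location yields byte offset 0
   on a little-endian target but offset 4 on a big-endian one, which is
   what byte_lowpart_offset computes above; for hard registers the
   register number itself may advance too, via subreg_regno_offset.  */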
4972
4973 /* Carry information about uses and stores while walking rtx. */
4974
4975 struct count_use_info
4976 {
4977 /* The insn where the RTX is. */
4978 rtx insn;
4979
4980 /* The basic block where insn is. */
4981 basic_block bb;
4982
4983 /* The array of n_sets sets in the insn, as determined by cselib. */
4984 struct cselib_set *sets;
4985 int n_sets;
4986
4987 /* True if we're counting stores, false otherwise. */
4988 bool store_p;
4989 };
4990
4991 /* Find a VALUE corresponding to X. */
4992
4993 static inline cselib_val *
4994 find_use_val (rtx x, enum machine_mode mode, struct count_use_info *cui)
4995 {
4996 int i;
4997
4998 if (cui->sets)
4999 {
5000 /* This is called after uses are set up and before stores are
5001 processed by cselib, so it's safe to look up srcs, but not
5002 dsts. So we look up expressions that appear in srcs or in
5003 dest expressions, but we search the sets array for dests of
5004 stores. */
5005 if (cui->store_p)
5006 {
5007 /* Some targets represent memset and memcpy patterns
5008 by (set (mem:BLK ...) (reg:[QHSD]I ...)) or
5009 (set (mem:BLK ...) (const_int ...)) or
5010 (set (mem:BLK ...) (mem:BLK ...)). Don't return anything
5011 in that case, otherwise we end up with mode mismatches. */
5012 if (mode == BLKmode && MEM_P (x))
5013 return NULL;
5014 for (i = 0; i < cui->n_sets; i++)
5015 if (cui->sets[i].dest == x)
5016 return cui->sets[i].src_elt;
5017 }
5018 else
5019 return cselib_lookup (x, mode, 0, VOIDmode);
5020 }
5021
5022 return NULL;
5023 }
5024
5025 /* Replace all registers and addresses in an expression with VALUE
5026    expressions that map back to them, unless the expression is a
5027    register.  If no mapping can be performed, return NULL.  */
5028
5029 static rtx
5030 replace_expr_with_values (rtx loc)
5031 {
5032 if (REG_P (loc) || GET_CODE (loc) == ENTRY_VALUE)
5033 return NULL;
5034 else if (MEM_P (loc))
5035 {
5036 cselib_val *addr = cselib_lookup (XEXP (loc, 0),
5037 get_address_mode (loc), 0,
5038 GET_MODE (loc));
5039 if (addr)
5040 return replace_equiv_address_nv (loc, addr->val_rtx);
5041 else
5042 return NULL;
5043 }
5044 else
5045 return cselib_subst_to_values (loc, VOIDmode);
5046 }
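
/* As a hypothetical example of the MEM case: a stack slot such as
   (mem:SI (plus:P (reg:P fp) (const_int -4))) would come back as
   (mem:SI (value:P V)) for the VALUE V that cselib assigned to the
   address; the exact rtx shapes depend on the target.  */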
5047
5048 /* Return true if *X is a DEBUG_EXPR. Usable as an argument to
5049 for_each_rtx to tell whether there are any DEBUG_EXPRs within
5050 RTX. */
5051
5052 static int
5053 rtx_debug_expr_p (rtx *x, void *data ATTRIBUTE_UNUSED)
5054 {
5055 rtx loc = *x;
5056
5057 return GET_CODE (loc) == DEBUG_EXPR;
5058 }
5059
5060 /* Determine what kind of micro operation to choose for a USE. Return
5061 MO_CLOBBER if no micro operation is to be generated. */
5062
5063 static enum micro_operation_type
5064 use_type (rtx loc, struct count_use_info *cui, enum machine_mode *modep)
5065 {
5066 tree expr;
5067
5068 if (cui && cui->sets)
5069 {
5070 if (GET_CODE (loc) == VAR_LOCATION)
5071 {
5072 if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
5073 {
5074 rtx ploc = PAT_VAR_LOCATION_LOC (loc);
5075 if (! VAR_LOC_UNKNOWN_P (ploc))
5076 {
5077 cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1,
5078 VOIDmode);
5079
5080 /* ??? flag_float_store and volatile mems are never
5081 given values, but we could in theory use them for
5082 locations. */
5083 gcc_assert (val || 1);
5084 }
5085 return MO_VAL_LOC;
5086 }
5087 else
5088 return MO_CLOBBER;
5089 }
5090
5091 if (REG_P (loc) || MEM_P (loc))
5092 {
5093 if (modep)
5094 *modep = GET_MODE (loc);
5095 if (cui->store_p)
5096 {
5097 if (REG_P (loc)
5098 || (find_use_val (loc, GET_MODE (loc), cui)
5099 && cselib_lookup (XEXP (loc, 0),
5100 get_address_mode (loc), 0,
5101 GET_MODE (loc))))
5102 return MO_VAL_SET;
5103 }
5104 else
5105 {
5106 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5107
5108 if (val && !cselib_preserved_value_p (val))
5109 return MO_VAL_USE;
5110 }
5111 }
5112 }
5113
5114 if (REG_P (loc))
5115 {
5116 gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);
5117
5118 if (loc == cfa_base_rtx)
5119 return MO_CLOBBER;
5120 expr = REG_EXPR (loc);
5121
5122 if (!expr)
5123 return MO_USE_NO_VAR;
5124 else if (target_for_debug_bind (var_debug_decl (expr)))
5125 return MO_CLOBBER;
5126 else if (track_loc_p (loc, expr, REG_OFFSET (loc),
5127 false, modep, NULL))
5128 return MO_USE;
5129 else
5130 return MO_USE_NO_VAR;
5131 }
5132 else if (MEM_P (loc))
5133 {
5134 expr = MEM_EXPR (loc);
5135
5136 if (!expr)
5137 return MO_CLOBBER;
5138 else if (target_for_debug_bind (var_debug_decl (expr)))
5139 return MO_CLOBBER;
5140 else if (track_loc_p (loc, expr, INT_MEM_OFFSET (loc),
5141 false, modep, NULL)
5142 /* Multi-part variables shouldn't refer to one-part
5143 variable names such as VALUEs (never happens) or
5144 DEBUG_EXPRs (only happens in the presence of debug
5145 insns). */
5146 && (!MAY_HAVE_DEBUG_INSNS
5147 || !for_each_rtx (&XEXP (loc, 0), rtx_debug_expr_p, NULL)))
5148 return MO_USE;
5149 else
5150 return MO_CLOBBER;
5151 }
5152
5153 return MO_CLOBBER;
5154 }
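
/* An informal summary of the decisions above:

     VAR_LOCATION of a tracked decl          -> MO_VAL_LOC
     REG/MEM store with a cselib value       -> MO_VAL_SET
     REG/MEM use of an unpreserved value     -> MO_VAL_USE
     REG/MEM with a tracked expression       -> MO_USE
     REG with no usable expression           -> MO_USE_NO_VAR
     anything else                           -> MO_CLOBBER (no micro-op)  */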
5155
5156 /* Log to OUT information about micro-operation MOPT involving X in
5157 INSN of BB. */
5158
5159 static inline void
5160 log_op_type (rtx x, basic_block bb, rtx insn,
5161 enum micro_operation_type mopt, FILE *out)
5162 {
5163 fprintf (out, "bb %i op %i insn %i %s ",
5164 bb->index, VEC_length (micro_operation, VTI (bb)->mos),
5165 INSN_UID (insn), micro_operation_type_name[mopt]);
5166 print_inline_rtx (out, x, 2);
5167 fputc ('\n', out);
5168 }
5169
5170 /* Tell whether the CONCAT used to hold a VALUE and its location
5171    needs value resolution, i.e., an attempt at mapping the location
5172    back to other incoming values.  */
5173 #define VAL_NEEDS_RESOLUTION(x) \
5174 (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
5175 /* Whether the location in the CONCAT is a tracked expression, that
5176 should also be handled like a MO_USE. */
5177 #define VAL_HOLDS_TRACK_EXPR(x) \
5178 (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
5179 /* Whether the location in the CONCAT should be handled like a MO_COPY
5180 as well. */
5181 #define VAL_EXPR_IS_COPIED(x) \
5182 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
5183 /* Whether the location in the CONCAT should be handled like a
5184 MO_CLOBBER as well. */
5185 #define VAL_EXPR_IS_CLOBBERED(x) \
5186 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
5187
5188 /* All preserved VALUEs. */
5189 static VEC (rtx, heap) *preserved_values;
5190
5191 /* Ensure VAL is preserved and remember it in a vector for vt_emit_notes. */
5192
5193 static void
5194 preserve_value (cselib_val *val)
5195 {
5196 cselib_preserve_value (val);
5197 VEC_safe_push (rtx, heap, preserved_values, val->val_rtx);
5198 }
5199
5200 /* Helper function for MO_VAL_LOC handling.  Return nonzero if we
5201    discover any rtxes that are unsuitable for CONST use and were not
5202    replaced by VALUEs.  */
5203
5204 static int
5205 non_suitable_const (rtx *x, void *data ATTRIBUTE_UNUSED)
5206 {
5207 if (*x == NULL_RTX)
5208 return 0;
5209
5210 switch (GET_CODE (*x))
5211 {
5212 case REG:
5213 case DEBUG_EXPR:
5214 case PC:
5215 case SCRATCH:
5216 case CC0:
5217 case ASM_INPUT:
5218 case ASM_OPERANDS:
5219 return 1;
5220 case MEM:
5221 return !MEM_READONLY_P (*x);
5222 default:
5223 return 0;
5224 }
5225 }
5226
5227 /* Add uses (register and memory references) LOC, which will be tracked,
5228    to VTI (bb)->mos.  INSN is the instruction which LOC is part of.  */
5229
5230 static int
5231 add_uses (rtx *ploc, void *data)
5232 {
5233 rtx loc = *ploc;
5234 enum machine_mode mode = VOIDmode;
5235 struct count_use_info *cui = (struct count_use_info *)data;
5236 enum micro_operation_type type = use_type (loc, cui, &mode);
5237
5238 if (type != MO_CLOBBER)
5239 {
5240 basic_block bb = cui->bb;
5241 micro_operation mo;
5242
5243 mo.type = type;
5244 mo.u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
5245 mo.insn = cui->insn;
5246
5247 if (type == MO_VAL_LOC)
5248 {
5249 rtx oloc = loc;
5250 rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
5251 cselib_val *val;
5252
5253 gcc_assert (cui->sets);
5254
5255 if (MEM_P (vloc)
5256 && !REG_P (XEXP (vloc, 0))
5257 && !MEM_P (XEXP (vloc, 0)))
5258 {
5259 rtx mloc = vloc;
5260 enum machine_mode address_mode = get_address_mode (mloc);
5261 cselib_val *val
5262 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5263 GET_MODE (mloc));
5264
5265 if (val && !cselib_preserved_value_p (val))
5266 preserve_value (val);
5267 }
5268
5269 if (CONSTANT_P (vloc)
5270 && (GET_CODE (vloc) != CONST
5271 || for_each_rtx (&vloc, non_suitable_const, NULL)))
5272 /* For constants don't look up any value. */;
5273 else if (!VAR_LOC_UNKNOWN_P (vloc) && !unsuitable_loc (vloc)
5274 && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
5275 {
5276 enum machine_mode mode2;
5277 enum micro_operation_type type2;
5278 rtx nloc = NULL;
5279 bool resolvable = REG_P (vloc) || MEM_P (vloc);
5280
5281 if (resolvable)
5282 nloc = replace_expr_with_values (vloc);
5283
5284 if (nloc)
5285 {
5286 oloc = shallow_copy_rtx (oloc);
5287 PAT_VAR_LOCATION_LOC (oloc) = nloc;
5288 }
5289
5290 oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);
5291
5292 type2 = use_type (vloc, 0, &mode2);
5293
5294 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5295 || type2 == MO_CLOBBER);
5296
5297 if (type2 == MO_CLOBBER
5298 && !cselib_preserved_value_p (val))
5299 {
5300 VAL_NEEDS_RESOLUTION (oloc) = resolvable;
5301 preserve_value (val);
5302 }
5303 }
5304 else if (!VAR_LOC_UNKNOWN_P (vloc))
5305 {
5306 oloc = shallow_copy_rtx (oloc);
5307 PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
5308 }
5309
5310 mo.u.loc = oloc;
5311 }
5312 else if (type == MO_VAL_USE)
5313 {
5314 enum machine_mode mode2 = VOIDmode;
5315 enum micro_operation_type type2;
5316 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5317 rtx vloc, oloc = loc, nloc;
5318
5319 gcc_assert (cui->sets);
5320
5321 if (MEM_P (oloc)
5322 && !REG_P (XEXP (oloc, 0))
5323 && !MEM_P (XEXP (oloc, 0)))
5324 {
5325 rtx mloc = oloc;
5326 enum machine_mode address_mode = get_address_mode (mloc);
5327 cselib_val *val
5328 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5329 GET_MODE (mloc));
5330
5331 if (val && !cselib_preserved_value_p (val))
5332 preserve_value (val);
5333 }
5334
5335 type2 = use_type (loc, 0, &mode2);
5336
5337 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5338 || type2 == MO_CLOBBER);
5339
5340 if (type2 == MO_USE)
5341 vloc = var_lowpart (mode2, loc);
5342 else
5343 vloc = oloc;
5344
5345 /* The loc of a MO_VAL_USE may have two forms:
5346
5347 (concat val src): val is at src, a value-based
5348 representation.
5349
5350 (concat (concat val use) src): same as above, with use as
5351 the MO_USE tracked value, if it differs from src.
5352
5353 */
5354
5355 gcc_checking_assert (REG_P (loc) || MEM_P (loc));
5356 nloc = replace_expr_with_values (loc);
5357 if (!nloc)
5358 nloc = oloc;
5359
5360 if (vloc != nloc)
5361 oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
5362 else
5363 oloc = val->val_rtx;
5364
5365 mo.u.loc = gen_rtx_CONCAT (mode, oloc, nloc);
5366
5367 if (type2 == MO_USE)
5368 VAL_HOLDS_TRACK_EXPR (mo.u.loc) = 1;
5369 if (!cselib_preserved_value_p (val))
5370 {
5371 VAL_NEEDS_RESOLUTION (mo.u.loc) = 1;
5372 preserve_value (val);
5373 }
5374 }
5375 else
5376 gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);
5377
5378 if (dump_file && (dump_flags & TDF_DETAILS))
5379 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
5380 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
5381 }
5382
5383 return 0;
5384 }
5385
5386 /* Helper function for finding all uses of REG/MEM in X in insn INSN. */
5387
5388 static void
5389 add_uses_1 (rtx *x, void *cui)
5390 {
5391 for_each_rtx (x, add_uses, cui);
5392 }
5393
5394 /* This is the value used during expansion of locations. We want it
5395 to be unbounded, so that variables expanded deep in a recursion
5396 nest are fully evaluated, so that their values are cached
5397 correctly. We avoid recursion cycles through other means, and we
5398 don't unshare RTL, so excess complexity is not a problem. */
5399 #define EXPR_DEPTH (INT_MAX)
5400 /* We use this to keep too-complex expressions from being emitted as
5401    location notes, and from there into debug information.  Users can
5402    trade compile time for ridiculously complex expressions, although
5403    they're seldom useful, and they may often have to be discarded as
5404    not representable anyway.  */
5405 #define EXPR_USE_DEPTH (PARAM_VALUE (PARAM_MAX_VARTRACK_EXPR_DEPTH))
5406
5407 /* Attempt to reverse the EXPR operation in the debug info and record
5408    it in the cselib table.  For example, given reg1 = reg2 + 6, even when
5409    reg2 is no longer live we can express its value as VAL - 6.  */
5410
5411 static void
5412 reverse_op (rtx val, const_rtx expr, rtx insn)
5413 {
5414 rtx src, arg, ret;
5415 cselib_val *v;
5416 struct elt_loc_list *l;
5417 enum rtx_code code;
5418
5419 if (GET_CODE (expr) != SET)
5420 return;
5421
5422 if (!REG_P (SET_DEST (expr)) || GET_MODE (val) != GET_MODE (SET_DEST (expr)))
5423 return;
5424
5425 src = SET_SRC (expr);
5426 switch (GET_CODE (src))
5427 {
5428 case PLUS:
5429 case MINUS:
5430 case XOR:
5431 case NOT:
5432 case NEG:
5433 if (!REG_P (XEXP (src, 0)))
5434 return;
5435 break;
5436 case SIGN_EXTEND:
5437 case ZERO_EXTEND:
5438 if (!REG_P (XEXP (src, 0)) && !MEM_P (XEXP (src, 0)))
5439 return;
5440 break;
5441 default:
5442 return;
5443 }
5444
5445 if (!SCALAR_INT_MODE_P (GET_MODE (src)) || XEXP (src, 0) == cfa_base_rtx)
5446 return;
5447
5448 v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0, VOIDmode);
5449 if (!v || !cselib_preserved_value_p (v))
5450 return;
5451
5452 /* Use canonical V to avoid creating multiple redundant expressions
5453 for different VALUES equivalent to V. */
5454 v = canonical_cselib_val (v);
5455
5456   /* Adding a reverse op isn't useful if V already has an always valid
5457      location.  Ignore ENTRY_VALUE: while it is always constant, we
5458      should prefer non-ENTRY_VALUE locations whenever possible.  */
5459 for (l = v->locs; l; l = l->next)
5460 if (CONSTANT_P (l->loc)
5461 && (GET_CODE (l->loc) != CONST || !references_value_p (l->loc, 0)))
5462 return;
5463
5464 switch (GET_CODE (src))
5465 {
5466 case NOT:
5467 case NEG:
5468 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5469 return;
5470 ret = gen_rtx_fmt_e (GET_CODE (src), GET_MODE (val), val);
5471 break;
5472 case SIGN_EXTEND:
5473 case ZERO_EXTEND:
5474 ret = gen_lowpart_SUBREG (GET_MODE (v->val_rtx), val);
5475 break;
5476 case XOR:
5477 code = XOR;
5478 goto binary;
5479 case PLUS:
5480 code = MINUS;
5481 goto binary;
5482 case MINUS:
5483 code = PLUS;
5484 goto binary;
5485 binary:
5486 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5487 return;
5488 arg = XEXP (src, 1);
5489 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5490 {
5491 arg = cselib_expand_value_rtx (arg, scratch_regs, 5);
5492 if (arg == NULL_RTX)
5493 return;
5494 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5495 return;
5496 }
5497 ret = simplify_gen_binary (code, GET_MODE (val), val, arg);
5498 if (ret == val)
5499 /* Ensure ret isn't VALUE itself (which can happen e.g. for
5500 (plus (reg1) (reg2)) when reg2 is known to be 0), as that
5501 breaks a lot of routines during var-tracking. */
5502 ret = gen_rtx_fmt_ee (PLUS, GET_MODE (val), val, const0_rtx);
5503 break;
5504 default:
5505 gcc_unreachable ();
5506 }
5507
5508 cselib_add_permanent_equiv (v, ret, insn);
5509 }
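
/* Informally, the reversals recorded above are:

     reg1 = reg2 + C    ==>  reg2 equiv VAL - C
     reg1 = reg2 - C    ==>  reg2 equiv VAL + C
     reg1 = reg2 ^ C    ==>  reg2 equiv VAL ^ C
     reg1 = ~reg2       ==>  reg2 equiv ~VAL
     reg1 = -reg2       ==>  reg2 equiv -VAL
     reg1 = extend (x)  ==>  x equiv lowpart SUBREG of VAL

   where VAL is the value recorded for reg1, and C is a constant (or
   an expression cselib can expand to one).  */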
5510
5511 /* Add stores (register and memory references) LOC, which will be tracked,
5512    to VTI (bb)->mos.  EXPR is the RTL expression containing the store.
5513    CUIP->insn is the instruction which LOC is part of.  */
5514
5515 static void
5516 add_stores (rtx loc, const_rtx expr, void *cuip)
5517 {
5518 enum machine_mode mode = VOIDmode, mode2;
5519 struct count_use_info *cui = (struct count_use_info *)cuip;
5520 basic_block bb = cui->bb;
5521 micro_operation mo;
5522 rtx oloc = loc, nloc, src = NULL;
5523 enum micro_operation_type type = use_type (loc, cui, &mode);
5524 bool track_p = false;
5525 cselib_val *v;
5526 bool resolve, preserve;
5527
5528 if (type == MO_CLOBBER)
5529 return;
5530
5531 mode2 = mode;
5532
5533 if (REG_P (loc))
5534 {
5535 gcc_assert (loc != cfa_base_rtx);
5536 if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
5537 || !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
5538 || GET_CODE (expr) == CLOBBER)
5539 {
5540 mo.type = MO_CLOBBER;
5541 mo.u.loc = loc;
5542 if (GET_CODE (expr) == SET
5543 && SET_DEST (expr) == loc
5544 && !unsuitable_loc (SET_SRC (expr))
5545 && find_use_val (loc, mode, cui))
5546 {
5547 gcc_checking_assert (type == MO_VAL_SET);
5548 mo.u.loc = gen_rtx_SET (VOIDmode, loc, SET_SRC (expr));
5549 }
5550 }
5551 else
5552 {
5553 if (GET_CODE (expr) == SET
5554 && SET_DEST (expr) == loc
5555 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
5556 src = var_lowpart (mode2, SET_SRC (expr));
5557 loc = var_lowpart (mode2, loc);
5558
5559 if (src == NULL)
5560 {
5561 mo.type = MO_SET;
5562 mo.u.loc = loc;
5563 }
5564 else
5565 {
5566 rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
5567 if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
5568 mo.type = MO_COPY;
5569 else
5570 mo.type = MO_SET;
5571 mo.u.loc = xexpr;
5572 }
5573 }
5574 mo.insn = cui->insn;
5575 }
5576 else if (MEM_P (loc)
5577 && ((track_p = use_type (loc, NULL, &mode2) == MO_USE)
5578 || cui->sets))
5579 {
5580 if (MEM_P (loc) && type == MO_VAL_SET
5581 && !REG_P (XEXP (loc, 0))
5582 && !MEM_P (XEXP (loc, 0)))
5583 {
5584 rtx mloc = loc;
5585 enum machine_mode address_mode = get_address_mode (mloc);
5586 cselib_val *val = cselib_lookup (XEXP (mloc, 0),
5587 address_mode, 0,
5588 GET_MODE (mloc));
5589
5590 if (val && !cselib_preserved_value_p (val))
5591 preserve_value (val);
5592 }
5593
5594 if (GET_CODE (expr) == CLOBBER || !track_p)
5595 {
5596 mo.type = MO_CLOBBER;
5597 mo.u.loc = track_p ? var_lowpart (mode2, loc) : loc;
5598 }
5599 else
5600 {
5601 if (GET_CODE (expr) == SET
5602 && SET_DEST (expr) == loc
5603 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
5604 src = var_lowpart (mode2, SET_SRC (expr));
5605 loc = var_lowpart (mode2, loc);
5606
5607 if (src == NULL)
5608 {
5609 mo.type = MO_SET;
5610 mo.u.loc = loc;
5611 }
5612 else
5613 {
5614 rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
5615 if (same_variable_part_p (SET_SRC (xexpr),
5616 MEM_EXPR (loc),
5617 INT_MEM_OFFSET (loc)))
5618 mo.type = MO_COPY;
5619 else
5620 mo.type = MO_SET;
5621 mo.u.loc = xexpr;
5622 }
5623 }
5624 mo.insn = cui->insn;
5625 }
5626 else
5627 return;
5628
5629 if (type != MO_VAL_SET)
5630 goto log_and_return;
5631
5632 v = find_use_val (oloc, mode, cui);
5633
5634 if (!v)
5635 goto log_and_return;
5636
5637 resolve = preserve = !cselib_preserved_value_p (v);
5638
5639 nloc = replace_expr_with_values (oloc);
5640 if (nloc)
5641 oloc = nloc;
5642
5643 if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
5644 {
5645 cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0, VOIDmode);
5646
5647 gcc_assert (oval != v);
5648 gcc_assert (REG_P (oloc) || MEM_P (oloc));
5649
5650 if (oval && !cselib_preserved_value_p (oval))
5651 {
5652 micro_operation moa;
5653
5654 preserve_value (oval);
5655
5656 moa.type = MO_VAL_USE;
5657 moa.u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
5658 VAL_NEEDS_RESOLUTION (moa.u.loc) = 1;
5659 moa.insn = cui->insn;
5660
5661 if (dump_file && (dump_flags & TDF_DETAILS))
5662 log_op_type (moa.u.loc, cui->bb, cui->insn,
5663 moa.type, dump_file);
5664 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &moa);
5665 }
5666
5667 resolve = false;
5668 }
5669 else if (resolve && GET_CODE (mo.u.loc) == SET)
5670 {
5671 if (REG_P (SET_SRC (expr)) || MEM_P (SET_SRC (expr)))
5672 nloc = replace_expr_with_values (SET_SRC (expr));
5673 else
5674 nloc = NULL_RTX;
5675
5676       /* Avoid the mode mismatch between OLOC and EXPR.  */
5677 if (!nloc && mode != mode2)
5678 {
5679 nloc = SET_SRC (expr);
5680 gcc_assert (oloc == SET_DEST (expr));
5681 }
5682
5683 if (nloc && nloc != SET_SRC (mo.u.loc))
5684 oloc = gen_rtx_SET (GET_MODE (mo.u.loc), oloc, nloc);
5685 else
5686 {
5687 if (oloc == SET_DEST (mo.u.loc))
5688 /* No point in duplicating. */
5689 oloc = mo.u.loc;
5690 if (!REG_P (SET_SRC (mo.u.loc)))
5691 resolve = false;
5692 }
5693 }
5694 else if (!resolve)
5695 {
5696 if (GET_CODE (mo.u.loc) == SET
5697 && oloc == SET_DEST (mo.u.loc))
5698 /* No point in duplicating. */
5699 oloc = mo.u.loc;
5700 }
5701 else
5702 resolve = false;
5703
5704 loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);
5705
5706 if (mo.u.loc != oloc)
5707 loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, mo.u.loc);
5708
5709 /* The loc of a MO_VAL_SET may have various forms:
5710
5711 (concat val dst): dst now holds val
5712
5713 (concat val (set dst src)): dst now holds val, copied from src
5714
5715 (concat (concat val dstv) dst): dst now holds val; dstv is dst
5716 after replacing mems and non-top-level regs with values.
5717
5718 (concat (concat val dstv) (set dst src)): dst now holds val,
5719 copied from src. dstv is a value-based representation of dst, if
5720 it differs from dst. If resolution is needed, src is a REG, and
5721 its mode is the same as that of val.
5722
5723 (concat (concat val (set dstv srcv)) (set dst src)): src
5724 copied to dst, holding val. dstv and srcv are value-based
5725 representations of dst and src, respectively.
5726
5727 */
5728
5729 if (GET_CODE (PATTERN (cui->insn)) != COND_EXEC)
5730 reverse_op (v->val_rtx, expr, cui->insn);
5731
5732 mo.u.loc = loc;
5733
5734 if (track_p)
5735 VAL_HOLDS_TRACK_EXPR (loc) = 1;
5736 if (preserve)
5737 {
5738 VAL_NEEDS_RESOLUTION (loc) = resolve;
5739 preserve_value (v);
5740 }
5741 if (mo.type == MO_CLOBBER)
5742 VAL_EXPR_IS_CLOBBERED (loc) = 1;
5743 if (mo.type == MO_COPY)
5744 VAL_EXPR_IS_COPIED (loc) = 1;
5745
5746 mo.type = MO_VAL_SET;
5747
5748 log_and_return:
5749 if (dump_file && (dump_flags & TDF_DETAILS))
5750 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
5751 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
5752 }
5753
5754 /* Arguments to the call. */
5755 static rtx call_arguments;
5756
5757 /* Compute call_arguments for the call INSN in BB.  */
5758
5759 static void
5760 prepare_call_arguments (basic_block bb, rtx insn)
5761 {
5762 rtx link, x;
5763 rtx prev, cur, next;
5764 rtx call = PATTERN (insn);
5765 rtx this_arg = NULL_RTX;
5766 tree type = NULL_TREE, t, fndecl = NULL_TREE;
5767 tree obj_type_ref = NULL_TREE;
5768 CUMULATIVE_ARGS args_so_far_v;
5769 cumulative_args_t args_so_far;
5770
5771 memset (&args_so_far_v, 0, sizeof (args_so_far_v));
5772 args_so_far = pack_cumulative_args (&args_so_far_v);
5773 if (GET_CODE (call) == PARALLEL)
5774 call = XVECEXP (call, 0, 0);
5775 if (GET_CODE (call) == SET)
5776 call = SET_SRC (call);
5777 if (GET_CODE (call) == CALL && MEM_P (XEXP (call, 0)))
5778 {
5779 if (GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
5780 {
5781 rtx symbol = XEXP (XEXP (call, 0), 0);
5782 if (SYMBOL_REF_DECL (symbol))
5783 fndecl = SYMBOL_REF_DECL (symbol);
5784 }
5785 if (fndecl == NULL_TREE)
5786 fndecl = MEM_EXPR (XEXP (call, 0));
5787 if (fndecl
5788 && TREE_CODE (TREE_TYPE (fndecl)) != FUNCTION_TYPE
5789 && TREE_CODE (TREE_TYPE (fndecl)) != METHOD_TYPE)
5790 fndecl = NULL_TREE;
5791 if (fndecl && TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
5792 type = TREE_TYPE (fndecl);
5793 if (fndecl && TREE_CODE (fndecl) != FUNCTION_DECL)
5794 {
5795 if (TREE_CODE (fndecl) == INDIRECT_REF
5796 && TREE_CODE (TREE_OPERAND (fndecl, 0)) == OBJ_TYPE_REF)
5797 obj_type_ref = TREE_OPERAND (fndecl, 0);
5798 fndecl = NULL_TREE;
5799 }
5800 if (type)
5801 {
5802 for (t = TYPE_ARG_TYPES (type); t && t != void_list_node;
5803 t = TREE_CHAIN (t))
5804 if (TREE_CODE (TREE_VALUE (t)) == REFERENCE_TYPE
5805 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_VALUE (t))))
5806 break;
5807 if ((t == NULL || t == void_list_node) && obj_type_ref == NULL_TREE)
5808 type = NULL;
5809 else
5810 {
5811 int nargs ATTRIBUTE_UNUSED = list_length (TYPE_ARG_TYPES (type));
5812 link = CALL_INSN_FUNCTION_USAGE (insn);
5813 #ifndef PCC_STATIC_STRUCT_RETURN
5814 if (aggregate_value_p (TREE_TYPE (type), type)
5815 && targetm.calls.struct_value_rtx (type, 0) == 0)
5816 {
5817 tree struct_addr = build_pointer_type (TREE_TYPE (type));
5818 enum machine_mode mode = TYPE_MODE (struct_addr);
5819 rtx reg;
5820 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
5821 nargs + 1);
5822 reg = targetm.calls.function_arg (args_so_far, mode,
5823 struct_addr, true);
5824 targetm.calls.function_arg_advance (args_so_far, mode,
5825 struct_addr, true);
5826 if (reg == NULL_RTX)
5827 {
5828 for (; link; link = XEXP (link, 1))
5829 if (GET_CODE (XEXP (link, 0)) == USE
5830 && MEM_P (XEXP (XEXP (link, 0), 0)))
5831 {
5832 link = XEXP (link, 1);
5833 break;
5834 }
5835 }
5836 }
5837 else
5838 #endif
5839 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
5840 nargs);
5841 if (obj_type_ref && TYPE_ARG_TYPES (type) != void_list_node)
5842 {
5843 enum machine_mode mode;
5844 t = TYPE_ARG_TYPES (type);
5845 mode = TYPE_MODE (TREE_VALUE (t));
5846 this_arg = targetm.calls.function_arg (args_so_far, mode,
5847 TREE_VALUE (t), true);
5848 if (this_arg && !REG_P (this_arg))
5849 this_arg = NULL_RTX;
5850 else if (this_arg == NULL_RTX)
5851 {
5852 for (; link; link = XEXP (link, 1))
5853 if (GET_CODE (XEXP (link, 0)) == USE
5854 && MEM_P (XEXP (XEXP (link, 0), 0)))
5855 {
5856 this_arg = XEXP (XEXP (link, 0), 0);
5857 break;
5858 }
5859 }
5860 }
5861 }
5862 }
5863 }
5864 t = type ? TYPE_ARG_TYPES (type) : NULL_TREE;
5865
5866 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
5867 if (GET_CODE (XEXP (link, 0)) == USE)
5868 {
5869 rtx item = NULL_RTX;
5870 x = XEXP (XEXP (link, 0), 0);
5871 if (GET_MODE (link) == VOIDmode
5872 || GET_MODE (link) == BLKmode
5873 || (GET_MODE (link) != GET_MODE (x)
5874 && (GET_MODE_CLASS (GET_MODE (link)) != MODE_INT
5875 || GET_MODE_CLASS (GET_MODE (x)) != MODE_INT)))
5876 /* Can't do anything for these, if the original type mode
5877 isn't known or can't be converted. */;
5878 else if (REG_P (x))
5879 {
5880 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
5881 if (val && cselib_preserved_value_p (val))
5882 item = val->val_rtx;
5883 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT)
5884 {
5885 enum machine_mode mode = GET_MODE (x);
5886
5887 while ((mode = GET_MODE_WIDER_MODE (mode)) != VOIDmode
5888 && GET_MODE_BITSIZE (mode) <= BITS_PER_WORD)
5889 {
5890 rtx reg = simplify_subreg (mode, x, GET_MODE (x), 0);
5891
5892 if (reg == NULL_RTX || !REG_P (reg))
5893 continue;
5894 val = cselib_lookup (reg, mode, 0, VOIDmode);
5895 if (val && cselib_preserved_value_p (val))
5896 {
5897 item = val->val_rtx;
5898 break;
5899 }
5900 }
5901 }
5902 }
5903 else if (MEM_P (x))
5904 {
5905 rtx mem = x;
5906 cselib_val *val;
5907
5908 if (!frame_pointer_needed)
5909 {
5910 struct adjust_mem_data amd;
5911 amd.mem_mode = VOIDmode;
5912 amd.stack_adjust = -VTI (bb)->out.stack_adjust;
5913 amd.side_effects = NULL_RTX;
5914 amd.store = true;
5915 mem = simplify_replace_fn_rtx (mem, NULL_RTX, adjust_mems,
5916 &amd);
5917 gcc_assert (amd.side_effects == NULL_RTX);
5918 }
5919 val = cselib_lookup (mem, GET_MODE (mem), 0, VOIDmode);
5920 if (val && cselib_preserved_value_p (val))
5921 item = val->val_rtx;
5922 else if (GET_MODE_CLASS (GET_MODE (mem)) != MODE_INT)
5923 {
5924 /* For non-integer stack arguments, also check whether they were
5925 initialized by integer stores. */
5926 enum machine_mode imode = int_mode_for_mode (GET_MODE (mem));
5927 if (imode != GET_MODE (mem) && imode != BLKmode)
5928 {
5929 val = cselib_lookup (adjust_address_nv (mem, imode, 0),
5930 imode, 0, VOIDmode);
5931 if (val && cselib_preserved_value_p (val))
5932 item = lowpart_subreg (GET_MODE (x), val->val_rtx,
5933 imode);
5934 }
5935 }
5936 }
5937 if (item)
5938 {
5939 rtx x2 = x;
5940 if (GET_MODE (item) != GET_MODE (link))
5941 item = lowpart_subreg (GET_MODE (link), item, GET_MODE (item));
5942 if (GET_MODE (x2) != GET_MODE (link))
5943 x2 = lowpart_subreg (GET_MODE (link), x2, GET_MODE (x2));
5944 item = gen_rtx_CONCAT (GET_MODE (link), x2, item);
5945 call_arguments
5946 = gen_rtx_EXPR_LIST (VOIDmode, item, call_arguments);
5947 }
5948 if (t && t != void_list_node)
5949 {
5950 tree argtype = TREE_VALUE (t);
5951 enum machine_mode mode = TYPE_MODE (argtype);
5952 rtx reg;
5953 if (pass_by_reference (&args_so_far_v, mode, argtype, true))
5954 {
5955 argtype = build_pointer_type (argtype);
5956 mode = TYPE_MODE (argtype);
5957 }
5958 reg = targetm.calls.function_arg (args_so_far, mode,
5959 argtype, true);
5960 if (TREE_CODE (argtype) == REFERENCE_TYPE
5961 && INTEGRAL_TYPE_P (TREE_TYPE (argtype))
5962 && reg
5963 && REG_P (reg)
5964 && GET_MODE (reg) == mode
5965 && GET_MODE_CLASS (mode) == MODE_INT
5966 && REG_P (x)
5967 && REGNO (x) == REGNO (reg)
5968 && GET_MODE (x) == mode
5969 && item)
5970 {
5971 enum machine_mode indmode
5972 = TYPE_MODE (TREE_TYPE (argtype));
5973 rtx mem = gen_rtx_MEM (indmode, x);
5974 cselib_val *val = cselib_lookup (mem, indmode, 0, VOIDmode);
5975 if (val && cselib_preserved_value_p (val))
5976 {
5977 item = gen_rtx_CONCAT (indmode, mem, val->val_rtx);
5978 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
5979 call_arguments);
5980 }
5981 else
5982 {
5983 struct elt_loc_list *l;
5984 tree initial;
5985
5986 /* Try harder: when passing the address of a constant-pool
5987 integer, it can easily be read back. */
5988 item = XEXP (item, 1);
5989 if (GET_CODE (item) == SUBREG)
5990 item = SUBREG_REG (item);
5991 gcc_assert (GET_CODE (item) == VALUE);
5992 val = CSELIB_VAL_PTR (item);
5993 for (l = val->locs; l; l = l->next)
5994 if (GET_CODE (l->loc) == SYMBOL_REF
5995 && TREE_CONSTANT_POOL_ADDRESS_P (l->loc)
5996 && SYMBOL_REF_DECL (l->loc)
5997 && DECL_INITIAL (SYMBOL_REF_DECL (l->loc)))
5998 {
5999 initial = DECL_INITIAL (SYMBOL_REF_DECL (l->loc));
6000 if (host_integerp (initial, 0))
6001 {
6002 item = GEN_INT (tree_low_cst (initial, 0));
6003 item = gen_rtx_CONCAT (indmode, mem, item);
6004 call_arguments
6005 = gen_rtx_EXPR_LIST (VOIDmode, item,
6006 call_arguments);
6007 }
6008 break;
6009 }
6010 }
6011 }
6012 targetm.calls.function_arg_advance (args_so_far, mode,
6013 argtype, true);
6014 t = TREE_CHAIN (t);
6015 }
6016 }
6017
6018 /* Add debug arguments. */
6019 if (fndecl
6020 && TREE_CODE (fndecl) == FUNCTION_DECL
6021 && DECL_HAS_DEBUG_ARGS_P (fndecl))
6022 {
6023 VEC(tree, gc) **debug_args = decl_debug_args_lookup (fndecl);
6024 if (debug_args)
6025 {
6026 unsigned int ix;
6027 tree param;
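/* Note: on this reading of the ix += 2 stride below, the debug_args
vector stores pairs, with the original parameter at even index IX and
the debug temporary standing in for it at IX + 1. */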
6028 for (ix = 0; VEC_iterate (tree, *debug_args, ix, param); ix += 2)
6029 {
6030 rtx item;
6031 tree dtemp = VEC_index (tree, *debug_args, ix + 1);
6032 enum machine_mode mode = DECL_MODE (dtemp);
6033 item = gen_rtx_DEBUG_PARAMETER_REF (mode, param);
6034 item = gen_rtx_CONCAT (mode, item, DECL_RTL_KNOWN_SET (dtemp));
6035 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6036 call_arguments);
6037 }
6038 }
6039 }
6040
6041 /* Reverse the call_arguments chain. */
6042 prev = NULL_RTX;
6043 for (cur = call_arguments; cur; cur = next)
6044 {
6045 next = XEXP (cur, 1);
6046 XEXP (cur, 1) = prev;
6047 prev = cur;
6048 }
6049 call_arguments = prev;
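/* After the reversal, call_arguments lists the entries in the order
the USEs were scanned. Illustrative sketch of the shape built above
(the modes and registers are made up): each element is a CONCAT of the
argument's location and the value recorded for it, e.g.

  (expr_list (concat:SI (reg:SI 4 si) (const_int 7))
    (expr_list (concat:DI (reg:DI 5 di) (value:DI ...)) ...))

The code below then prepends special entries, pairing pc_rtx with the
call target and (clobber pc) with the vtable slot for indirect calls
through an OBJ_TYPE_REF. */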
6050
6051 x = PATTERN (insn);
6052 if (GET_CODE (x) == PARALLEL)
6053 x = XVECEXP (x, 0, 0);
6054 if (GET_CODE (x) == SET)
6055 x = SET_SRC (x);
6056 if (GET_CODE (x) == CALL && MEM_P (XEXP (x, 0)))
6057 {
6058 x = XEXP (XEXP (x, 0), 0);
6059 if (GET_CODE (x) == SYMBOL_REF)
6060 /* Don't record anything. */;
6061 else if (CONSTANT_P (x))
6062 {
6063 x = gen_rtx_CONCAT (GET_MODE (x) == VOIDmode ? Pmode : GET_MODE (x),
6064 pc_rtx, x);
6065 call_arguments
6066 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6067 }
6068 else
6069 {
6070 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6071 if (val && cselib_preserved_value_p (val))
6072 {
6073 x = gen_rtx_CONCAT (GET_MODE (x), pc_rtx, val->val_rtx);
6074 call_arguments
6075 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6076 }
6077 }
6078 }
6079 if (this_arg)
6080 {
6081 enum machine_mode mode
6082 = TYPE_MODE (TREE_TYPE (OBJ_TYPE_REF_EXPR (obj_type_ref)));
6083 rtx clobbered = gen_rtx_MEM (mode, this_arg);
6084 HOST_WIDE_INT token
6085 = tree_low_cst (OBJ_TYPE_REF_TOKEN (obj_type_ref), 0);
6086 if (token)
6087 clobbered = plus_constant (mode, clobbered,
6088 token * GET_MODE_SIZE (mode));
6089 clobbered = gen_rtx_MEM (mode, clobbered);
6090 x = gen_rtx_CONCAT (mode, gen_rtx_CLOBBER (VOIDmode, pc_rtx), clobbered);
6091 call_arguments
6092 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6093 }
6094 }
6095
6096 /* Callback for cselib_record_sets_hook: record the uses and stores
6097 in an insn as micro operations, after cselib_record_sets has
6098 analyzed the sets in the insn but before it modifies the stored
6099 values in its internal tables. When cselib is not used at all,
6100 this function is called directly instead, in which case SETS and
6101 N_SETS will be 0. */
6102
6103 static void
6104 add_with_sets (rtx insn, struct cselib_set *sets, int n_sets)
6105 {
6106 basic_block bb = BLOCK_FOR_INSN (insn);
6107 int n1, n2;
6108 struct count_use_info cui;
6109 micro_operation *mos;
6110
6111 cselib_hook_called = true;
6112
6113 cui.insn = insn;
6114 cui.bb = bb;
6115 cui.sets = sets;
6116 cui.n_sets = n_sets;
6117
6118 n1 = VEC_length (micro_operation, VTI (bb)->mos);
6119 cui.store_p = false;
6120 note_uses (&PATTERN (insn), add_uses_1, &cui);
6121 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
6122 mos = VEC_address (micro_operation, VTI (bb)->mos);
6123
6124 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USEs, with
6125 MO_VAL_LOCs last; see the partition sketch after this function. */
6126 while (n1 < n2)
6127 {
6128 while (n1 < n2 && mos[n1].type == MO_USE)
6129 n1++;
6130 while (n1 < n2 && mos[n2].type != MO_USE)
6131 n2--;
6132 if (n1 < n2)
6133 {
6134 micro_operation sw;
6135
6136 sw = mos[n1];
6137 mos[n1] = mos[n2];
6138 mos[n2] = sw;
6139 }
6140 }
6141
6142 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
6143 while (n1 < n2)
6144 {
6145 while (n1 < n2 && mos[n1].type != MO_VAL_LOC)
6146 n1++;
6147 while (n1 < n2 && mos[n2].type == MO_VAL_LOC)
6148 n2--;
6149 if (n1 < n2)
6150 {
6151 micro_operation sw;
6152
6153 sw = mos[n1];
6154 mos[n1] = mos[n2];
6155 mos[n2] = sw;
6156 }
6157 }
6158
6159 if (CALL_P (insn))
6160 {
6161 micro_operation mo;
6162
6163 mo.type = MO_CALL;
6164 mo.insn = insn;
6165 mo.u.loc = call_arguments;
6166 call_arguments = NULL_RTX;
6167
6168 if (dump_file && (dump_flags & TDF_DETAILS))
6169 log_op_type (PATTERN (insn), bb, insn, mo.type, dump_file);
6170 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
6171 }
6172
6173 n1 = VEC_length (micro_operation, VTI (bb)->mos);
6174 /* This will record NEXT_INSN (insn), such that we can
6175 insert notes before it without worrying about any
6176 notes that MO_USEs might emit after the insn. */
6177 cui.store_p = true;
6178 note_stores (PATTERN (insn), add_stores, &cui);
6179 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
6180 mos = VEC_address (micro_operation, VTI (bb)->mos);
6181
6182 /* Order the MO_VAL_USEs first (note_stores does nothing
6183 on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
6184 insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */
6185 while (n1 < n2)
6186 {
6187 while (n1 < n2 && mos[n1].type == MO_VAL_USE)
6188 n1++;
6189 while (n1 < n2 && mos[n2].type != MO_VAL_USE)
6190 n2--;
6191 if (n1 < n2)
6192 {
6193 micro_operation sw;
6194
6195 sw = mos[n1];
6196 mos[n1] = mos[n2];
6197 mos[n2] = sw;
6198 }
6199 }
6200
6201 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
6202 while (n1 < n2)
6203 {
6204 while (n1 < n2 && mos[n1].type == MO_CLOBBER)
6205 n1++;
6206 while (n1 < n2 && mos[n2].type != MO_CLOBBER)
6207 n2--;
6208 if (n1 < n2)
6209 {
6210 micro_operation sw;
6211
6212 sw = mos[n1];
6213 mos[n1] = mos[n2];
6214 mos[n2] = sw;
6215 }
6216 }
6217 }
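/* The reordering loops above are all instances of the same in-place
two-index partition: advance N1 past elements already in the desired
class, retreat N2 past elements already on the correct side, and swap
when both stop. A stand-alone sketch of the idiom (illustrative only,
not used by the pass; PRED is a hypothetical predicate):  */
#if 0
static void
partition_sketch (micro_operation *mos, int n1, int n2,
                  bool (*pred) (const micro_operation *))
{
  /* Afterwards every element satisfying PRED precedes every element
     that does not; order within each class is not preserved.  */
  while (n1 < n2)
    {
      while (n1 < n2 && pred (&mos[n1]))
        n1++;
      while (n1 < n2 && !pred (&mos[n2]))
        n2--;
      if (n1 < n2)
        {
          micro_operation sw = mos[n1];
          mos[n1] = mos[n2];
          mos[n2] = sw;
        }
    }
}
#endif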
6218
6219 static enum var_init_status
6220 find_src_status (dataflow_set *in, rtx src)
6221 {
6222 tree decl = NULL_TREE;
6223 enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
6224
6225 if (! flag_var_tracking_uninit)
6226 status = VAR_INIT_STATUS_INITIALIZED;
6227
6228 if (src && REG_P (src))
6229 decl = var_debug_decl (REG_EXPR (src));
6230 else if (src && MEM_P (src))
6231 decl = var_debug_decl (MEM_EXPR (src));
6232
6233 if (src && decl)
6234 status = get_init_value (in, src, dv_from_decl (decl));
6235
6236 return status;
6237 }
6238
6239 /* SRC is the source of an assignment. Use SET to try to find what
6240 was ultimately assigned to SRC. Return that value if known,
6241 otherwise return SRC itself. */
6242
6243 static rtx
6244 find_src_set_src (dataflow_set *set, rtx src)
6245 {
6246 tree decl = NULL_TREE; /* The variable being copied around. */
6247 rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */
6248 variable var;
6249 location_chain nextp;
6250 int i;
6251 bool found;
6252
6253 if (src && REG_P (src))
6254 decl = var_debug_decl (REG_EXPR (src));
6255 else if (src && MEM_P (src))
6256 decl = var_debug_decl (MEM_EXPR (src));
6257
6258 if (src && decl)
6259 {
6260 decl_or_value dv = dv_from_decl (decl);
6261
6262 var = shared_hash_find (set->vars, dv);
6263 if (var)
6264 {
6265 found = false;
6266 for (i = 0; i < var->n_var_parts && !found; i++)
6267 for (nextp = var->var_part[i].loc_chain; nextp && !found;
6268 nextp = nextp->next)
6269 if (rtx_equal_p (nextp->loc, src))
6270 {
6271 set_src = nextp->set_src;
6272 found = true;
6273 }
6274
6275 }
6276 }
6277
6278 return set_src;
6279 }
6280
6281 /* Compute the changes of variable locations in the basic block BB. */
6282
6283 static bool
6284 compute_bb_dataflow (basic_block bb)
6285 {
6286 unsigned int i;
6287 micro_operation *mo;
6288 bool changed;
6289 dataflow_set old_out;
6290 dataflow_set *in = &VTI (bb)->in;
6291 dataflow_set *out = &VTI (bb)->out;
6292
6293 dataflow_set_init (&old_out);
6294 dataflow_set_copy (&old_out, out);
6295 dataflow_set_copy (out, in);
6296
6297 FOR_EACH_VEC_ELT (micro_operation, VTI (bb)->mos, i, mo)
6298 {
6299 rtx insn = mo->insn;
6300
6301 switch (mo->type)
6302 {
6303 case MO_CALL:
6304 dataflow_set_clear_at_call (out);
6305 break;
6306
6307 case MO_USE:
6308 {
6309 rtx loc = mo->u.loc;
6310
6311 if (REG_P (loc))
6312 var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6313 else if (MEM_P (loc))
6314 var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6315 }
6316 break;
6317
6318 case MO_VAL_LOC:
6319 {
6320 rtx loc = mo->u.loc;
6321 rtx val, vloc;
6322 tree var;
6323
6324 if (GET_CODE (loc) == CONCAT)
6325 {
6326 val = XEXP (loc, 0);
6327 vloc = XEXP (loc, 1);
6328 }
6329 else
6330 {
6331 val = NULL_RTX;
6332 vloc = loc;
6333 }
6334
6335 var = PAT_VAR_LOCATION_DECL (vloc);
6336
6337 clobber_variable_part (out, NULL_RTX,
6338 dv_from_decl (var), 0, NULL_RTX);
6339 if (val)
6340 {
6341 if (VAL_NEEDS_RESOLUTION (loc))
6342 val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
6343 set_variable_part (out, val, dv_from_decl (var), 0,
6344 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6345 INSERT);
6346 }
6347 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
6348 set_variable_part (out, PAT_VAR_LOCATION_LOC (vloc),
6349 dv_from_decl (var), 0,
6350 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6351 INSERT);
6352 }
6353 break;
6354
6355 case MO_VAL_USE:
6356 {
6357 rtx loc = mo->u.loc;
6358 rtx val, vloc, uloc;
6359
6360 vloc = uloc = XEXP (loc, 1);
6361 val = XEXP (loc, 0);
6362
6363 if (GET_CODE (val) == CONCAT)
6364 {
6365 uloc = XEXP (val, 1);
6366 val = XEXP (val, 0);
6367 }
6368
6369 if (VAL_NEEDS_RESOLUTION (loc))
6370 val_resolve (out, val, vloc, insn);
6371 else
6372 val_store (out, val, uloc, insn, false);
6373
6374 if (VAL_HOLDS_TRACK_EXPR (loc))
6375 {
6376 if (GET_CODE (uloc) == REG)
6377 var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6378 NULL);
6379 else if (GET_CODE (uloc) == MEM)
6380 var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6381 NULL);
6382 }
6383 }
6384 break;
6385
6386 case MO_VAL_SET:
6387 {
6388 rtx loc = mo->u.loc;
6389 rtx val, vloc, uloc;
6390 rtx dstv, srcv;
6391
6392 vloc = loc;
6393 uloc = XEXP (vloc, 1);
6394 val = XEXP (vloc, 0);
6395 vloc = uloc;
6396
6397 if (GET_CODE (uloc) == SET)
6398 {
6399 dstv = SET_DEST (uloc);
6400 srcv = SET_SRC (uloc);
6401 }
6402 else
6403 {
6404 dstv = uloc;
6405 srcv = NULL;
6406 }
6407
6408 if (GET_CODE (val) == CONCAT)
6409 {
6410 dstv = vloc = XEXP (val, 1);
6411 val = XEXP (val, 0);
6412 }
6413
6414 if (GET_CODE (vloc) == SET)
6415 {
6416 srcv = SET_SRC (vloc);
6417
6418 gcc_assert (val != srcv);
6419 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
6420
6421 dstv = vloc = SET_DEST (vloc);
6422
6423 if (VAL_NEEDS_RESOLUTION (loc))
6424 val_resolve (out, val, srcv, insn);
6425 }
6426 else if (VAL_NEEDS_RESOLUTION (loc))
6427 {
6428 gcc_assert (GET_CODE (uloc) == SET
6429 && GET_CODE (SET_SRC (uloc)) == REG);
6430 val_resolve (out, val, SET_SRC (uloc), insn);
6431 }
6432
6433 if (VAL_HOLDS_TRACK_EXPR (loc))
6434 {
6435 if (VAL_EXPR_IS_CLOBBERED (loc))
6436 {
6437 if (REG_P (uloc))
6438 var_reg_delete (out, uloc, true);
6439 else if (MEM_P (uloc))
6440 {
6441 gcc_assert (MEM_P (dstv));
6442 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
6443 var_mem_delete (out, dstv, true);
6444 }
6445 }
6446 else
6447 {
6448 bool copied_p = VAL_EXPR_IS_COPIED (loc);
6449 rtx src = NULL, dst = uloc;
6450 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
6451
6452 if (GET_CODE (uloc) == SET)
6453 {
6454 src = SET_SRC (uloc);
6455 dst = SET_DEST (uloc);
6456 }
6457
6458 if (copied_p)
6459 {
6460 if (flag_var_tracking_uninit)
6461 {
6462 status = find_src_status (in, src);
6463
6464 if (status == VAR_INIT_STATUS_UNKNOWN)
6465 status = find_src_status (out, src);
6466 }
6467
6468 src = find_src_set_src (in, src);
6469 }
6470
6471 if (REG_P (dst))
6472 var_reg_delete_and_set (out, dst, !copied_p,
6473 status, srcv);
6474 else if (MEM_P (dst))
6475 {
6476 gcc_assert (MEM_P (dstv));
6477 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
6478 var_mem_delete_and_set (out, dstv, !copied_p,
6479 status, srcv);
6480 }
6481 }
6482 }
6483 else if (REG_P (uloc))
6484 var_regno_delete (out, REGNO (uloc));
6485 else if (MEM_P (uloc))
6486 clobber_overlapping_mems (out, uloc);
6487
6488 val_store (out, val, dstv, insn, true);
6489 }
6490 break;
6491
6492 case MO_SET:
6493 {
6494 rtx loc = mo->u.loc;
6495 rtx set_src = NULL;
6496
6497 if (GET_CODE (loc) == SET)
6498 {
6499 set_src = SET_SRC (loc);
6500 loc = SET_DEST (loc);
6501 }
6502
6503 if (REG_P (loc))
6504 var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6505 set_src);
6506 else if (MEM_P (loc))
6507 var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6508 set_src);
6509 }
6510 break;
6511
6512 case MO_COPY:
6513 {
6514 rtx loc = mo->u.loc;
6515 enum var_init_status src_status;
6516 rtx set_src = NULL;
6517
6518 if (GET_CODE (loc) == SET)
6519 {
6520 set_src = SET_SRC (loc);
6521 loc = SET_DEST (loc);
6522 }
6523
6524 if (! flag_var_tracking_uninit)
6525 src_status = VAR_INIT_STATUS_INITIALIZED;
6526 else
6527 {
6528 src_status = find_src_status (in, set_src);
6529
6530 if (src_status == VAR_INIT_STATUS_UNKNOWN)
6531 src_status = find_src_status (out, set_src);
6532 }
6533
6534 set_src = find_src_set_src (in, set_src);
6535
6536 if (REG_P (loc))
6537 var_reg_delete_and_set (out, loc, false, src_status, set_src);
6538 else if (MEM_P (loc))
6539 var_mem_delete_and_set (out, loc, false, src_status, set_src);
6540 }
6541 break;
6542
6543 case MO_USE_NO_VAR:
6544 {
6545 rtx loc = mo->u.loc;
6546
6547 if (REG_P (loc))
6548 var_reg_delete (out, loc, false);
6549 else if (MEM_P (loc))
6550 var_mem_delete (out, loc, false);
6551 }
6552 break;
6553
6554 case MO_CLOBBER:
6555 {
6556 rtx loc = mo->u.loc;
6557
6558 if (REG_P (loc))
6559 var_reg_delete (out, loc, true);
6560 else if (MEM_P (loc))
6561 var_mem_delete (out, loc, true);
6562 }
6563 break;
6564
6565 case MO_ADJUST:
6566 out->stack_adjust += mo->u.adjust;
6567 break;
6568 }
6569 }
6570
6571 if (MAY_HAVE_DEBUG_INSNS)
6572 {
6573 dataflow_set_equiv_regs (out);
6574 htab_traverse (shared_hash_htab (out->vars), canonicalize_values_mark,
6575 out);
6576 htab_traverse (shared_hash_htab (out->vars), canonicalize_values_star,
6577 out);
6578 #if ENABLE_CHECKING
6579 htab_traverse (shared_hash_htab (out->vars),
6580 canonicalize_loc_order_check, out);
6581 #endif
6582 }
6583 changed = dataflow_set_different (&old_out, out);
6584 dataflow_set_destroy (&old_out);
6585 return changed;
6586 }
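/* In dataflow terms, the function above is the block transfer
function OUT(BB) = f_BB(IN(BB)): OUT is seeded from IN, each micro
operation updates it in instruction order, and the return value
reports whether OUT changed, which drives the worklist iteration in
vt_find_locations below. Schematically (illustrative only):

  copy (out, in);
  for each micro operation MO of BB, in order:
    out = apply (out, MO);     -- MO_USE, MO_SET, MO_CALL, ...
  return out != old_out;  */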
6587
6588 /* Find the locations of variables in the whole function. */
6589
6590 static bool
6591 vt_find_locations (void)
6592 {
6593 fibheap_t worklist, pending, fibheap_swap;
6594 sbitmap visited, in_worklist, in_pending, sbitmap_swap;
6595 basic_block bb;
6596 edge e;
6597 int *bb_order;
6598 int *rc_order;
6599 int i;
6600 int htabsz = 0;
6601 int htabmax = PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE);
6602 bool success = true;
6603
6604 timevar_push (TV_VAR_TRACKING_DATAFLOW);
6605 /* Compute the reverse completion order of a depth-first search of the
6606 CFG so that the dataflow analysis converges faster. */
6607 rc_order = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
6608 bb_order = XNEWVEC (int, last_basic_block);
6609 pre_and_rev_post_order_compute (NULL, rc_order, false);
6610 for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
6611 bb_order[rc_order[i]] = i;
6612 free (rc_order);
6613
6614 worklist = fibheap_new ();
6615 pending = fibheap_new ();
6616 visited = sbitmap_alloc (last_basic_block);
6617 in_worklist = sbitmap_alloc (last_basic_block);
6618 in_pending = sbitmap_alloc (last_basic_block);
6619 sbitmap_zero (in_worklist);
6620
6621 FOR_EACH_BB (bb)
6622 fibheap_insert (pending, bb_order[bb->index], bb);
6623 sbitmap_ones (in_pending);
6624
6625 while (success && !fibheap_empty (pending))
6626 {
6627 fibheap_swap = pending;
6628 pending = worklist;
6629 worklist = fibheap_swap;
6630 sbitmap_swap = in_pending;
6631 in_pending = in_worklist;
6632 in_worklist = sbitmap_swap;
6633
6634 sbitmap_zero (visited);
6635
6636 while (!fibheap_empty (worklist))
6637 {
6638 bb = (basic_block) fibheap_extract_min (worklist);
6639 RESET_BIT (in_worklist, bb->index);
6640 gcc_assert (!TEST_BIT (visited, bb->index));
6641 if (!TEST_BIT (visited, bb->index))
6642 {
6643 bool changed;
6644 edge_iterator ei;
6645 int oldinsz, oldoutsz;
6646
6647 SET_BIT (visited, bb->index);
6648
6649 if (VTI (bb)->in.vars)
6650 {
6651 htabsz
6652 -= (htab_size (shared_hash_htab (VTI (bb)->in.vars))
6653 + htab_size (shared_hash_htab (VTI (bb)->out.vars)));
6654 oldinsz
6655 = htab_elements (shared_hash_htab (VTI (bb)->in.vars));
6656 oldoutsz
6657 = htab_elements (shared_hash_htab (VTI (bb)->out.vars));
6658 }
6659 else
6660 oldinsz = oldoutsz = 0;
6661
6662 if (MAY_HAVE_DEBUG_INSNS)
6663 {
6664 dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
6665 bool first = true, adjust = false;
6666
6667 /* Calculate the IN set as the intersection of
6668 predecessor OUT sets. */
6669
6670 dataflow_set_clear (in);
6671 dst_can_be_shared = true;
6672
6673 FOR_EACH_EDGE (e, ei, bb->preds)
6674 if (!VTI (e->src)->flooded)
6675 gcc_assert (bb_order[bb->index]
6676 <= bb_order[e->src->index]);
6677 else if (first)
6678 {
6679 dataflow_set_copy (in, &VTI (e->src)->out);
6680 first_out = &VTI (e->src)->out;
6681 first = false;
6682 }
6683 else
6684 {
6685 dataflow_set_merge (in, &VTI (e->src)->out);
6686 adjust = true;
6687 }
6688
6689 if (adjust)
6690 {
6691 dataflow_post_merge_adjust (in, &VTI (bb)->permp);
6692 #if ENABLE_CHECKING
6693 /* Merge and merge_adjust should keep entries in
6694 canonical order. */
6695 htab_traverse (shared_hash_htab (in->vars),
6696 canonicalize_loc_order_check,
6697 in);
6698 #endif
6699 if (dst_can_be_shared)
6700 {
6701 shared_hash_destroy (in->vars);
6702 in->vars = shared_hash_copy (first_out->vars);
6703 }
6704 }
6705
6706 VTI (bb)->flooded = true;
6707 }
6708 else
6709 {
6710 /* Calculate the IN set as the union of predecessor OUT sets. */
6711 dataflow_set_clear (&VTI (bb)->in);
6712 FOR_EACH_EDGE (e, ei, bb->preds)
6713 dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
6714 }
6715
6716 changed = compute_bb_dataflow (bb);
6717 htabsz += (htab_size (shared_hash_htab (VTI (bb)->in.vars))
6718 + htab_size (shared_hash_htab (VTI (bb)->out.vars)));
6719
6720 if (htabmax && htabsz > htabmax)
6721 {
6722 if (MAY_HAVE_DEBUG_INSNS)
6723 inform (DECL_SOURCE_LOCATION (cfun->decl),
6724 "variable tracking size limit exceeded with "
6725 "-fvar-tracking-assignments, retrying without");
6726 else
6727 inform (DECL_SOURCE_LOCATION (cfun->decl),
6728 "variable tracking size limit exceeded");
6729 success = false;
6730 break;
6731 }
6732
6733 if (changed)
6734 {
6735 FOR_EACH_EDGE (e, ei, bb->succs)
6736 {
6737 if (e->dest == EXIT_BLOCK_PTR)
6738 continue;
6739
6740 if (TEST_BIT (visited, e->dest->index))
6741 {
6742 if (!TEST_BIT (in_pending, e->dest->index))
6743 {
6744 /* Send E->DEST to next round. */
6745 SET_BIT (in_pending, e->dest->index);
6746 fibheap_insert (pending,
6747 bb_order[e->dest->index],
6748 e->dest);
6749 }
6750 }
6751 else if (!TEST_BIT (in_worklist, e->dest->index))
6752 {
6753 /* Add E->DEST to current round. */
6754 SET_BIT (in_worklist, e->dest->index);
6755 fibheap_insert (worklist, bb_order[e->dest->index],
6756 e->dest);
6757 }
6758 }
6759 }
6760
6761 if (dump_file)
6762 fprintf (dump_file,
6763 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
6764 bb->index,
6765 (int)htab_elements (shared_hash_htab (VTI (bb)->in.vars)),
6766 oldinsz,
6767 (int)htab_elements (shared_hash_htab (VTI (bb)->out.vars)),
6768 oldoutsz,
6769 (int)worklist->nodes, (int)pending->nodes, htabsz);
6770
6771 if (dump_file && (dump_flags & TDF_DETAILS))
6772 {
6773 fprintf (dump_file, "BB %i IN:\n", bb->index);
6774 dump_dataflow_set (&VTI (bb)->in);
6775 fprintf (dump_file, "BB %i OUT:\n", bb->index);
6776 dump_dataflow_set (&VTI (bb)->out);
6777 }
6778 }
6779 }
6780 }
6781
6782 if (success && MAY_HAVE_DEBUG_INSNS)
6783 FOR_EACH_BB (bb)
6784 gcc_assert (VTI (bb)->flooded);
6785
6786 free (bb_order);
6787 fibheap_delete (worklist);
6788 fibheap_delete (pending);
6789 sbitmap_free (visited);
6790 sbitmap_free (in_worklist);
6791 sbitmap_free (in_pending);
6792
6793 timevar_pop (TV_VAR_TRACKING_DATAFLOW);
6794 return success;
6795 }
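/* The iteration above uses the common two-worklist scheme: PENDING
holds blocks for the next round, WORKLIST the current one, both keyed
by reverse-postorder priority so that predecessors tend to be visited
first. A minimal sketch of the control structure (illustrative only;
the priorities, the visited bitmaps and the size limit are omitted,
and schedule_succs is a hypothetical helper that reinserts the
successors of a changed block):  */
#if 0
static void
worklist_sketch (fibheap_t pending, fibheap_t worklist)
{
  while (!fibheap_empty (pending))
    {
      fibheap_t tmp = pending;      /* Promote the next round...  */
      pending = worklist;
      worklist = tmp;               /* ...to the current one.  */
      while (!fibheap_empty (worklist))
        {
          basic_block bb = (basic_block) fibheap_extract_min (worklist);
          if (compute_bb_dataflow (bb))
            schedule_succs (bb, worklist, pending);
        }
    }
}
#endif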
6796
6797 /* Print the contents of LIST to the dump file. */
6798
6799 static void
6800 dump_attrs_list (attrs list)
6801 {
6802 for (; list; list = list->next)
6803 {
6804 if (dv_is_decl_p (list->dv))
6805 print_mem_expr (dump_file, dv_as_decl (list->dv));
6806 else
6807 print_rtl_single (dump_file, dv_as_value (list->dv));
6808 fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
6809 }
6810 fprintf (dump_file, "\n");
6811 }
6812
6813 /* Print the information about variable *SLOT to the dump file. */
6814
6815 static int
6816 dump_var_slot (void **slot, void *data ATTRIBUTE_UNUSED)
6817 {
6818 variable var = (variable) *slot;
6819
6820 dump_var (var);
6821
6822 /* Continue traversing the hash table. */
6823 return 1;
6824 }
6825
6826 /* Print the information about variable VAR to the dump file. */
6827
6828 static void
6829 dump_var (variable var)
6830 {
6831 int i;
6832 location_chain node;
6833
6834 if (dv_is_decl_p (var->dv))
6835 {
6836 const_tree decl = dv_as_decl (var->dv);
6837
6838 if (DECL_NAME (decl))
6839 {
6840 fprintf (dump_file, " name: %s",
6841 IDENTIFIER_POINTER (DECL_NAME (decl)));
6842 if (dump_flags & TDF_UID)
6843 fprintf (dump_file, "D.%u", DECL_UID (decl));
6844 }
6845 else if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
6846 fprintf (dump_file, " name: D#%u", DEBUG_TEMP_UID (decl));
6847 else
6848 fprintf (dump_file, " name: D.%u", DECL_UID (decl));
6849 fprintf (dump_file, "\n");
6850 }
6851 else
6852 {
6853 fputc (' ', dump_file);
6854 print_rtl_single (dump_file, dv_as_value (var->dv));
6855 }
6856
6857 for (i = 0; i < var->n_var_parts; i++)
6858 {
6859 fprintf (dump_file, " offset %ld\n",
6860 (long)(var->onepart ? 0 : VAR_PART_OFFSET (var, i)));
6861 for (node = var->var_part[i].loc_chain; node; node = node->next)
6862 {
6863 fprintf (dump_file, " ");
6864 if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
6865 fprintf (dump_file, "[uninit]");
6866 print_rtl_single (dump_file, node->loc);
6867 }
6868 }
6869 }
6870
6871 /* Print the information about variables from hash table VARS to the dump file. */
6872
6873 static void
6874 dump_vars (htab_t vars)
6875 {
6876 if (htab_elements (vars) > 0)
6877 {
6878 fprintf (dump_file, "Variables:\n");
6879 htab_traverse (vars, dump_var_slot, NULL);
6880 }
6881 }
6882
6883 /* Print the dataflow set SET to the dump file. */
6884
6885 static void
6886 dump_dataflow_set (dataflow_set *set)
6887 {
6888 int i;
6889
6890 fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
6891 set->stack_adjust);
6892 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
6893 {
6894 if (set->regs[i])
6895 {
6896 fprintf (dump_file, "Reg %d:", i);
6897 dump_attrs_list (set->regs[i]);
6898 }
6899 }
6900 dump_vars (shared_hash_htab (set->vars));
6901 fprintf (dump_file, "\n");
6902 }
6903
6904 /* Print the IN and OUT sets for each basic block to the dump file. */
6905
6906 static void
6907 dump_dataflow_sets (void)
6908 {
6909 basic_block bb;
6910
6911 FOR_EACH_BB (bb)
6912 {
6913 fprintf (dump_file, "\nBasic block %d:\n", bb->index);
6914 fprintf (dump_file, "IN:\n");
6915 dump_dataflow_set (&VTI (bb)->in);
6916 fprintf (dump_file, "OUT:\n");
6917 dump_dataflow_set (&VTI (bb)->out);
6918 }
6919 }
6920
6921 /* Return the variable for DV in dropped_values, inserting one if
6922 requested with INSERT. */
6923
6924 static inline variable
6925 variable_from_dropped (decl_or_value dv, enum insert_option insert)
6926 {
6927 void **slot;
6928 variable empty_var;
6929 onepart_enum_t onepart;
6930
6931 slot = htab_find_slot_with_hash (dropped_values, dv, dv_htab_hash (dv),
6932 insert);
6933
6934 if (!slot)
6935 return NULL;
6936
6937 if (*slot)
6938 return (variable) *slot;
6939
6940 gcc_checking_assert (insert == INSERT);
6941
6942 onepart = dv_onepart_p (dv);
6943
6944 gcc_checking_assert (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR);
6945
6946 empty_var = (variable) pool_alloc (onepart_pool (onepart));
6947 empty_var->dv = dv;
6948 empty_var->refcount = 1;
6949 empty_var->n_var_parts = 0;
6950 empty_var->onepart = onepart;
6951 empty_var->in_changed_variables = false;
6952 empty_var->var_part[0].loc_chain = NULL;
6953 empty_var->var_part[0].cur_loc = NULL;
6954 VAR_LOC_1PAUX (empty_var) = NULL;
6955 set_dv_changed (dv, true);
6956
6957 *slot = empty_var;
6958
6959 return empty_var;
6960 }
6961
6962 /* Recover the one-part aux from dropped_values. */
6963
6964 static struct onepart_aux *
6965 recover_dropped_1paux (variable var)
6966 {
6967 variable dvar;
6968
6969 gcc_checking_assert (var->onepart);
6970
6971 if (VAR_LOC_1PAUX (var))
6972 return VAR_LOC_1PAUX (var);
6973
6974 if (var->onepart == ONEPART_VDECL)
6975 return NULL;
6976
6977 dvar = variable_from_dropped (var->dv, NO_INSERT);
6978
6979 if (!dvar)
6980 return NULL;
6981
6982 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (dvar);
6983 VAR_LOC_1PAUX (dvar) = NULL;
6984
6985 return VAR_LOC_1PAUX (var);
6986 }
6987
6988 /* Add variable VAR to the hash table of changed variables and,
6989 if it has no locations, delete it from SET's hash table. */
6990
6991 static void
6992 variable_was_changed (variable var, dataflow_set *set)
6993 {
6994 hashval_t hash = dv_htab_hash (var->dv);
6995
6996 if (emit_notes)
6997 {
6998 void **slot;
6999
7000 /* Remember this decl or VALUE has been added to changed_variables. */
7001 set_dv_changed (var->dv, true);
7002
7003 slot = htab_find_slot_with_hash (changed_variables,
7004 var->dv,
7005 hash, INSERT);
7006
7007 if (*slot)
7008 {
7009 variable old_var = (variable) *slot;
7010 gcc_assert (old_var->in_changed_variables);
7011 old_var->in_changed_variables = false;
7012 if (var != old_var && var->onepart)
7013 {
7014 /* Restore the auxiliary info from an empty variable
7015 previously created for changed_variables, so it is
7016 not lost. */
7017 gcc_checking_assert (!VAR_LOC_1PAUX (var));
7018 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (old_var);
7019 VAR_LOC_1PAUX (old_var) = NULL;
7020 }
7021 variable_htab_free (*slot);
7022 }
7023
7024 if (set && var->n_var_parts == 0)
7025 {
7026 onepart_enum_t onepart = var->onepart;
7027 variable empty_var = NULL;
7028 void **dslot = NULL;
7029
7030 if (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR)
7031 {
7032 dslot = htab_find_slot_with_hash (dropped_values, var->dv,
7033 dv_htab_hash (var->dv),
7034 INSERT);
7035 empty_var = (variable) *dslot;
7036
7037 if (empty_var)
7038 {
7039 gcc_checking_assert (!empty_var->in_changed_variables);
7040 if (!VAR_LOC_1PAUX (var))
7041 {
7042 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (empty_var);
7043 VAR_LOC_1PAUX (empty_var) = NULL;
7044 }
7045 else
7046 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
7047 }
7048 }
7049
7050 if (!empty_var)
7051 {
7052 empty_var = (variable) pool_alloc (onepart_pool (onepart));
7053 empty_var->dv = var->dv;
7054 empty_var->refcount = 1;
7055 empty_var->n_var_parts = 0;
7056 empty_var->onepart = onepart;
7057 if (dslot)
7058 {
7059 empty_var->refcount++;
7060 *dslot = empty_var;
7061 }
7062 }
7063 else
7064 empty_var->refcount++;
7065 empty_var->in_changed_variables = true;
7066 *slot = empty_var;
7067 if (onepart)
7068 {
7069 empty_var->var_part[0].loc_chain = NULL;
7070 empty_var->var_part[0].cur_loc = NULL;
7071 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (var);
7072 VAR_LOC_1PAUX (var) = NULL;
7073 }
7074 goto drop_var;
7075 }
7076 else
7077 {
7078 if (var->onepart && !VAR_LOC_1PAUX (var))
7079 recover_dropped_1paux (var);
7080 var->refcount++;
7081 var->in_changed_variables = true;
7082 *slot = var;
7083 }
7084 }
7085 else
7086 {
7087 gcc_assert (set);
7088 if (var->n_var_parts == 0)
7089 {
7090 void **slot;
7091
7092 drop_var:
7093 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
7094 if (slot)
7095 {
7096 if (shared_hash_shared (set->vars))
7097 slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
7098 NO_INSERT);
7099 htab_clear_slot (shared_hash_htab (set->vars), slot);
7100 }
7101 }
7102 }
7103 }
7104
7105 /* Look for the index in VAR->var_part corresponding to OFFSET.
7106 Return -1 if not found. If INSERTION_POINT is non-NULL, the
7107 referenced int will be set to the index that the part has or should
7108 have, if it should be inserted. */
7109
7110 static inline int
7111 find_variable_location_part (variable var, HOST_WIDE_INT offset,
7112 int *insertion_point)
7113 {
7114 int pos, low, high;
7115
7116 if (var->onepart)
7117 {
7118 if (offset != 0)
7119 return -1;
7120
7121 if (insertion_point)
7122 *insertion_point = 0;
7123
7124 return var->n_var_parts - 1;
7125 }
7126
7127 /* Find the location part. */
7128 low = 0;
7129 high = var->n_var_parts;
7130 while (low != high)
7131 {
7132 pos = (low + high) / 2;
7133 if (VAR_PART_OFFSET (var, pos) < offset)
7134 low = pos + 1;
7135 else
7136 high = pos;
7137 }
7138 pos = low;
7139
7140 if (insertion_point)
7141 *insertion_point = pos;
7142
7143 if (pos < var->n_var_parts && VAR_PART_OFFSET (var, pos) == offset)
7144 return pos;
7145
7146 return -1;
7147 }
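/* The loop above is a textbook lower-bound binary search: it leaves
LOW == HIGH at the smallest index whose offset is >= OFFSET, which is
both the match position (when the offsets are equal) and the insertion
point (when they are not). For example, with part offsets {0, 4, 16}
a query for offset 8 ends with low == high == 2, so the function
returns -1 and stores 2 through INSERTION_POINT.  */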
7148
7149 static void **
7150 set_slot_part (dataflow_set *set, rtx loc, void **slot,
7151 decl_or_value dv, HOST_WIDE_INT offset,
7152 enum var_init_status initialized, rtx set_src)
7153 {
7154 int pos;
7155 location_chain node, next;
7156 location_chain *nextp;
7157 variable var;
7158 onepart_enum_t onepart;
7159
7160 var = (variable) *slot;
7161
7162 if (var)
7163 onepart = var->onepart;
7164 else
7165 onepart = dv_onepart_p (dv);
7166
7167 gcc_checking_assert (offset == 0 || !onepart);
7168 gcc_checking_assert (loc != dv_as_opaque (dv));
7169
7170 if (! flag_var_tracking_uninit)
7171 initialized = VAR_INIT_STATUS_INITIALIZED;
7172
7173 if (!var)
7174 {
7175 /* Create new variable information. */
7176 var = (variable) pool_alloc (onepart_pool (onepart));
7177 var->dv = dv;
7178 var->refcount = 1;
7179 var->n_var_parts = 1;
7180 var->onepart = onepart;
7181 var->in_changed_variables = false;
7182 if (var->onepart)
7183 VAR_LOC_1PAUX (var) = NULL;
7184 else
7185 VAR_PART_OFFSET (var, 0) = offset;
7186 var->var_part[0].loc_chain = NULL;
7187 var->var_part[0].cur_loc = NULL;
7188 *slot = var;
7189 pos = 0;
7190 nextp = &var->var_part[0].loc_chain;
7191 }
7192 else if (onepart)
7193 {
7194 int r = -1, c = 0;
7195
7196 gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));
7197
7198 pos = 0;
7199
7200 if (GET_CODE (loc) == VALUE)
7201 {
7202 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7203 nextp = &node->next)
7204 if (GET_CODE (node->loc) == VALUE)
7205 {
7206 if (node->loc == loc)
7207 {
7208 r = 0;
7209 break;
7210 }
7211 if (canon_value_cmp (node->loc, loc))
7212 c++;
7213 else
7214 {
7215 r = 1;
7216 break;
7217 }
7218 }
7219 else if (REG_P (node->loc) || MEM_P (node->loc))
7220 c++;
7221 else
7222 {
7223 r = 1;
7224 break;
7225 }
7226 }
7227 else if (REG_P (loc))
7228 {
7229 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7230 nextp = &node->next)
7231 if (REG_P (node->loc))
7232 {
7233 if (REGNO (node->loc) < REGNO (loc))
7234 c++;
7235 else
7236 {
7237 if (REGNO (node->loc) == REGNO (loc))
7238 r = 0;
7239 else
7240 r = 1;
7241 break;
7242 }
7243 }
7244 else
7245 {
7246 r = 1;
7247 break;
7248 }
7249 }
7250 else if (MEM_P (loc))
7251 {
7252 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7253 nextp = &node->next)
7254 if (REG_P (node->loc))
7255 c++;
7256 else if (MEM_P (node->loc))
7257 {
7258 if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0)
7259 break;
7260 else
7261 c++;
7262 }
7263 else
7264 {
7265 r = 1;
7266 break;
7267 }
7268 }
7269 else
7270 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7271 nextp = &node->next)
7272 if ((r = loc_cmp (node->loc, loc)) >= 0)
7273 break;
7274 else
7275 c++;
7276
7277 if (r == 0)
7278 return slot;
7279
7280 if (shared_var_p (var, set->vars))
7281 {
7282 slot = unshare_variable (set, slot, var, initialized);
7283 var = (variable)*slot;
7284 for (nextp = &var->var_part[0].loc_chain; c;
7285 nextp = &(*nextp)->next)
7286 c--;
7287 gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
7288 }
7289 }
7290 else
7291 {
7292 int inspos = 0;
7293
7294 gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));
7295
7296 pos = find_variable_location_part (var, offset, &inspos);
7297
7298 if (pos >= 0)
7299 {
7300 node = var->var_part[pos].loc_chain;
7301
7302 if (node
7303 && ((REG_P (node->loc) && REG_P (loc)
7304 && REGNO (node->loc) == REGNO (loc))
7305 || rtx_equal_p (node->loc, loc)))
7306 {
7307 /* LOC is at the beginning of the chain, so we have nothing
7308 to do. */
7309 if (node->init < initialized)
7310 node->init = initialized;
7311 if (set_src != NULL)
7312 node->set_src = set_src;
7313
7314 return slot;
7315 }
7316 else
7317 {
7318 /* We have to make a copy of a shared variable. */
7319 if (shared_var_p (var, set->vars))
7320 {
7321 slot = unshare_variable (set, slot, var, initialized);
7322 var = (variable)*slot;
7323 }
7324 }
7325 }
7326 else
7327 {
7328 /* We have not found the location part, so a new one will be created. */
7329
7330 /* We have to make a copy of the shared variable. */
7331 if (shared_var_p (var, set->vars))
7332 {
7333 slot = unshare_variable (set, slot, var, initialized);
7334 var = (variable)*slot;
7335 }
7336
7337 /* We track only variables whose size is <= MAX_VAR_PARTS bytes;
7338 thus there are at most MAX_VAR_PARTS different offsets. */
7339 gcc_assert (var->n_var_parts < MAX_VAR_PARTS
7340 && (!var->n_var_parts || !onepart));
7341
7342 /* We have to move the elements of the array starting at index
7343 inspos to the next position. */
7344 for (pos = var->n_var_parts; pos > inspos; pos--)
7345 var->var_part[pos] = var->var_part[pos - 1];
7346
7347 var->n_var_parts++;
7348 gcc_checking_assert (!onepart);
7349 VAR_PART_OFFSET (var, pos) = offset;
7350 var->var_part[pos].loc_chain = NULL;
7351 var->var_part[pos].cur_loc = NULL;
7352 }
7353
7354 /* Delete the location from the list. */
7355 nextp = &var->var_part[pos].loc_chain;
7356 for (node = var->var_part[pos].loc_chain; node; node = next)
7357 {
7358 next = node->next;
7359 if ((REG_P (node->loc) && REG_P (loc)
7360 && REGNO (node->loc) == REGNO (loc))
7361 || rtx_equal_p (node->loc, loc))
7362 {
7363 /* Save these values, to assign to the new node, before
7364 deleting this one. */
7365 if (node->init > initialized)
7366 initialized = node->init;
7367 if (node->set_src != NULL && set_src == NULL)
7368 set_src = node->set_src;
7369 if (var->var_part[pos].cur_loc == node->loc)
7370 var->var_part[pos].cur_loc = NULL;
7371 pool_free (loc_chain_pool, node);
7372 *nextp = next;
7373 break;
7374 }
7375 else
7376 nextp = &node->next;
7377 }
7378
7379 nextp = &var->var_part[pos].loc_chain;
7380 }
7381
7382 /* Add the location to the beginning. */
7383 node = (location_chain) pool_alloc (loc_chain_pool);
7384 node->loc = loc;
7385 node->init = initialized;
7386 node->set_src = set_src;
7387 node->next = *nextp;
7388 *nextp = node;
7389
7390 /* If no location was emitted for this part yet, do so now. */
7391 if (var->var_part[pos].cur_loc == NULL)
7392 variable_was_changed (var, set);
7393
7394 return slot;
7395 }
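/* On this reading of the onepart branches above, each one-part
location chain is kept in a canonical order: registers first (by
REGNO), then MEMs (by address, via loc_cmp), then VALUEs (by
canon_value_cmp), then any remaining locations. The counter C records
how many nodes were skipped so that the insertion point can be
re-found after the variable has been unshared.  */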
7396
7397 /* Set the part of variable's location in the dataflow set SET. The
7398 variable part is specified by variable's declaration in DV and
7399 offset OFFSET and the part's location by LOC. IOPT should be
7400 NO_INSERT if the variable is known to be in SET already and the
7401 variable hash table must not be resized, and INSERT otherwise. */
7402
7403 static void
7404 set_variable_part (dataflow_set *set, rtx loc,
7405 decl_or_value dv, HOST_WIDE_INT offset,
7406 enum var_init_status initialized, rtx set_src,
7407 enum insert_option iopt)
7408 {
7409 void **slot;
7410
7411 if (iopt == NO_INSERT)
7412 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7413 else
7414 {
7415 slot = shared_hash_find_slot (set->vars, dv);
7416 if (!slot)
7417 slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
7418 }
7419 set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
7420 }
7421
7422 /* Remove all recorded register locations for the given variable part
7423 from dataflow set SET, except for those identical to LOC.
7424 The variable part is specified by its SET->vars slot SLOT and
7425 offset OFFSET. */
7426
7427 static void **
7428 clobber_slot_part (dataflow_set *set, rtx loc, void **slot,
7429 HOST_WIDE_INT offset, rtx set_src)
7430 {
7431 variable var = (variable) *slot;
7432 int pos = find_variable_location_part (var, offset, NULL);
7433
7434 if (pos >= 0)
7435 {
7436 location_chain node, next;
7437
7438 /* Remove the register locations from the dataflow set. */
7439 next = var->var_part[pos].loc_chain;
7440 for (node = next; node; node = next)
7441 {
7442 next = node->next;
7443 if (node->loc != loc
7444 && (!flag_var_tracking_uninit
7445 || !set_src
7446 || MEM_P (set_src)
7447 || !rtx_equal_p (set_src, node->set_src)))
7448 {
7449 if (REG_P (node->loc))
7450 {
7451 attrs anode, anext;
7452 attrs *anextp;
7453
7454 /* Remove the variable part from the register's
7455 list, but preserve any other variable parts
7456 that might be regarded as live in that same
7457 register. */
7458 anextp = &set->regs[REGNO (node->loc)];
7459 for (anode = *anextp; anode; anode = anext)
7460 {
7461 anext = anode->next;
7462 if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
7463 && anode->offset == offset)
7464 {
7465 pool_free (attrs_pool, anode);
7466 *anextp = anext;
7467 }
7468 else
7469 anextp = &anode->next;
7470 }
7471 }
7472
7473 slot = delete_slot_part (set, node->loc, slot, offset);
7474 }
7475 }
7476 }
7477
7478 return slot;
7479 }
7480
7481 /* Remove all recorded register locations for the given variable part
7482 from dataflow set SET, except for those identical to LOC.
7483 The variable part is specified by the variable's declaration or
7484 value DV and offset OFFSET. */
7485
7486 static void
7487 clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7488 HOST_WIDE_INT offset, rtx set_src)
7489 {
7490 void **slot;
7491
7492 if (!dv_as_opaque (dv)
7493 || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
7494 return;
7495
7496 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7497 if (!slot)
7498 return;
7499
7500 clobber_slot_part (set, loc, slot, offset, set_src);
7501 }
7502
7503 /* Delete the part of variable's location from dataflow set SET. The
7504 variable part is specified by its SET->vars slot SLOT and offset
7505 OFFSET and the part's location by LOC. */
7506
7507 static void **
7508 delete_slot_part (dataflow_set *set, rtx loc, void **slot,
7509 HOST_WIDE_INT offset)
7510 {
7511 variable var = (variable) *slot;
7512 int pos = find_variable_location_part (var, offset, NULL);
7513
7514 if (pos >= 0)
7515 {
7516 location_chain node, next;
7517 location_chain *nextp;
7518 bool changed;
7519 rtx cur_loc;
7520
7521 if (shared_var_p (var, set->vars))
7522 {
7523 /* If the variable part contains the location being deleted, we
7524 have to unshare the variable first. */
7525 for (node = var->var_part[pos].loc_chain; node;
7526 node = node->next)
7527 {
7528 if ((REG_P (node->loc) && REG_P (loc)
7529 && REGNO (node->loc) == REGNO (loc))
7530 || rtx_equal_p (node->loc, loc))
7531 {
7532 slot = unshare_variable (set, slot, var,
7533 VAR_INIT_STATUS_UNKNOWN);
7534 var = (variable)*slot;
7535 break;
7536 }
7537 }
7538 }
7539
7540 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
7541 cur_loc = VAR_LOC_FROM (var);
7542 else
7543 cur_loc = var->var_part[pos].cur_loc;
7544
7545 /* Delete the location part. */
7546 changed = false;
7547 nextp = &var->var_part[pos].loc_chain;
7548 for (node = *nextp; node; node = next)
7549 {
7550 next = node->next;
7551 if ((REG_P (node->loc) && REG_P (loc)
7552 && REGNO (node->loc) == REGNO (loc))
7553 || rtx_equal_p (node->loc, loc))
7554 {
7555 /* If we have deleted the location that was last emitted,
7556 we have to emit a new one, so add the variable to the set
7557 of changed variables. */
7558 if (cur_loc == node->loc)
7559 {
7560 changed = true;
7561 var->var_part[pos].cur_loc = NULL;
7562 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
7563 VAR_LOC_FROM (var) = NULL;
7564 }
7565 pool_free (loc_chain_pool, node);
7566 *nextp = next;
7567 break;
7568 }
7569 else
7570 nextp = &node->next;
7571 }
7572
7573 if (var->var_part[pos].loc_chain == NULL)
7574 {
7575 changed = true;
7576 var->n_var_parts--;
7577 while (pos < var->n_var_parts)
7578 {
7579 var->var_part[pos] = var->var_part[pos + 1];
7580 pos++;
7581 }
7582 }
7583 if (changed)
7584 variable_was_changed (var, set);
7585 }
7586
7587 return slot;
7588 }
7589
7590 /* Delete the part of variable's location from dataflow set SET. The
7591 variable part is specified by variable's declaration or value DV
7592 and offset OFFSET and the part's location by LOC. */
7593
7594 static void
7595 delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7596 HOST_WIDE_INT offset)
7597 {
7598 void **slot = shared_hash_find_slot_noinsert (set->vars, dv);
7599 if (!slot)
7600 return;
7601
7602 delete_slot_part (set, loc, slot, offset);
7603 }
7604
7605 DEF_VEC_P (variable);
7606 DEF_VEC_ALLOC_P (variable, heap);
7607
7608 DEF_VEC_ALLOC_P_STACK (rtx);
7609 #define VEC_rtx_stack_alloc(alloc) VEC_stack_alloc (rtx, alloc)
7610
7611 /* Structure for passing some other parameters to function
7612 vt_expand_loc_callback. */
7613 struct expand_loc_callback_data
7614 {
7615 /* The variables and values active at this point. */
7616 htab_t vars;
7617
7618 /* Stack of values and debug_exprs under expansion, and their
7619 children. */
7620 VEC (rtx, stack) *expanding;
7621
7622 /* Stack of values and debug_exprs whose expansion hit recursion
7623 cycles. They will have VALUE_RECURSED_INTO marked when added to
7624 this list. This flag will be cleared if any of its dependencies
7625 resolves to a valid location. So, if the flag remains set at the
7626 end of the search, we know no valid location for this one can
7627 possibly exist. */
7628 VEC (rtx, stack) *pending;
7629
7630 /* The maximum depth among the sub-expressions under expansion.
7631 Zero indicates no expansion so far. */
7632 expand_depth depth;
7633 };
7634
7635 /* Allocate the one-part auxiliary data structure for VAR, with enough
7636 room for COUNT dependencies. */
7637
7638 static void
7639 loc_exp_dep_alloc (variable var, int count)
7640 {
7641 size_t allocsize;
7642
7643 gcc_checking_assert (var->onepart);
7644
7645 /* We can be called with COUNT == 0 to allocate the data structure
7646 without any dependencies, e.g. for the backlinks only. However,
7647 if we are specifying a COUNT, then the dependency list must have
7648 been emptied before. It would be possible to adjust pointers or
7649 force it empty here, but this is better done at an earlier point
7650 in the algorithm, so we instead leave an assertion to catch
7651 errors. */
7652 gcc_checking_assert (!count
7653 || VEC_empty (loc_exp_dep, VAR_LOC_DEP_VEC (var)));
7654
7655 if (VAR_LOC_1PAUX (var)
7656 && VEC_space (loc_exp_dep, VAR_LOC_DEP_VEC (var), count))
7657 return;
7658
7659 allocsize = offsetof (struct onepart_aux, deps)
7660 + VEC_embedded_size (loc_exp_dep, count);
7661
7662 if (VAR_LOC_1PAUX (var))
7663 {
7664 VAR_LOC_1PAUX (var) = XRESIZEVAR (struct onepart_aux,
7665 VAR_LOC_1PAUX (var), allocsize);
7666 /* If the reallocation moves the onepaux structure, the
7667 back-pointer to BACKLINKS in the first list member will still
7668 point to its old location. Adjust it. */
7669 if (VAR_LOC_DEP_LST (var))
7670 VAR_LOC_DEP_LST (var)->pprev = VAR_LOC_DEP_LSTP (var);
7671 }
7672 else
7673 {
7674 VAR_LOC_1PAUX (var) = XNEWVAR (struct onepart_aux, allocsize);
7675 *VAR_LOC_DEP_LSTP (var) = NULL;
7676 VAR_LOC_FROM (var) = NULL;
7677 VAR_LOC_DEPTH (var).complexity = 0;
7678 VAR_LOC_DEPTH (var).entryvals = 0;
7679 }
7680 VEC_embedded_init (loc_exp_dep, VAR_LOC_DEP_VEC (var), count);
7681 }
7682
7683 /* Remove all entries from the vector of active dependencies of VAR,
7684 removing them from the back-links lists too. */
7685
7686 static void
7687 loc_exp_dep_clear (variable var)
7688 {
7689 while (!VEC_empty (loc_exp_dep, VAR_LOC_DEP_VEC (var)))
7690 {
7691 loc_exp_dep *led = VEC_last (loc_exp_dep, VAR_LOC_DEP_VEC (var));
7692 if (led->next)
7693 led->next->pprev = led->pprev;
7694 if (led->pprev)
7695 *led->pprev = led->next;
7696 VEC_pop (loc_exp_dep, VAR_LOC_DEP_VEC (var));
7697 }
7698 }
7699
7700 /* Insert an active dependency from VAR on X to the vector of
7701 dependencies, and add the corresponding back-link to X's list of
7702 back-links in VARS. */
7703
7704 static void
7705 loc_exp_insert_dep (variable var, rtx x, htab_t vars)
7706 {
7707 decl_or_value dv;
7708 variable xvar;
7709 loc_exp_dep *led;
7710
7711 dv = dv_from_rtx (x);
7712
7713 /* ??? Build a vector of variables parallel to EXPANDING, to avoid
7714 an additional lookup? */
7715 xvar = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
7716
7717 if (!xvar)
7718 {
7719 xvar = variable_from_dropped (dv, NO_INSERT);
7720 gcc_checking_assert (xvar);
7721 }
7722
7723 /* No point in adding the same backlink more than once. This may
7724 arise if, say, the same value appears in two complex expressions in
7725 the same loc_list, or even more than once in a single
7726 expression. */
7727 if (VAR_LOC_DEP_LST (xvar) && VAR_LOC_DEP_LST (xvar)->dv == var->dv)
7728 return;
7729
7730 if (var->onepart == NOT_ONEPART)
7731 led = (loc_exp_dep *) pool_alloc (loc_exp_dep_pool);
7732 else
7733 {
7734 VEC_quick_push (loc_exp_dep, VAR_LOC_DEP_VEC (var), NULL);
7735 led = VEC_last (loc_exp_dep, VAR_LOC_DEP_VEC (var));
7736 }
7737 led->dv = var->dv;
7738 led->value = x;
7739
7740 loc_exp_dep_alloc (xvar, 0);
7741 led->pprev = VAR_LOC_DEP_LSTP (xvar);
7742 led->next = *led->pprev;
7743 if (led->next)
7744 led->next->pprev = &led->next;
7745 *led->pprev = led;
7746 }
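/* The back-links above use the "pointer to the previous next-pointer"
intrusive-list idiom: LED->pprev points at whatever pointer currently
points at LED (the list head or the previous node's NEXT field), so a
node can unlink itself in O(1) without knowing its predecessor. A
stand-alone sketch of the idiom (illustrative only):  */
#if 0
struct node { struct node *next, **pprev; };

/* Link N at the head of the list rooted at *HEAD.  */
static void
link_node (struct node **head, struct node *n)
{
  n->pprev = head;
  n->next = *head;
  if (n->next)
    n->next->pprev = &n->next;
  *head = n;
}

/* Unlink N from whatever list it is on, in constant time.  */
static void
unlink_node (struct node *n)
{
  if (n->next)
    n->next->pprev = n->pprev;
  *n->pprev = n->next;
}
#endif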
7747
7748 /* Create active dependencies of VAR on COUNT values starting at
7749 VALUE, and corresponding back-links to the entries in VARS. Return
7750 true if we found any pending-recursion results. */
7751
7752 static bool
7753 loc_exp_dep_set (variable var, rtx result, rtx *value, int count, htab_t vars)
7754 {
7755 bool pending_recursion = false;
7756
7757 gcc_checking_assert (VEC_empty (loc_exp_dep, VAR_LOC_DEP_VEC (var)));
7758
7759 /* Set up dependencies on each of the COUNT values starting at
7760 VALUE, i.e. the sub-expressions collected during the expansion. */
7761 loc_exp_dep_alloc (var, count);
7762
7763 while (count--)
7764 {
7765 rtx x = *value++;
7766
7767 if (!pending_recursion)
7768 pending_recursion = !result && VALUE_RECURSED_INTO (x);
7769
7770 loc_exp_insert_dep (var, x, vars);
7771 }
7772
7773 return pending_recursion;
7774 }
7775
7776 /* Notify the back-links of IVAR that are pending recursion that we
7777 have found a non-NIL value for it, so they are cleared for another
7778 attempt to compute a current location. */
7779
7780 static void
7781 notify_dependents_of_resolved_value (variable ivar, htab_t vars)
7782 {
7783 loc_exp_dep *led, *next;
7784
7785 for (led = VAR_LOC_DEP_LST (ivar); led; led = next)
7786 {
7787 decl_or_value dv = led->dv;
7788 variable var;
7789
7790 next = led->next;
7791
7792 if (dv_is_value_p (dv))
7793 {
7794 rtx value = dv_as_value (dv);
7795
7796 /* If we have already resolved it, leave it alone. */
7797 if (!VALUE_RECURSED_INTO (value))
7798 continue;
7799
7800 /* Check that VALUE_RECURSED_INTO, true from the test above,
7801 implies NO_LOC_P. */
7802 gcc_checking_assert (NO_LOC_P (value));
7803
7804 /* We won't notify variables that are being expanded,
7805 because their dependency list is cleared before
7806 recursing. */
7807 NO_LOC_P (value) = false;
7808 VALUE_RECURSED_INTO (value) = false;
7809
7810 gcc_checking_assert (dv_changed_p (dv));
7811 }
7812 else
7813 {
7814 gcc_checking_assert (dv_onepart_p (dv) != NOT_ONEPART);
7815 if (!dv_changed_p (dv))
7816 continue;
7817 }
7818
7819 var = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
7820
7821 if (!var)
7822 var = variable_from_dropped (dv, NO_INSERT);
7823
7824 if (var)
7825 notify_dependents_of_resolved_value (var, vars);
7826
7827 if (next)
7828 next->pprev = led->pprev;
7829 if (led->pprev)
7830 *led->pprev = next;
7831 led->next = NULL;
7832 led->pprev = NULL;
7833 }
7834 }
7835
7836 static rtx vt_expand_loc_callback (rtx x, bitmap regs,
7837 int max_depth, void *data);
7838
7839 /* Return the combined depth, when one sub-expression evaluated to
7840 BEST_DEPTH and the previous known depth was SAVED_DEPTH. */
7841
7842 static inline expand_depth
7843 update_depth (expand_depth saved_depth, expand_depth best_depth)
7844 {
7845 /* If we didn't find anything, stick with what we had. */
7846 if (!best_depth.complexity)
7847 return saved_depth;
7848
7849 /* If we hadn't found anything before, use the depth of the current
7850 expression. Do NOT add one extra level; we want to compute the
7851 maximum depth among sub-expressions. We'll increment it later,
7852 if appropriate. */
7853 if (!saved_depth.complexity)
7854 return best_depth;
7855
7856 /* Combine the entryval count so that regardless of which one we
7857 return, the entryval count is accurate. */
7858 best_depth.entryvals = saved_depth.entryvals
7859 = best_depth.entryvals + saved_depth.entryvals;
7860
7861 if (saved_depth.complexity < best_depth.complexity)
7862 return best_depth;
7863 else
7864 return saved_depth;
7865 }
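/* For example, combining a SAVED_DEPTH of { complexity 2, entryvals 1 }
with a BEST_DEPTH of { complexity 3, entryvals 0 } yields
{ complexity 3, entryvals 1 }: the larger complexity wins, while the
entryval counts are accumulated into whichever operand is returned.  */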
7866
7867 /* Expand VAR to a location RTX, updating its cur_loc. Use REGS and
7868 DATA for cselib expand callback. If PENDRECP is given, indicate in
7869 it whether any sub-expression couldn't be fully evaluated because
7870 it is pending recursion resolution. */
7871
7872 static inline rtx
7873 vt_expand_var_loc_chain (variable var, bitmap regs, void *data, bool *pendrecp)
7874 {
7875 struct expand_loc_callback_data *elcd
7876 = (struct expand_loc_callback_data *) data;
7877 location_chain loc, next;
7878 rtx result = NULL;
7879 int first_child, result_first_child, last_child;
7880 bool pending_recursion;
7881 rtx loc_from = NULL;
7882 struct elt_loc_list *cloc = NULL;
7883 expand_depth depth = { 0, 0 }, saved_depth = elcd->depth;
7884 int wanted_entryvals, found_entryvals = 0;
7885
7886 /* Clear all backlinks pointing at this, so that we're not notified
7887 while we're active. */
7888 loc_exp_dep_clear (var);
7889
7890 retry:
7891 if (var->onepart == ONEPART_VALUE)
7892 {
7893 cselib_val *val = CSELIB_VAL_PTR (dv_as_value (var->dv));
7894
7895 gcc_checking_assert (cselib_preserved_value_p (val));
7896
7897 cloc = val->locs;
7898 }
7899
7900 first_child = result_first_child = last_child
7901 = VEC_length (rtx, elcd->expanding);
7902
7903 wanted_entryvals = found_entryvals;
7904
7905 /* Attempt to expand each available location in turn. */
7906 for (next = loc = var->n_var_parts ? var->var_part[0].loc_chain : NULL;
7907 loc || cloc; loc = next)
7908 {
7909 result_first_child = last_child;
7910
7911 if (!loc)
7912 {
7913 loc_from = cloc->loc;
7914 next = loc;
7915 cloc = cloc->next;
7916 if (unsuitable_loc (loc_from))
7917 continue;
7918 }
7919 else
7920 {
7921 loc_from = loc->loc;
7922 next = loc->next;
7923 }
7924
7925 gcc_checking_assert (!unsuitable_loc (loc_from));
7926
7927 elcd->depth.complexity = elcd->depth.entryvals = 0;
7928 result = cselib_expand_value_rtx_cb (loc_from, regs, EXPR_DEPTH,
7929 vt_expand_loc_callback, data);
7930 last_child = VEC_length (rtx, elcd->expanding);
7931
7932 if (result)
7933 {
7934 depth = elcd->depth;
7935
7936 gcc_checking_assert (depth.complexity
7937 || result_first_child == last_child);
7938
7939 if (last_child - result_first_child != 1)
7940 {
7941 if (!depth.complexity && GET_CODE (result) == ENTRY_VALUE)
7942 depth.entryvals++;
7943 depth.complexity++;
7944 }
7945
7946 if (depth.complexity <= EXPR_USE_DEPTH)
7947 {
7948 if (depth.entryvals <= wanted_entryvals)
7949 break;
7950 else if (!found_entryvals || depth.entryvals < found_entryvals)
7951 found_entryvals = depth.entryvals;
7952 }
7953
7954 result = NULL;
7955 }
7956
7957 /* Set it up in case we leave the loop. */
7958 depth.complexity = depth.entryvals = 0;
7959 loc_from = NULL;
7960 result_first_child = first_child;
7961 }
7962
7963 if (!loc_from && wanted_entryvals < found_entryvals)
7964 {
7965 /* We found entries with ENTRY_VALUEs and skipped them. Since
7966 we could not find any expansions without ENTRY_VALUEs, but we
7967 found at least one with them, go back and get an entry with
7968 the minimum ENTRY_VALUE count that we found. We could
7969 avoid looping, but since each sub-loc is already resolved,
7970 the re-expansion should be trivial. ??? Should we record all
7971 attempted locs as dependencies, so that we retry the
7972 expansion should any of them change, in the hope it can give
7973 us a new entry without an ENTRY_VALUE? */
7974 VEC_truncate (rtx, elcd->expanding, first_child);
7975 goto retry;
7976 }
7977
7978 /* Register all encountered dependencies as active. */
7979 pending_recursion = loc_exp_dep_set
7980 (var, result, VEC_address (rtx, elcd->expanding) + result_first_child,
7981 last_child - result_first_child, elcd->vars);
7982
7983 VEC_truncate (rtx, elcd->expanding, first_child);
7984
7985 /* Record where the expansion came from. */
7986 gcc_checking_assert (!result || !pending_recursion);
7987 VAR_LOC_FROM (var) = loc_from;
7988 VAR_LOC_DEPTH (var) = depth;
7989
7990 gcc_checking_assert (!depth.complexity == !result);
7991
7992 elcd->depth = update_depth (saved_depth, depth);
7993
7994 /* Indicate whether any of the dependencies are pending recursion
7995 resolution. */
7996 if (pendrecp)
7997 *pendrecp = pending_recursion;
7998
7999 if (!pendrecp || !pending_recursion)
8000 var->var_part[0].cur_loc = result;
8001
8002 return result;
8003 }
8004
8005 /* Callback for cselib_expand_value that looks for expressions
8006 holding the value in the var-tracking hash tables. Return X for
8007 standard processing, anything else is to be used as-is. */
8008
8009 static rtx
8010 vt_expand_loc_callback (rtx x, bitmap regs,
8011 int max_depth ATTRIBUTE_UNUSED,
8012 void *data)
8013 {
8014 struct expand_loc_callback_data *elcd
8015 = (struct expand_loc_callback_data *) data;
8016 decl_or_value dv;
8017 variable var;
8018 rtx result, subreg;
8019 bool pending_recursion = false;
8020 bool from_empty = false;
8021
8022 switch (GET_CODE (x))
8023 {
8024 case SUBREG:
8025 subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
8026 EXPR_DEPTH,
8027 vt_expand_loc_callback, data);
8028
8029 if (!subreg)
8030 return NULL;
8031
8032 result = simplify_gen_subreg (GET_MODE (x), subreg,
8033 GET_MODE (SUBREG_REG (x)),
8034 SUBREG_BYTE (x));
8035
8036 /* Invalid SUBREGs are ok in debug info. ??? We could try
8037 alternate expansions for the VALUE as well. */
8038 if (!result)
8039 result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));
8040
8041 return result;
8042
8043 case DEBUG_EXPR:
8044 case VALUE:
8045 dv = dv_from_rtx (x);
8046 break;
8047
8048 default:
8049 return x;
8050 }
8051
8052 VEC_safe_push (rtx, stack, elcd->expanding, x);
8053
8054 /* Check that VALUE_RECURSED_INTO implies NO_LOC_P. */
8055 gcc_checking_assert (!VALUE_RECURSED_INTO (x) || NO_LOC_P (x));
8056
8057 if (NO_LOC_P (x))
8058 {
8059 gcc_checking_assert (VALUE_RECURSED_INTO (x) || !dv_changed_p (dv));
8060 return NULL;
8061 }
8062
8063 var = (variable) htab_find_with_hash (elcd->vars, dv, dv_htab_hash (dv));
8064
8065 if (!var)
8066 {
8067 from_empty = true;
8068 var = variable_from_dropped (dv, INSERT);
8069 }
8070
8071 gcc_checking_assert (var);
8072
8073 if (!dv_changed_p (dv))
8074 {
8075 gcc_checking_assert (!NO_LOC_P (x));
8076 gcc_checking_assert (var->var_part[0].cur_loc);
8077 gcc_checking_assert (VAR_LOC_1PAUX (var));
8078 gcc_checking_assert (VAR_LOC_1PAUX (var)->depth.complexity);
8079
8080 elcd->depth = update_depth (elcd->depth, VAR_LOC_1PAUX (var)->depth);
8081
8082 return var->var_part[0].cur_loc;
8083 }
8084
8085 VALUE_RECURSED_INTO (x) = true;
8086 /* This is tentative, but it makes some tests simpler. */
8087 NO_LOC_P (x) = true;
8088
8089 gcc_checking_assert (var->n_var_parts == 1 || from_empty);
8090
8091 result = vt_expand_var_loc_chain (var, regs, data, &pending_recursion);
8092
8093 if (pending_recursion)
8094 {
8095 gcc_checking_assert (!result);
8096 VEC_safe_push (rtx, stack, elcd->pending, x);
8097 }
8098 else
8099 {
8100 NO_LOC_P (x) = !result;
8101 VALUE_RECURSED_INTO (x) = false;
8102 set_dv_changed (dv, false);
8103
8104 if (result)
8105 notify_dependents_of_resolved_value (var, elcd->vars);
8106 }
8107
8108 return result;
8109 }
8110
8111 /* While expanding variables, we may encounter recursion cycles
8112 because of mutual (possibly indirect) dependencies between two
8113 particular variables (or values), say A and B. If we're trying to
8114 expand A when we get to B, which in turn attempts to expand A, if
8115 we can't find any other expansion for B, we'll add B to this
8116 pending-recursion stack, and tentatively return NULL for its
8117 location. This tentative value will be used for any other
8118 occurrences of B, unless A gets some other location, in which case
8119 it will notify B that it is worth another try at computing a
8120 location for it, and it will use the location computed for A then.
8121 At the end of the expansion, the tentative NULL locations become
8122 final for all members of PENDING that didn't get a notification.
8123 This function performs this finalization of NULL locations. */
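
/* A hedged illustration (made-up VALUEs, not from any testcase):
   suppose V1's only location is (plus:SI V2 (const_int 4)) while
   V2's only location is (plus:SI V1 (const_int -4)). Expanding V1
   recurses into V2, which recurses back into the still-active V1;
   V2 is then pushed on the PENDING stack with a tentative NULL
   location, and unless another expansion of V1 succeeds and
   notifies V2, the function below finalizes that NULL. */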
8124
8125 static void
8126 resolve_expansions_pending_recursion (VEC (rtx, stack) *pending)
8127 {
8128 while (!VEC_empty (rtx, pending))
8129 {
8130 rtx x = VEC_pop (rtx, pending);
8131 decl_or_value dv;
8132
8133 if (!VALUE_RECURSED_INTO (x))
8134 continue;
8135
8136 gcc_checking_assert (NO_LOC_P (x));
8137 VALUE_RECURSED_INTO (x) = false;
8138 dv = dv_from_rtx (x);
8139 gcc_checking_assert (dv_changed_p (dv));
8140 set_dv_changed (dv, false);
8141 }
8142 }
8143
8144 /* Initialize expand_loc_callback_data D with variable hash table V.
8145 It must be a macro because of alloca (VEC stack). */
8146 #define INIT_ELCD(d, v) \
8147 do \
8148 { \
8149 (d).vars = (v); \
8150 (d).expanding = VEC_alloc (rtx, stack, 4); \
8151 (d).pending = VEC_alloc (rtx, stack, 4); \
8152 (d).depth.complexity = (d).depth.entryvals = 0; \
8153 } \
8154 while (0)
8155 /* Finalize expand_loc_callback_data D, resolved to location L. */
8156 #define FINI_ELCD(d, l) \
8157 do \
8158 { \
8159 resolve_expansions_pending_recursion ((d).pending); \
8160 VEC_free (rtx, stack, (d).pending); \
8161 VEC_free (rtx, stack, (d).expanding); \
8162 \
8163 if ((l) && MEM_P (l)) \
8164 (l) = targetm.delegitimize_address (l); \
8165 } \
8166 while (0)
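
/* A minimal usage sketch of the two macros (mirroring vt_expand_loc
   below; assumes LOC, VARS and the usual globals are in scope):

     struct expand_loc_callback_data data;
     rtx res;

     INIT_ELCD (data, vars);
     res = cselib_expand_value_rtx_cb (loc, scratch_regs, EXPR_DEPTH,
                                       vt_expand_loc_callback, &data);
     FINI_ELCD (data, res);  */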
8167
8168 /* Expand VALUEs and DEBUG_EXPRs in LOC to a location, using the
8169 equivalences in VARS, updating their CUR_LOCs in the process. */
8170
8171 static rtx
8172 vt_expand_loc (rtx loc, htab_t vars)
8173 {
8174 struct expand_loc_callback_data data;
8175 rtx result;
8176
8177 if (!MAY_HAVE_DEBUG_INSNS)
8178 return loc;
8179
8180 INIT_ELCD (data, vars);
8181
8182 result = cselib_expand_value_rtx_cb (loc, scratch_regs, EXPR_DEPTH,
8183 vt_expand_loc_callback, &data);
8184
8185 FINI_ELCD (data, result);
8186
8187 return result;
8188 }
8189
8190 /* Expand the one-part VARiable to a location, using the equivalences
8191 in VARS, updating their CUR_LOCs in the process. */
8192
8193 static rtx
8194 vt_expand_1pvar (variable var, htab_t vars)
8195 {
8196 struct expand_loc_callback_data data;
8197 rtx loc;
8198
8199 gcc_checking_assert (var->onepart && var->n_var_parts == 1);
8200
8201 if (!dv_changed_p (var->dv))
8202 return var->var_part[0].cur_loc;
8203
8204 INIT_ELCD (data, vars);
8205
8206 loc = vt_expand_var_loc_chain (var, scratch_regs, &data, NULL);
8207
8208 gcc_checking_assert (VEC_empty (rtx, data.expanding));
8209
8210 FINI_ELCD (data, loc);
8211
8212 return loc;
8213 }
8214
8215 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
8216 additional parameters: WHERE specifies whether the note shall be emitted
8217 before or after instruction INSN. */
8218
8219 static int
8220 emit_note_insn_var_location (void **varp, void *data)
8221 {
8222 variable var = (variable) *varp;
8223 rtx insn = ((emit_note_data *)data)->insn;
8224 enum emit_note_where where = ((emit_note_data *)data)->where;
8225 htab_t vars = ((emit_note_data *)data)->vars;
8226 rtx note, note_vl;
8227 int i, j, n_var_parts;
8228 bool complete;
8229 enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
8230 HOST_WIDE_INT last_limit;
8231 tree type_size_unit;
8232 HOST_WIDE_INT offsets[MAX_VAR_PARTS];
8233 rtx loc[MAX_VAR_PARTS];
8234 tree decl;
8235 location_chain lc;
8236
8237 gcc_checking_assert (var->onepart == NOT_ONEPART
8238 || var->onepart == ONEPART_VDECL);
8239
8240 decl = dv_as_decl (var->dv);
8241
8242 complete = true;
8243 last_limit = 0;
8244 n_var_parts = 0;
8245 if (!var->onepart)
8246 for (i = 0; i < var->n_var_parts; i++)
8247 if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain)
8248 var->var_part[i].cur_loc = var->var_part[i].loc_chain->loc;
8249 for (i = 0; i < var->n_var_parts; i++)
8250 {
8251 enum machine_mode mode, wider_mode;
8252 rtx loc2;
8253 HOST_WIDE_INT offset;
8254
8255 if (i == 0 && var->onepart)
8256 {
8257 gcc_checking_assert (var->n_var_parts == 1);
8258 offset = 0;
8259 initialized = VAR_INIT_STATUS_INITIALIZED;
8260 loc2 = vt_expand_1pvar (var, vars);
8261 }
8262 else
8263 {
8264 if (last_limit < VAR_PART_OFFSET (var, i))
8265 {
8266 complete = false;
8267 break;
8268 }
8269 else if (last_limit > VAR_PART_OFFSET (var, i))
8270 continue;
8271 offset = VAR_PART_OFFSET (var, i);
8272 loc2 = var->var_part[i].cur_loc;
8273 if (loc2 && GET_CODE (loc2) == MEM
8274 && GET_CODE (XEXP (loc2, 0)) == VALUE)
8275 {
8276 rtx depval = XEXP (loc2, 0);
8277
8278 loc2 = vt_expand_loc (loc2, vars);
8279
8280 if (loc2)
8281 loc_exp_insert_dep (var, depval, vars);
8282 }
8283 if (!loc2)
8284 {
8285 complete = false;
8286 continue;
8287 }
8288 gcc_checking_assert (GET_CODE (loc2) != VALUE);
8289 for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
8290 if (var->var_part[i].cur_loc == lc->loc)
8291 {
8292 initialized = lc->init;
8293 break;
8294 }
8295 gcc_assert (lc);
8296 }
8297
8298 offsets[n_var_parts] = offset;
8299 if (!loc2)
8300 {
8301 complete = false;
8302 continue;
8303 }
8304 loc[n_var_parts] = loc2;
8305 mode = GET_MODE (var->var_part[i].cur_loc);
8306 if (mode == VOIDmode && var->onepart)
8307 mode = DECL_MODE (decl);
8308 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
8309
8310 /* Attempt to merge adjacent registers or memory. */
8311 wider_mode = GET_MODE_WIDER_MODE (mode);
8312 for (j = i + 1; j < var->n_var_parts; j++)
8313 if (last_limit <= VAR_PART_OFFSET (var, j))
8314 break;
8315 if (j < var->n_var_parts
8316 && wider_mode != VOIDmode
8317 && var->var_part[j].cur_loc
8318 && mode == GET_MODE (var->var_part[j].cur_loc)
8319 && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
8320 && last_limit == (var->onepart ? 0 : VAR_PART_OFFSET (var, j))
8321 && (loc2 = vt_expand_loc (var->var_part[j].cur_loc, vars))
8322 && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2))
8323 {
8324 rtx new_loc = NULL;
8325
8326 if (REG_P (loc[n_var_parts])
8327 && hard_regno_nregs[REGNO (loc[n_var_parts])][mode] * 2
8328 == hard_regno_nregs[REGNO (loc[n_var_parts])][wider_mode]
8329 && end_hard_regno (mode, REGNO (loc[n_var_parts]))
8330 == REGNO (loc2))
8331 {
8332 if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
8333 new_loc = simplify_subreg (wider_mode, loc[n_var_parts],
8334 mode, 0);
8335 else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
8336 new_loc = simplify_subreg (wider_mode, loc2, mode, 0);
8337 if (new_loc)
8338 {
8339 if (!REG_P (new_loc)
8340 || REGNO (new_loc) != REGNO (loc[n_var_parts]))
8341 new_loc = NULL;
8342 else
8343 REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
8344 }
8345 }
8346 else if (MEM_P (loc[n_var_parts])
8347 && GET_CODE (XEXP (loc2, 0)) == PLUS
8348 && REG_P (XEXP (XEXP (loc2, 0), 0))
8349 && CONST_INT_P (XEXP (XEXP (loc2, 0), 1)))
8350 {
8351 if ((REG_P (XEXP (loc[n_var_parts], 0))
8352 && rtx_equal_p (XEXP (loc[n_var_parts], 0),
8353 XEXP (XEXP (loc2, 0), 0))
8354 && INTVAL (XEXP (XEXP (loc2, 0), 1))
8355 == GET_MODE_SIZE (mode))
8356 || (GET_CODE (XEXP (loc[n_var_parts], 0)) == PLUS
8357 && CONST_INT_P (XEXP (XEXP (loc[n_var_parts], 0), 1))
8358 && rtx_equal_p (XEXP (XEXP (loc[n_var_parts], 0), 0),
8359 XEXP (XEXP (loc2, 0), 0))
8360 && INTVAL (XEXP (XEXP (loc[n_var_parts], 0), 1))
8361 + GET_MODE_SIZE (mode)
8362 == INTVAL (XEXP (XEXP (loc2, 0), 1))))
8363 new_loc = adjust_address_nv (loc[n_var_parts],
8364 wider_mode, 0);
8365 }
8366
8367 if (new_loc)
8368 {
8369 loc[n_var_parts] = new_loc;
8370 mode = wider_mode;
8371 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
8372 i = j;
8373 }
8374 }
8375 ++n_var_parts;
8376 }
8377 type_size_unit = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8378 if ((unsigned HOST_WIDE_INT) last_limit < TREE_INT_CST_LOW (type_size_unit))
8379 complete = false;
8380
8381 if (! flag_var_tracking_uninit)
8382 initialized = VAR_INIT_STATUS_INITIALIZED;
8383
8384 note_vl = NULL_RTX;
8385 if (!complete)
8386 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, NULL_RTX,
8387 (int) initialized);
8388 else if (n_var_parts == 1)
8389 {
8390 rtx expr_list;
8391
8392 if (offsets[0] || GET_CODE (loc[0]) == PARALLEL)
8393 expr_list = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
8394 else
8395 expr_list = loc[0];
8396
8397 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list,
8398 (int) initialized);
8399 }
8400 else if (n_var_parts)
8401 {
8402 rtx parallel;
8403
8404 for (i = 0; i < n_var_parts; i++)
8405 loc[i]
8406 = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));
8407
8408 parallel = gen_rtx_PARALLEL (VOIDmode,
8409 gen_rtvec_v (n_var_parts, loc));
8410 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl,
8411 parallel, (int) initialized);
8412 }
8413
8414 if (where != EMIT_NOTE_BEFORE_INSN)
8415 {
8416 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8417 if (where == EMIT_NOTE_AFTER_CALL_INSN)
8418 NOTE_DURING_CALL_P (note) = true;
8419 }
8420 else
8421 {
8422 /* Make sure that the call-related notes come first. */
8423 while (NEXT_INSN (insn)
8424 && NOTE_P (insn)
8425 && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8426 && NOTE_DURING_CALL_P (insn))
8427 || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
8428 insn = NEXT_INSN (insn);
8429 if (NOTE_P (insn)
8430 && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8431 && NOTE_DURING_CALL_P (insn))
8432 || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
8433 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8434 else
8435 note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
8436 }
8437 NOTE_VAR_LOCATION (note) = note_vl;
8438
8439 set_dv_changed (var->dv, false);
8440 gcc_assert (var->in_changed_variables);
8441 var->in_changed_variables = false;
8442 htab_clear_slot (changed_variables, varp);
8443
8444 /* Continue traversing the hash table. */
8445 return 1;
8446 }
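
/* To illustrate the payloads built above (shapes only; registers,
   modes and offsets are made up): a complete single-part variable
   at offset 0 gets

     (var_location d (reg:SI 0) ...)

   two adjacent four-byte parts that could not be merged get

     (var_location d (parallel [(expr_list (reg:SI 0) (const_int 0))
                                (expr_list (reg:SI 1) (const_int 4))])
        ...)

   and an incomplete variable gets a NULL_RTX location.  */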
8447
8448 /* While traversing changed_variables, push onto DATA (a stack of RTX
8449 values) entries that aren't user variables. */
8450
8451 static int
8452 values_to_stack (void **slot, void *data)
8453 {
8454 VEC (rtx, stack) **changed_values_stack = (VEC (rtx, stack) **)data;
8455 variable var = (variable) *slot;
8456
8457 if (var->onepart == ONEPART_VALUE)
8458 VEC_safe_push (rtx, stack, *changed_values_stack, dv_as_value (var->dv));
8459 else if (var->onepart == ONEPART_DEXPR)
8460 VEC_safe_push (rtx, stack, *changed_values_stack,
8461 DECL_RTL_KNOWN_SET (dv_as_decl (var->dv)));
8462
8463 return 1;
8464 }
8465
8466 /* Remove from changed_variables the entry whose DV corresponds to
8467 value or debug_expr VAL. */
8468 static void
8469 remove_value_from_changed_variables (rtx val)
8470 {
8471 decl_or_value dv = dv_from_rtx (val);
8472 void **slot;
8473 variable var;
8474
8475 slot = htab_find_slot_with_hash (changed_variables,
8476 dv, dv_htab_hash (dv), NO_INSERT);
8477 var = (variable) *slot;
8478 var->in_changed_variables = false;
8479 htab_clear_slot (changed_variables, slot);
8480 }
8481
8482 /* If VAL (a value or debug_expr) has backlinks to variables actively
8483 dependent on it in HTAB or in CHANGED_VARIABLES, mark them as
8484 changed, adding to CHANGED_VALUES_STACK any dependencies that may
8485 have dependencies of their own to notify. */
8486
8487 static void
8488 notify_dependents_of_changed_value (rtx val, htab_t htab,
8489 VEC (rtx, stack) **changed_values_stack)
8490 {
8491 void **slot;
8492 variable var;
8493 loc_exp_dep *led;
8494 decl_or_value dv = dv_from_rtx (val);
8495
8496 slot = htab_find_slot_with_hash (changed_variables,
8497 dv, dv_htab_hash (dv), NO_INSERT);
8498 if (!slot)
8499 slot = htab_find_slot_with_hash (htab,
8500 dv, dv_htab_hash (dv), NO_INSERT);
8501 if (!slot)
8502 slot = htab_find_slot_with_hash (dropped_values,
8503 dv, dv_htab_hash (dv), NO_INSERT);
8504 var = (variable) *slot;
8505
8506 while ((led = VAR_LOC_DEP_LST (var)))
8507 {
8508 decl_or_value ldv = led->dv;
8509 variable ivar;
8510
8511 /* Deactivate and remove the backlink, as it was "used up". It
8512 makes no sense to attempt to notify the same entity again:
8513 either it will be recomputed and re-register an active
8514 dependency, or it will still have the changed mark. */
8515 if (led->next)
8516 led->next->pprev = led->pprev;
8517 if (led->pprev)
8518 *led->pprev = led->next;
8519 led->next = NULL;
8520 led->pprev = NULL;
8521
8522 if (dv_changed_p (ldv))
8523 continue;
8524
8525 switch (dv_onepart_p (ldv))
8526 {
8527 case ONEPART_VALUE:
8528 case ONEPART_DEXPR:
8529 set_dv_changed (ldv, true);
8530 VEC_safe_push (rtx, stack, *changed_values_stack, dv_as_rtx (ldv));
8531 break;
8532
8533 case ONEPART_VDECL:
8534 ivar = (variable) htab_find_with_hash (htab, ldv, dv_htab_hash (ldv));
8535 gcc_checking_assert (!VAR_LOC_DEP_LST (ivar));
8536 variable_was_changed (ivar, NULL);
8537 break;
8538
8539 case NOT_ONEPART:
8540 pool_free (loc_exp_dep_pool, led);
8541 ivar = (variable) htab_find_with_hash (htab, ldv, dv_htab_hash (ldv));
8542 if (ivar)
8543 {
8544 int i = ivar->n_var_parts;
8545 while (i--)
8546 {
8547 rtx loc = ivar->var_part[i].cur_loc;
8548
8549 if (loc && GET_CODE (loc) == MEM
8550 && XEXP (loc, 0) == val)
8551 {
8552 variable_was_changed (ivar, NULL);
8553 break;
8554 }
8555 }
8556 }
8557 break;
8558
8559 default:
8560 gcc_unreachable ();
8561 }
8562 }
8563 }
8564
8565 /* Take out of changed_variables any entries that don't refer to user
8566 variables. Back-propagate change notifications from values and
8567 debug_exprs to their active dependencies in HTAB or in
8568 CHANGED_VARIABLES. */
8569
8570 static void
8571 process_changed_values (htab_t htab)
8572 {
8573 int i, n;
8574 rtx val;
8575 VEC (rtx, stack) *changed_values_stack = VEC_alloc (rtx, stack, 20);
8576
8577 /* Move values from changed_variables to changed_values_stack. */
8578 htab_traverse (changed_variables, values_to_stack, &changed_values_stack);
8579
8580 /* Back-propagate change notifications in values while popping
8581 them from the stack. */
8582 for (n = i = VEC_length (rtx, changed_values_stack);
8583 i > 0; i = VEC_length (rtx, changed_values_stack))
8584 {
8585 val = VEC_pop (rtx, changed_values_stack);
8586 notify_dependents_of_changed_value (val, htab, &changed_values_stack);
8587
8588 /* This condition will hold when visiting each of the entries
8589 originally in changed_variables. We can't remove them
8590 earlier because this could drop the backlinks before we got a
8591 chance to use them. */
8592 if (i == n)
8593 {
8594 remove_value_from_changed_variables (val);
8595 n--;
8596 }
8597 }
8598
8599 VEC_free (rtx, stack, changed_values_stack);
8600 }
8601
8602 /* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
8603 CHANGED_VARIABLES and delete this chain. WHERE specifies whether
8604 the notes shall be emitted before or after instruction INSN. */
8605
8606 static void
8607 emit_notes_for_changes (rtx insn, enum emit_note_where where,
8608 shared_hash vars)
8609 {
8610 emit_note_data data;
8611 htab_t htab = shared_hash_htab (vars);
8612
8613 if (!htab_elements (changed_variables))
8614 return;
8615
8616 if (MAY_HAVE_DEBUG_INSNS)
8617 process_changed_values (htab);
8618
8619 data.insn = insn;
8620 data.where = where;
8621 data.vars = htab;
8622
8623 htab_traverse (changed_variables, emit_note_insn_var_location, &data);
8624 }
8625
8626 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
8627 same variable in hash table DATA or is not there at all. */
8628
8629 static int
8630 emit_notes_for_differences_1 (void **slot, void *data)
8631 {
8632 htab_t new_vars = (htab_t) data;
8633 variable old_var, new_var;
8634
8635 old_var = (variable) *slot;
8636 new_var = (variable) htab_find_with_hash (new_vars, old_var->dv,
8637 dv_htab_hash (old_var->dv));
8638
8639 if (!new_var)
8640 {
8641 /* Variable has disappeared. */
8642 variable empty_var = NULL;
8643
8644 if (old_var->onepart == ONEPART_VALUE
8645 || old_var->onepart == ONEPART_DEXPR)
8646 {
8647 empty_var = variable_from_dropped (old_var->dv, NO_INSERT);
8648 if (empty_var)
8649 {
8650 gcc_checking_assert (!empty_var->in_changed_variables);
8651 if (!VAR_LOC_1PAUX (old_var))
8652 {
8653 VAR_LOC_1PAUX (old_var) = VAR_LOC_1PAUX (empty_var);
8654 VAR_LOC_1PAUX (empty_var) = NULL;
8655 }
8656 else
8657 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
8658 }
8659 }
8660
8661 if (!empty_var)
8662 {
8663 empty_var = (variable) pool_alloc (onepart_pool (old_var->onepart));
8664 empty_var->dv = old_var->dv;
8665 empty_var->refcount = 0;
8666 empty_var->n_var_parts = 0;
8667 empty_var->onepart = old_var->onepart;
8668 empty_var->in_changed_variables = false;
8669 }
8670
8671 if (empty_var->onepart)
8672 {
8673 /* Propagate the auxiliary data to (ultimately)
8674 changed_variables. */
8675 empty_var->var_part[0].loc_chain = NULL;
8676 empty_var->var_part[0].cur_loc = NULL;
8677 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (old_var);
8678 VAR_LOC_1PAUX (old_var) = NULL;
8679 }
8680 variable_was_changed (empty_var, NULL);
8681 /* Continue traversing the hash table. */
8682 return 1;
8683 }
8684 /* Update cur_loc and one-part auxiliary data, before new_var goes
8685 through variable_was_changed. */
8686 if (old_var != new_var && new_var->onepart)
8687 {
8688 gcc_checking_assert (VAR_LOC_1PAUX (new_var) == NULL);
8689 VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (old_var);
8690 VAR_LOC_1PAUX (old_var) = NULL;
8691 new_var->var_part[0].cur_loc = old_var->var_part[0].cur_loc;
8692 }
8693 if (variable_different_p (old_var, new_var))
8694 variable_was_changed (new_var, NULL);
8695
8696 /* Continue traversing the hash table. */
8697 return 1;
8698 }
8699
8700 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
8701 table DATA. */
8702
8703 static int
8704 emit_notes_for_differences_2 (void **slot, void *data)
8705 {
8706 htab_t old_vars = (htab_t) data;
8707 variable old_var, new_var;
8708
8709 new_var = (variable) *slot;
8710 old_var = (variable) htab_find_with_hash (old_vars, new_var->dv,
8711 dv_htab_hash (new_var->dv));
8712 if (!old_var)
8713 {
8714 int i;
8715 for (i = 0; i < new_var->n_var_parts; i++)
8716 new_var->var_part[i].cur_loc = NULL;
8717 variable_was_changed (new_var, NULL);
8718 }
8719
8720 /* Continue traversing the hash table. */
8721 return 1;
8722 }
8723
8724 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
8725 NEW_SET. */
8726
8727 static void
8728 emit_notes_for_differences (rtx insn, dataflow_set *old_set,
8729 dataflow_set *new_set)
8730 {
8731 htab_traverse (shared_hash_htab (old_set->vars),
8732 emit_notes_for_differences_1,
8733 shared_hash_htab (new_set->vars));
8734 htab_traverse (shared_hash_htab (new_set->vars),
8735 emit_notes_for_differences_2,
8736 shared_hash_htab (old_set->vars));
8737 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
8738 }
8739
8740 /* Return the next insn after INSN that is not a NOTE_INSN_VAR_LOCATION. */
8741
8742 static rtx
8743 next_non_note_insn_var_location (rtx insn)
8744 {
8745 while (insn)
8746 {
8747 insn = NEXT_INSN (insn);
8748 if (insn == 0
8749 || !NOTE_P (insn)
8750 || NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION)
8751 break;
8752 }
8753
8754 return insn;
8755 }
8756
8757 /* Emit the notes for changes of location parts in the basic block BB. */
8758
8759 static void
8760 emit_notes_in_bb (basic_block bb, dataflow_set *set)
8761 {
8762 unsigned int i;
8763 micro_operation *mo;
8764
8765 dataflow_set_clear (set);
8766 dataflow_set_copy (set, &VTI (bb)->in);
8767
8768 FOR_EACH_VEC_ELT (micro_operation, VTI (bb)->mos, i, mo)
8769 {
8770 rtx insn = mo->insn;
8771 rtx next_insn = next_non_note_insn_var_location (insn);
8772
8773 switch (mo->type)
8774 {
8775 case MO_CALL:
8776 dataflow_set_clear_at_call (set);
8777 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
8778 {
8779 rtx arguments = mo->u.loc, *p = &arguments, note;
8780 while (*p)
8781 {
8782 XEXP (XEXP (*p, 0), 1)
8783 = vt_expand_loc (XEXP (XEXP (*p, 0), 1),
8784 shared_hash_htab (set->vars));
8785 /* If expansion is successful, keep it in the list. */
8786 if (XEXP (XEXP (*p, 0), 1))
8787 p = &XEXP (*p, 1);
8788 /* Otherwise, if the following item is the data_value for it,
8789 drop it too. */
8790 else if (XEXP (*p, 1)
8791 && REG_P (XEXP (XEXP (*p, 0), 0))
8792 && MEM_P (XEXP (XEXP (XEXP (*p, 1), 0), 0))
8793 && REG_P (XEXP (XEXP (XEXP (XEXP (*p, 1), 0), 0),
8794 0))
8795 && REGNO (XEXP (XEXP (*p, 0), 0))
8796 == REGNO (XEXP (XEXP (XEXP (XEXP (*p, 1), 0),
8797 0), 0)))
8798 *p = XEXP (XEXP (*p, 1), 1);
8799 /* Just drop this item. */
8800 else
8801 *p = XEXP (*p, 1);
8802 }
8803 note = emit_note_after (NOTE_INSN_CALL_ARG_LOCATION, insn);
8804 NOTE_VAR_LOCATION (note) = arguments;
8805 }
8806 break;
8807
8808 case MO_USE:
8809 {
8810 rtx loc = mo->u.loc;
8811
8812 if (REG_P (loc))
8813 var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
8814 else
8815 var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
8816
8817 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
8818 }
8819 break;
8820
8821 case MO_VAL_LOC:
8822 {
8823 rtx loc = mo->u.loc;
8824 rtx val, vloc;
8825 tree var;
8826
8827 if (GET_CODE (loc) == CONCAT)
8828 {
8829 val = XEXP (loc, 0);
8830 vloc = XEXP (loc, 1);
8831 }
8832 else
8833 {
8834 val = NULL_RTX;
8835 vloc = loc;
8836 }
8837
8838 var = PAT_VAR_LOCATION_DECL (vloc);
8839
8840 clobber_variable_part (set, NULL_RTX,
8841 dv_from_decl (var), 0, NULL_RTX);
8842 if (val)
8843 {
8844 if (VAL_NEEDS_RESOLUTION (loc))
8845 val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
8846 set_variable_part (set, val, dv_from_decl (var), 0,
8847 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
8848 INSERT);
8849 }
8850 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
8851 set_variable_part (set, PAT_VAR_LOCATION_LOC (vloc),
8852 dv_from_decl (var), 0,
8853 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
8854 INSERT);
8855
8856 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
8857 }
8858 break;
8859
8860 case MO_VAL_USE:
8861 {
8862 rtx loc = mo->u.loc;
8863 rtx val, vloc, uloc;
8864
8865 vloc = uloc = XEXP (loc, 1);
8866 val = XEXP (loc, 0);
8867
8868 if (GET_CODE (val) == CONCAT)
8869 {
8870 uloc = XEXP (val, 1);
8871 val = XEXP (val, 0);
8872 }
8873
8874 if (VAL_NEEDS_RESOLUTION (loc))
8875 val_resolve (set, val, vloc, insn);
8876 else
8877 val_store (set, val, uloc, insn, false);
8878
8879 if (VAL_HOLDS_TRACK_EXPR (loc))
8880 {
8881 if (GET_CODE (uloc) == REG)
8882 var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
8883 NULL);
8884 else if (GET_CODE (uloc) == MEM)
8885 var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
8886 NULL);
8887 }
8888
8889 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
8890 }
8891 break;
8892
8893 case MO_VAL_SET:
8894 {
8895 rtx loc = mo->u.loc;
8896 rtx val, vloc, uloc;
8897 rtx dstv, srcv;
8898
8899 vloc = loc;
8900 uloc = XEXP (vloc, 1);
8901 val = XEXP (vloc, 0);
8902 vloc = uloc;
8903
8904 if (GET_CODE (uloc) == SET)
8905 {
8906 dstv = SET_DEST (uloc);
8907 srcv = SET_SRC (uloc);
8908 }
8909 else
8910 {
8911 dstv = uloc;
8912 srcv = NULL;
8913 }
8914
8915 if (GET_CODE (val) == CONCAT)
8916 {
8917 dstv = vloc = XEXP (val, 1);
8918 val = XEXP (val, 0);
8919 }
8920
8921 if (GET_CODE (vloc) == SET)
8922 {
8923 srcv = SET_SRC (vloc);
8924
8925 gcc_assert (val != srcv);
8926 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
8927
8928 dstv = vloc = SET_DEST (vloc);
8929
8930 if (VAL_NEEDS_RESOLUTION (loc))
8931 val_resolve (set, val, srcv, insn);
8932 }
8933 else if (VAL_NEEDS_RESOLUTION (loc))
8934 {
8935 gcc_assert (GET_CODE (uloc) == SET
8936 && GET_CODE (SET_SRC (uloc)) == REG);
8937 val_resolve (set, val, SET_SRC (uloc), insn);
8938 }
8939
8940 if (VAL_HOLDS_TRACK_EXPR (loc))
8941 {
8942 if (VAL_EXPR_IS_CLOBBERED (loc))
8943 {
8944 if (REG_P (uloc))
8945 var_reg_delete (set, uloc, true);
8946 else if (MEM_P (uloc))
8947 {
8948 gcc_assert (MEM_P (dstv));
8949 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
8950 var_mem_delete (set, dstv, true);
8951 }
8952 }
8953 else
8954 {
8955 bool copied_p = VAL_EXPR_IS_COPIED (loc);
8956 rtx src = NULL, dst = uloc;
8957 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
8958
8959 if (GET_CODE (uloc) == SET)
8960 {
8961 src = SET_SRC (uloc);
8962 dst = SET_DEST (uloc);
8963 }
8964
8965 if (copied_p)
8966 {
8967 status = find_src_status (set, src);
8968
8969 src = find_src_set_src (set, src);
8970 }
8971
8972 if (REG_P (dst))
8973 var_reg_delete_and_set (set, dst, !copied_p,
8974 status, srcv);
8975 else if (MEM_P (dst))
8976 {
8977 gcc_assert (MEM_P (dstv));
8978 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
8979 var_mem_delete_and_set (set, dstv, !copied_p,
8980 status, srcv);
8981 }
8982 }
8983 }
8984 else if (REG_P (uloc))
8985 var_regno_delete (set, REGNO (uloc));
8986 else if (MEM_P (uloc))
8987 clobber_overlapping_mems (set, uloc);
8988
8989 val_store (set, val, dstv, insn, true);
8990
8991 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
8992 set->vars);
8993 }
8994 break;
8995
8996 case MO_SET:
8997 {
8998 rtx loc = mo->u.loc;
8999 rtx set_src = NULL;
9000
9001 if (GET_CODE (loc) == SET)
9002 {
9003 set_src = SET_SRC (loc);
9004 loc = SET_DEST (loc);
9005 }
9006
9007 if (REG_P (loc))
9008 var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
9009 set_src);
9010 else
9011 var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
9012 set_src);
9013
9014 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9015 set->vars);
9016 }
9017 break;
9018
9019 case MO_COPY:
9020 {
9021 rtx loc = mo->u.loc;
9022 enum var_init_status src_status;
9023 rtx set_src = NULL;
9024
9025 if (GET_CODE (loc) == SET)
9026 {
9027 set_src = SET_SRC (loc);
9028 loc = SET_DEST (loc);
9029 }
9030
9031 src_status = find_src_status (set, set_src);
9032 set_src = find_src_set_src (set, set_src);
9033
9034 if (REG_P (loc))
9035 var_reg_delete_and_set (set, loc, false, src_status, set_src);
9036 else
9037 var_mem_delete_and_set (set, loc, false, src_status, set_src);
9038
9039 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9040 set->vars);
9041 }
9042 break;
9043
9044 case MO_USE_NO_VAR:
9045 {
9046 rtx loc = mo->u.loc;
9047
9048 if (REG_P (loc))
9049 var_reg_delete (set, loc, false);
9050 else
9051 var_mem_delete (set, loc, false);
9052
9053 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
9054 }
9055 break;
9056
9057 case MO_CLOBBER:
9058 {
9059 rtx loc = mo->u.loc;
9060
9061 if (REG_P (loc))
9062 var_reg_delete (set, loc, true);
9063 else
9064 var_mem_delete (set, loc, true);
9065
9066 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9067 set->vars);
9068 }
9069 break;
9070
9071 case MO_ADJUST:
9072 set->stack_adjust += mo->u.adjust;
9073 break;
9074 }
9075 }
9076 }
9077
9078 /* Emit notes for the whole function. */
9079
9080 static void
9081 vt_emit_notes (void)
9082 {
9083 basic_block bb;
9084 dataflow_set cur;
9085
9086 gcc_assert (!htab_elements (changed_variables));
9087
9088 /* Free memory occupied by the out hash tables, as they aren't used
9089 anymore. */
9090 FOR_EACH_BB (bb)
9091 dataflow_set_clear (&VTI (bb)->out);
9092
9093 /* Enable emitting notes by functions (mainly by set_variable_part and
9094 delete_variable_part). */
9095 emit_notes = true;
9096
9097 if (MAY_HAVE_DEBUG_INSNS)
9098 {
9099 dropped_values = htab_create (cselib_get_next_uid () * 2,
9100 variable_htab_hash, variable_htab_eq,
9101 variable_htab_free);
9102 loc_exp_dep_pool = create_alloc_pool ("loc_exp_dep pool",
9103 sizeof (loc_exp_dep), 64);
9104 }
9105
9106 dataflow_set_init (&cur);
9107
9108 FOR_EACH_BB (bb)
9109 {
9110 /* Emit the notes for changes of variable locations between two
9111 subsequent basic blocks. */
9112 emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);
9113
9114 /* Emit the notes for the changes in the basic block itself. */
9115 emit_notes_in_bb (bb, &cur);
9116
9117 /* Free memory occupied by the in hash table, we won't need it
9118 again. */
9119 dataflow_set_clear (&VTI (bb)->in);
9120 }
9121 #ifdef ENABLE_CHECKING
9122 htab_traverse (shared_hash_htab (cur.vars),
9123 emit_notes_for_differences_1,
9124 shared_hash_htab (empty_shared_hash));
9125 #endif
9126 dataflow_set_destroy (&cur);
9127
9128 if (MAY_HAVE_DEBUG_INSNS)
9129 {
9130 free_alloc_pool (loc_exp_dep_pool);
9131 loc_exp_dep_pool = NULL;
9132 htab_delete (dropped_values);
9133 }
9134
9135 emit_notes = false;
9136 }
9137
9138 /* If there is a declaration and offset associated with register/memory RTL
9139 assign declaration to *DECLP and offset to *OFFSETP, and return true. */
9140
9141 static bool
9142 vt_get_decl_and_offset (rtx rtl, tree *declp, HOST_WIDE_INT *offsetp)
9143 {
9144 if (REG_P (rtl))
9145 {
9146 if (REG_ATTRS (rtl))
9147 {
9148 *declp = REG_EXPR (rtl);
9149 *offsetp = REG_OFFSET (rtl);
9150 return true;
9151 }
9152 }
9153 else if (MEM_P (rtl))
9154 {
9155 if (MEM_ATTRS (rtl))
9156 {
9157 *declp = MEM_EXPR (rtl);
9158 *offsetp = INT_MEM_OFFSET (rtl);
9159 return true;
9160 }
9161 }
9162 return false;
9163 }
9164
9165 /* Record the value for the ENTRY_VALUE of RTL as a global equivalence
9166 of VAL. */
9167
9168 static void
9169 record_entry_value (cselib_val *val, rtx rtl)
9170 {
9171 rtx ev = gen_rtx_ENTRY_VALUE (GET_MODE (rtl));
9172
9173 ENTRY_VALUE_EXP (ev) = rtl;
9174
9175 cselib_add_permanent_equiv (val, ev, get_insns ());
9176 }
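
/* For example (hypothetical register): for a parameter whose VALUE
   arrives in (reg:SI 5 di), this records (entry_value:SI (reg:SI 5 di))
   as a permanent equivalence, from which the DWARF writer can later
   emit DW_OP_GNU_entry_value expressions.  */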
9177
9178 /* Insert function parameter PARM in IN and OUT sets of ENTRY_BLOCK. */
9179
9180 static void
9181 vt_add_function_parameter (tree parm)
9182 {
9183 rtx decl_rtl = DECL_RTL_IF_SET (parm);
9184 rtx incoming = DECL_INCOMING_RTL (parm);
9185 tree decl;
9186 enum machine_mode mode;
9187 HOST_WIDE_INT offset;
9188 dataflow_set *out;
9189 decl_or_value dv;
9190
9191 if (TREE_CODE (parm) != PARM_DECL)
9192 return;
9193
9194 if (!decl_rtl || !incoming)
9195 return;
9196
9197 if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
9198 return;
9199
9200 /* If there is a DRAP register, rewrite the incoming location of parameters
9201 passed on the stack into MEMs based on the argument pointer, as the DRAP
9202 register can be reused for other purposes and we do not track locations
9203 based on generic registers. But the prerequisite is that this argument
9204 pointer be also the virtual CFA pointer, see vt_initialize. */
9205 if (MEM_P (incoming)
9206 && stack_realign_drap
9207 && arg_pointer_rtx == cfa_base_rtx
9208 && (XEXP (incoming, 0) == crtl->args.internal_arg_pointer
9209 || (GET_CODE (XEXP (incoming, 0)) == PLUS
9210 && XEXP (XEXP (incoming, 0), 0)
9211 == crtl->args.internal_arg_pointer
9212 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
9213 {
9214 HOST_WIDE_INT off = -FIRST_PARM_OFFSET (current_function_decl);
9215 if (GET_CODE (XEXP (incoming, 0)) == PLUS)
9216 off += INTVAL (XEXP (XEXP (incoming, 0), 1));
9217 incoming
9218 = replace_equiv_address_nv (incoming,
9219 plus_constant (Pmode,
9220 arg_pointer_rtx, off));
9221 }
9222
9223 #ifdef HAVE_window_save
9224 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
9225 If the target machine has an explicit window save instruction, the
9226 actual entry value is the corresponding OUTGOING_REGNO instead. */
9227 if (REG_P (incoming)
9228 && HARD_REGISTER_P (incoming)
9229 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
9230 {
9231 parm_reg_t *p
9232 = VEC_safe_push (parm_reg_t, gc, windowed_parm_regs, NULL);
9233 p->incoming = incoming;
9234 incoming
9235 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
9236 OUTGOING_REGNO (REGNO (incoming)), 0);
9237 p->outgoing = incoming;
9238 }
9239 else if (MEM_P (incoming)
9240 && REG_P (XEXP (incoming, 0))
9241 && HARD_REGISTER_P (XEXP (incoming, 0)))
9242 {
9243 rtx reg = XEXP (incoming, 0);
9244 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
9245 {
9246 parm_reg_t *p
9247 = VEC_safe_push (parm_reg_t, gc, windowed_parm_regs, NULL);
9248 p->incoming = reg;
9249 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
9250 p->outgoing = reg;
9251 incoming = replace_equiv_address_nv (incoming, reg);
9252 }
9253 }
9254 #endif
9255
9256 if (!vt_get_decl_and_offset (incoming, &decl, &offset))
9257 {
9258 if (REG_P (incoming) || MEM_P (incoming))
9259 {
9260 /* This means argument is passed by invisible reference. */
9261 offset = 0;
9262 decl = parm;
9263 incoming = gen_rtx_MEM (GET_MODE (decl_rtl), incoming);
9264 }
9265 else
9266 {
9267 if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
9268 return;
9269 offset += byte_lowpart_offset (GET_MODE (incoming),
9270 GET_MODE (decl_rtl));
9271 }
9272 }
9273
9274 if (!decl)
9275 return;
9276
9277 if (parm != decl)
9278 {
9279 /* Assume that DECL_RTL was a pseudo that got spilled to
9280 memory. The spill slot sharing code will force the
9281 memory to reference spill_slot_decl (%sfp), so we don't
9282 match above. That's ok, the pseudo must have referenced
9283 the entire parameter, so just reset OFFSET. */
9284 gcc_assert (decl == get_spill_slot_decl (false));
9285 offset = 0;
9286 }
9287
9288 if (!track_loc_p (incoming, parm, offset, false, &mode, &offset))
9289 return;
9290
9291 out = &VTI (ENTRY_BLOCK_PTR)->out;
9292
9293 dv = dv_from_decl (parm);
9294
9295 if (target_for_debug_bind (parm)
9296 /* We can't deal with these right now, because this kind of
9297 variable is single-part. ??? We could handle parallels
9298 that describe multiple locations for the same single
9299 value, but ATM we don't. */
9300 && GET_CODE (incoming) != PARALLEL)
9301 {
9302 cselib_val *val;
9303
9304 /* ??? We shouldn't ever hit this, but it may happen because
9305 arguments passed by invisible reference aren't dealt with
9306 above: incoming-rtl will have Pmode rather than the
9307 expected mode for the type. */
9308 if (offset)
9309 return;
9310
9311 val = cselib_lookup_from_insn (var_lowpart (mode, incoming), mode, true,
9312 VOIDmode, get_insns ());
9313
9314 /* ??? Float-typed values in memory are not handled by
9315 cselib. */
9316 if (val)
9317 {
9318 preserve_value (val);
9319 set_variable_part (out, val->val_rtx, dv, offset,
9320 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9321 dv = dv_from_value (val->val_rtx);
9322 }
9323 }
9324
9325 if (REG_P (incoming))
9326 {
9327 incoming = var_lowpart (mode, incoming);
9328 gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
9329 attrs_list_insert (&out->regs[REGNO (incoming)], dv, offset,
9330 incoming);
9331 set_variable_part (out, incoming, dv, offset,
9332 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9333 if (dv_is_value_p (dv))
9334 {
9335 record_entry_value (CSELIB_VAL_PTR (dv_as_value (dv)), incoming);
9336 if (TREE_CODE (TREE_TYPE (parm)) == REFERENCE_TYPE
9337 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))
9338 {
9339 enum machine_mode indmode
9340 = TYPE_MODE (TREE_TYPE (TREE_TYPE (parm)));
9341 rtx mem = gen_rtx_MEM (indmode, incoming);
9342 cselib_val *val = cselib_lookup_from_insn (mem, indmode, true,
9343 VOIDmode,
9344 get_insns ());
9345 if (val)
9346 {
9347 preserve_value (val);
9348 record_entry_value (val, mem);
9349 set_variable_part (out, mem, dv_from_value (val->val_rtx), 0,
9350 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9351 }
9352 }
9353 }
9354 }
9355 else if (MEM_P (incoming))
9356 {
9357 incoming = var_lowpart (mode, incoming);
9358 set_variable_part (out, incoming, dv, offset,
9359 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9360 }
9361 }
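
/* A worked example (hypothetical modes and registers): a small
   aggregate passed by invisible reference arrives as a pointer in
   (reg:DI 5 di) with no usable REG_ATTRS, so vt_get_decl_and_offset
   fails; the code above then records the parameter's entry location
   as (mem:TI (reg:DI 5 di)) at offset 0.  */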
9362
9363 /* Insert function parameters to IN and OUT sets of ENTRY_BLOCK. */
9364
9365 static void
9366 vt_add_function_parameters (void)
9367 {
9368 tree parm;
9369
9370 for (parm = DECL_ARGUMENTS (current_function_decl);
9371 parm; parm = DECL_CHAIN (parm))
9372 vt_add_function_parameter (parm);
9373
9374 if (DECL_HAS_VALUE_EXPR_P (DECL_RESULT (current_function_decl)))
9375 {
9376 tree vexpr = DECL_VALUE_EXPR (DECL_RESULT (current_function_decl));
9377
9378 if (TREE_CODE (vexpr) == INDIRECT_REF)
9379 vexpr = TREE_OPERAND (vexpr, 0);
9380
9381 if (TREE_CODE (vexpr) == PARM_DECL
9382 && DECL_ARTIFICIAL (vexpr)
9383 && !DECL_IGNORED_P (vexpr)
9384 && DECL_NAMELESS (vexpr))
9385 vt_add_function_parameter (vexpr);
9386 }
9387 }
9388
9389 /* Return true if INSN in the prologue initializes hard_frame_pointer_rtx. */
9390
9391 static bool
9392 fp_setter (rtx insn)
9393 {
9394 rtx pat = PATTERN (insn);
9395 if (RTX_FRAME_RELATED_P (insn))
9396 {
9397 rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
9398 if (expr)
9399 pat = XEXP (expr, 0);
9400 }
9401 if (GET_CODE (pat) == SET)
9402 return SET_DEST (pat) == hard_frame_pointer_rtx;
9403 else if (GET_CODE (pat) == PARALLEL)
9404 {
9405 int i;
9406 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
9407 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
9408 && SET_DEST (XVECEXP (pat, 0, i)) == hard_frame_pointer_rtx)
9409 return true;
9410 }
9411 return false;
9412 }
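
/* For instance, a typical prologue move such as

     (insn ... (set (reg/f:DI 6 bp) (reg/f:DI 7 sp)) ...)

   or a PARALLEL containing such a SET (possibly supplied via a
   REG_FRAME_RELATED_EXPR note) is recognized as the frame pointer
   setter here.  */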
9413
9414 /* Initialize cfa_base_rtx, create a preserved VALUE for it and
9415 ensure it isn't flushed during cselib_reset_table.
9416 Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
9417 has been eliminated. */
9418
9419 static void
9420 vt_init_cfa_base (void)
9421 {
9422 cselib_val *val;
9423
9424 #ifdef FRAME_POINTER_CFA_OFFSET
9425 cfa_base_rtx = frame_pointer_rtx;
9426 cfa_base_offset = -FRAME_POINTER_CFA_OFFSET (current_function_decl);
9427 #else
9428 cfa_base_rtx = arg_pointer_rtx;
9429 cfa_base_offset = -ARG_POINTER_CFA_OFFSET (current_function_decl);
9430 #endif
9431 if (cfa_base_rtx == hard_frame_pointer_rtx
9432 || !fixed_regs[REGNO (cfa_base_rtx)])
9433 {
9434 cfa_base_rtx = NULL_RTX;
9435 return;
9436 }
9437 if (!MAY_HAVE_DEBUG_INSNS)
9438 return;
9439
9440 /* Tell alias analysis that cfa_base_rtx should share
9441 find_base_term value with stack pointer or hard frame pointer. */
9442 if (!frame_pointer_needed)
9443 vt_equate_reg_base_value (cfa_base_rtx, stack_pointer_rtx);
9444 else if (!crtl->stack_realign_tried)
9445 vt_equate_reg_base_value (cfa_base_rtx, hard_frame_pointer_rtx);
9446
9447 val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1,
9448 VOIDmode, get_insns ());
9449 preserve_value (val);
9450 cselib_preserve_cfa_base_value (val, REGNO (cfa_base_rtx));
9451 var_reg_decl_set (&VTI (ENTRY_BLOCK_PTR)->out, cfa_base_rtx,
9452 VAR_INIT_STATUS_INITIALIZED, dv_from_value (val->val_rtx),
9453 0, NULL_RTX, INSERT);
9454 }
9455
9456 /* Allocate and initialize the data structures for variable tracking
9457 and parse the RTL to get the micro operations. */
9458
9459 static bool
9460 vt_initialize (void)
9461 {
9462 basic_block bb, prologue_bb = single_succ (ENTRY_BLOCK_PTR);
9463 HOST_WIDE_INT fp_cfa_offset = -1;
9464
9465 alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));
9466
9467 attrs_pool = create_alloc_pool ("attrs_def pool",
9468 sizeof (struct attrs_def), 1024);
9469 var_pool = create_alloc_pool ("variable_def pool",
9470 sizeof (struct variable_def)
9471 + (MAX_VAR_PARTS - 1)
9472 * sizeof (((variable)NULL)->var_part[0]), 64);
9473 loc_chain_pool = create_alloc_pool ("location_chain_def pool",
9474 sizeof (struct location_chain_def),
9475 1024);
9476 shared_hash_pool = create_alloc_pool ("shared_hash_def pool",
9477 sizeof (struct shared_hash_def), 256);
9478 empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
9479 empty_shared_hash->refcount = 1;
9480 empty_shared_hash->htab
9481 = htab_create (1, variable_htab_hash, variable_htab_eq,
9482 variable_htab_free);
9483 changed_variables = htab_create (10, variable_htab_hash, variable_htab_eq,
9484 variable_htab_free);
9485
9486 /* Init the IN and OUT sets. */
9487 FOR_ALL_BB (bb)
9488 {
9489 VTI (bb)->visited = false;
9490 VTI (bb)->flooded = false;
9491 dataflow_set_init (&VTI (bb)->in);
9492 dataflow_set_init (&VTI (bb)->out);
9493 VTI (bb)->permp = NULL;
9494 }
9495
9496 if (MAY_HAVE_DEBUG_INSNS)
9497 {
9498 cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
9499 scratch_regs = BITMAP_ALLOC (NULL);
9500 valvar_pool = create_alloc_pool ("small variable_def pool",
9501 sizeof (struct variable_def), 256);
9502 preserved_values = VEC_alloc (rtx, heap, 256);
9503 }
9504 else
9505 {
9506 scratch_regs = NULL;
9507 valvar_pool = NULL;
9508 }
9509
9510 /* In order to factor out the adjustments made to the stack pointer or to
9511 the hard frame pointer and thus be able to use DW_OP_fbreg operations
9512 instead of individual location lists, we're going to rewrite MEMs based
9513 on them into MEMs based on the CFA by de-eliminating stack_pointer_rtx
9514 or hard_frame_pointer_rtx to the virtual CFA pointer frame_pointer_rtx
9515 resp. arg_pointer_rtx. We can do this either when there is no frame
9516 pointer in the function and stack adjustments are consistent for all
9517 basic blocks or when there is a frame pointer and no stack realignment.
9518 But we first have to check that frame_pointer_rtx resp. arg_pointer_rtx
9519 has been eliminated. */
9520 if (!frame_pointer_needed)
9521 {
9522 rtx reg, elim;
9523
9524 if (!vt_stack_adjustments ())
9525 return false;
9526
9527 #ifdef FRAME_POINTER_CFA_OFFSET
9528 reg = frame_pointer_rtx;
9529 #else
9530 reg = arg_pointer_rtx;
9531 #endif
9532 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
9533 if (elim != reg)
9534 {
9535 if (GET_CODE (elim) == PLUS)
9536 elim = XEXP (elim, 0);
9537 if (elim == stack_pointer_rtx)
9538 vt_init_cfa_base ();
9539 }
9540 }
9541 else if (!crtl->stack_realign_tried)
9542 {
9543 rtx reg, elim;
9544
9545 #ifdef FRAME_POINTER_CFA_OFFSET
9546 reg = frame_pointer_rtx;
9547 fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
9548 #else
9549 reg = arg_pointer_rtx;
9550 fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
9551 #endif
9552 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
9553 if (elim != reg)
9554 {
9555 if (GET_CODE (elim) == PLUS)
9556 {
9557 fp_cfa_offset -= INTVAL (XEXP (elim, 1));
9558 elim = XEXP (elim, 0);
9559 }
9560 if (elim != hard_frame_pointer_rtx)
9561 fp_cfa_offset = -1;
9562 }
9563 else
9564 fp_cfa_offset = -1;
9565 }
9566
9567 /* If the stack is realigned and a DRAP register is used, we're going to
9568 rewrite MEMs based on it representing incoming locations of parameters
9569 passed on the stack into MEMs based on the argument pointer. Although
9570 we aren't going to rewrite other MEMs, we still need to initialize the
9571 virtual CFA pointer in order to ensure that the argument pointer will
9572 be seen as a constant throughout the function.
9573
9574 ??? This doesn't work if FRAME_POINTER_CFA_OFFSET is defined. */
9575 else if (stack_realign_drap)
9576 {
9577 rtx reg, elim;
9578
9579 #ifdef FRAME_POINTER_CFA_OFFSET
9580 reg = frame_pointer_rtx;
9581 #else
9582 reg = arg_pointer_rtx;
9583 #endif
9584 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
9585 if (elim != reg)
9586 {
9587 if (GET_CODE (elim) == PLUS)
9588 elim = XEXP (elim, 0);
9589 if (elim == hard_frame_pointer_rtx)
9590 vt_init_cfa_base ();
9591 }
9592 }
9593
9594 hard_frame_pointer_adjustment = -1;
9595
9596 vt_add_function_parameters ();
9597
9598 FOR_EACH_BB (bb)
9599 {
9600 rtx insn;
9601 HOST_WIDE_INT pre, post = 0;
9602 basic_block first_bb, last_bb;
9603
9604 if (MAY_HAVE_DEBUG_INSNS)
9605 {
9606 cselib_record_sets_hook = add_with_sets;
9607 if (dump_file && (dump_flags & TDF_DETAILS))
9608 fprintf (dump_file, "first value: %i\n",
9609 cselib_get_next_uid ());
9610 }
9611
9612 first_bb = bb;
9613 for (;;)
9614 {
9615 edge e;
9616 if (bb->next_bb == EXIT_BLOCK_PTR
9617 || ! single_pred_p (bb->next_bb))
9618 break;
9619 e = find_edge (bb, bb->next_bb);
9620 if (! e || (e->flags & EDGE_FALLTHRU) == 0)
9621 break;
9622 bb = bb->next_bb;
9623 }
9624 last_bb = bb;
9625
9626 /* Add the micro-operations to the vector. */
9627 FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
9628 {
9629 HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
9630 VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;
9631 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
9632 insn = NEXT_INSN (insn))
9633 {
9634 if (INSN_P (insn))
9635 {
9636 if (!frame_pointer_needed)
9637 {
9638 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
9639 if (pre)
9640 {
9641 micro_operation mo;
9642 mo.type = MO_ADJUST;
9643 mo.u.adjust = pre;
9644 mo.insn = insn;
9645 if (dump_file && (dump_flags & TDF_DETAILS))
9646 log_op_type (PATTERN (insn), bb, insn,
9647 MO_ADJUST, dump_file);
9648 VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
9649 &mo);
9650 VTI (bb)->out.stack_adjust += pre;
9651 }
9652 }
9653
9654 cselib_hook_called = false;
9655 adjust_insn (bb, insn);
9656 if (MAY_HAVE_DEBUG_INSNS)
9657 {
9658 if (CALL_P (insn))
9659 prepare_call_arguments (bb, insn);
9660 cselib_process_insn (insn);
9661 if (dump_file && (dump_flags & TDF_DETAILS))
9662 {
9663 print_rtl_single (dump_file, insn);
9664 dump_cselib_table (dump_file);
9665 }
9666 }
9667 if (!cselib_hook_called)
9668 add_with_sets (insn, 0, 0);
9669 cancel_changes (0);
9670
9671 if (!frame_pointer_needed && post)
9672 {
9673 micro_operation mo;
9674 mo.type = MO_ADJUST;
9675 mo.u.adjust = post;
9676 mo.insn = insn;
9677 if (dump_file && (dump_flags & TDF_DETAILS))
9678 log_op_type (PATTERN (insn), bb, insn,
9679 MO_ADJUST, dump_file);
9680 VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
9681 &mo);
9682 VTI (bb)->out.stack_adjust += post;
9683 }
9684
9685 if (bb == prologue_bb
9686 && fp_cfa_offset != -1
9687 && hard_frame_pointer_adjustment == -1
9688 && RTX_FRAME_RELATED_P (insn)
9689 && fp_setter (insn))
9690 {
9691 vt_init_cfa_base ();
9692 hard_frame_pointer_adjustment = fp_cfa_offset;
9693 }
9694 }
9695 }
9696 gcc_assert (offset == VTI (bb)->out.stack_adjust);
9697 }
9698
9699 bb = last_bb;
9700
9701 if (MAY_HAVE_DEBUG_INSNS)
9702 {
9703 cselib_preserve_only_values ();
9704 cselib_reset_table (cselib_get_next_uid ());
9705 cselib_record_sets_hook = NULL;
9706 }
9707 }
9708
9709 hard_frame_pointer_adjustment = -1;
9710 VTI (ENTRY_BLOCK_PTR)->flooded = true;
9711 cfa_base_rtx = NULL_RTX;
9712 return true;
9713 }
9714
9715 /* This is *not* reset after each function. It gives each
9716 NOTE_INSN_DELETED_DEBUG_LABEL in the entire compilation
9717 a unique label number. */
9718
9719 static int debug_label_num = 1;
9720
9721 /* Get rid of all debug insns from the insn stream. */
9722
9723 static void
9724 delete_debug_insns (void)
9725 {
9726 basic_block bb;
9727 rtx insn, next;
9728
9729 if (!MAY_HAVE_DEBUG_INSNS)
9730 return;
9731
9732 FOR_EACH_BB (bb)
9733 {
9734 FOR_BB_INSNS_SAFE (bb, insn, next)
9735 if (DEBUG_INSN_P (insn))
9736 {
9737 tree decl = INSN_VAR_LOCATION_DECL (insn);
9738 if (TREE_CODE (decl) == LABEL_DECL
9739 && DECL_NAME (decl)
9740 && !DECL_RTL_SET_P (decl))
9741 {
9742 PUT_CODE (insn, NOTE);
9743 NOTE_KIND (insn) = NOTE_INSN_DELETED_DEBUG_LABEL;
9744 NOTE_DELETED_LABEL_NAME (insn)
9745 = IDENTIFIER_POINTER (DECL_NAME (decl));
9746 SET_DECL_RTL (decl, insn);
9747 CODE_LABEL_NUMBER (insn) = debug_label_num++;
9748 }
9749 else
9750 delete_insn (insn);
9751 }
9752 }
9753 }
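
/* As a sketch of the conversion above: a debug bind for a named label
   L whose DECL_RTL was never set, e.g.

     (debug_insn (var_location L (nil)))

   is rewritten in place into a NOTE_INSN_DELETED_DEBUG_LABEL carrying
   L's name, so dwarf2out can still describe the label; every other
   debug insn is simply deleted.  */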
9754
9755 /* Run a fast, BB-local only version of var tracking, to take care of
9756 information that we don't do global analysis on, so that not all
9757 information is lost. If SKIPPED holds, we're skipping the global
9758 pass entirely, so we should try to use the information it would
9759 have handled as well. */
9760
9761 static void
9762 vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
9763 {
9764 /* ??? Just skip it all for now. */
9765 delete_debug_insns ();
9766 }
9767
9768 /* Free the data structures needed for variable tracking. */
9769
9770 static void
9771 vt_finalize (void)
9772 {
9773 basic_block bb;
9774
9775 FOR_EACH_BB (bb)
9776 {
9777 VEC_free (micro_operation, heap, VTI (bb)->mos);
9778 }
9779
9780 FOR_ALL_BB (bb)
9781 {
9782 dataflow_set_destroy (&VTI (bb)->in);
9783 dataflow_set_destroy (&VTI (bb)->out);
9784 if (VTI (bb)->permp)
9785 {
9786 dataflow_set_destroy (VTI (bb)->permp);
9787 XDELETE (VTI (bb)->permp);
9788 }
9789 }
9790 free_aux_for_blocks ();
9791 htab_delete (empty_shared_hash->htab);
9792 htab_delete (changed_variables);
9793 free_alloc_pool (attrs_pool);
9794 free_alloc_pool (var_pool);
9795 free_alloc_pool (loc_chain_pool);
9796 free_alloc_pool (shared_hash_pool);
9797
9798 if (MAY_HAVE_DEBUG_INSNS)
9799 {
9800 free_alloc_pool (valvar_pool);
9801 VEC_free (rtx, heap, preserved_values);
9802 cselib_finish ();
9803 BITMAP_FREE (scratch_regs);
9804 scratch_regs = NULL;
9805 }
9806
9807 #ifdef HAVE_window_save
9808 VEC_free (parm_reg_t, gc, windowed_parm_regs);
9809 #endif
9810
9811 if (vui_vec)
9812 XDELETEVEC (vui_vec);
9813 vui_vec = NULL;
9814 vui_allocated = 0;
9815 }
9816
9817 /* The entry point to variable tracking pass. */
9818
9819 static inline unsigned int
9820 variable_tracking_main_1 (void)
9821 {
9822 bool success;
9823
9824 if (flag_var_tracking_assignments < 0)
9825 {
9826 delete_debug_insns ();
9827 return 0;
9828 }
9829
9830 if (n_basic_blocks > 500 && n_edges / n_basic_blocks >= 20)
9831 {
9832 vt_debug_insns_local (true);
9833 return 0;
9834 }
9835
9836 mark_dfs_back_edges ();
9837 if (!vt_initialize ())
9838 {
9839 vt_finalize ();
9840 vt_debug_insns_local (true);
9841 return 0;
9842 }
9843
9844 success = vt_find_locations ();
9845
9846 if (!success && flag_var_tracking_assignments > 0)
9847 {
9848 vt_finalize ();
9849
9850 delete_debug_insns ();
9851
9852 /* This is later restored by our caller. */
9853 flag_var_tracking_assignments = 0;
9854
9855 success = vt_initialize ();
9856 gcc_assert (success);
9857
9858 success = vt_find_locations ();
9859 }
9860
9861 if (!success)
9862 {
9863 vt_finalize ();
9864 vt_debug_insns_local (false);
9865 return 0;
9866 }
9867
9868 if (dump_file && (dump_flags & TDF_DETAILS))
9869 {
9870 dump_dataflow_sets ();
9871 dump_flow_info (dump_file, dump_flags);
9872 }
9873
9874 timevar_push (TV_VAR_TRACKING_EMIT);
9875 vt_emit_notes ();
9876 timevar_pop (TV_VAR_TRACKING_EMIT);
9877
9878 vt_finalize ();
9879 vt_debug_insns_local (false);
9880 return 0;
9881 }
9882
9883 unsigned int
9884 variable_tracking_main (void)
9885 {
9886 unsigned int ret;
9887 int save = flag_var_tracking_assignments;
9888
9889 ret = variable_tracking_main_1 ();
9890
9891 flag_var_tracking_assignments = save;
9892
9893 return ret;
9894 }
9895 \f
9896 static bool
9897 gate_handle_var_tracking (void)
9898 {
9899 return (flag_var_tracking && !targetm.delay_vartrack);
9900 }
9901
9902
9903
9904 struct rtl_opt_pass pass_variable_tracking =
9905 {
9906 {
9907 RTL_PASS,
9908 "vartrack", /* name */
9909 gate_handle_var_tracking, /* gate */
9910 variable_tracking_main, /* execute */
9911 NULL, /* sub */
9912 NULL, /* next */
9913 0, /* static_pass_number */
9914 TV_VAR_TRACKING, /* tv_id */
9915 0, /* properties_required */
9916 0, /* properties_provided */
9917 0, /* properties_destroyed */
9918 0, /* todo_flags_start */
9919 TODO_verify_rtl_sharing /* todo_flags_finish */
9920 }
9921 };