]> gcc.gnu.org Git - gcc.git/blob - gcc/tree.c
tree: Add vector_element_bits(_tree) [PR94980 1/3]
[gcc.git] / gcc / tree.c
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2020 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables index by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent but can occasionally
28 calls language-dependent routines. */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "tree-pass.h"
38 #include "ssa.h"
39 #include "cgraph.h"
40 #include "diagnostic.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "attribs.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "output.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "tree-dfa.h"
57 #include "langhooks-def.h"
58 #include "tree-diagnostic.h"
59 #include "except.h"
60 #include "builtins.h"
61 #include "print-tree.h"
62 #include "ipa-utils.h"
63 #include "selftest.h"
64 #include "stringpool.h"
65 #include "attribs.h"
66 #include "rtl.h"
67 #include "regs.h"
68 #include "tree-vector-builder.h"
69 #include "gimple-fold.h"
70 #include "escaped_string.h"
71
/* Tree code classes.

   One tree_code_class per tree code, generated from all-tree.def:
   each DEFTREECODE contributes its TYPE argument, so the array is
   indexed directly by enum tree_code.  */

#define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
#define END_OF_BASE_TREE_CODES tcc_exceptional,

const enum tree_code_class tree_code_type[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
83
/* Table indexed by tree code giving number of expression
   operands beyond the fixed part of the node structure.
   Not used for types or decls.  Generated from all-tree.def,
   taking the LENGTH argument of each DEFTREECODE.  */

#define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
#define END_OF_BASE_TREE_CODES 0,

const unsigned char tree_code_length[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
97
/* Names of tree components.
   Used for printing out the tree and error messages.
   Generated from all-tree.def, taking the NAME argument of each
   DEFTREECODE; the "@dummy" entry pads the per-frontend gaps.  */
#define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
#define END_OF_BASE_TREE_CODES "@dummy",

static const char *const tree_code_name[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
109
/* Each tree code class has an associated string representation.
   These must correspond to the tree_code_class entries
   (i.e. the order here mirrors enum tree_code_class).  */

const char *const tree_code_class_strings[] =
{
  "exceptional",
  "constant",
  "type",
  "declaration",
  "reference",
  "comparison",
  "unary",
  "binary",
  "statement",
  "vl_exp",
  "expression"
};
127
/* obstack.[ch] explicitly declined to prototype this.  */
extern int _obstack_allocated_p (struct obstack *h, void *obj);

/* Statistics-gathering stuff.  Only updated when GATHER_STATISTICS
   is enabled; see record_node_allocation_statistics.  */

static uint64_t tree_code_counts[MAX_TREE_CODES];
uint64_t tree_node_counts[(int) all_kinds];
uint64_t tree_node_sizes[(int) all_kinds];

/* Keep in sync with tree.h:enum tree_node_kind.  */
static const char * const tree_node_kind_names[] = {
  "decls",
  "types",
  "blocks",
  "stmts",
  "refs",
  "exprs",
  "constants",
  "identifiers",
  "vecs",
  "binfos",
  "ssa names",
  "constructors",
  "random kinds",
  "lang_decl kinds",
  "lang_type kinds",
  "omp clauses",
};
156
/* Unique id for next decl created.  */
static GTY(()) int next_decl_uid;
/* Unique id for next type created.  Starts at 1 so that 0 never
   names a valid type.  */
static GTY(()) unsigned next_type_uid = 1;
/* Unique id for next debug decl created.  Use negative numbers,
   to catch erroneous uses.  */
static GTY(()) int next_debug_decl_uid;
164
/* Since we cannot rehash a type after it is in the table, we have to
   keep the hash code.  */

struct GTY((for_user)) type_hash {
  unsigned long hash;	/* Cached hash value for TYPE.  */
  tree type;
};

/* Initial size of the hash table (rounded to next prime).  */
#define TYPE_HASH_INITIAL_SIZE 1000

struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
{
  static hashval_t hash (type_hash *t) { return t->hash; }
  static bool equal (type_hash *a, type_hash *b);

  /* Keep the entry only while the cached type is still live;
     otherwise let GC drop it from the cache.  */
  static int
  keep_cache_entry (type_hash *&t)
  {
    return ggc_marked_p (t->type);
  }
};
187
/* Now here is the hash table.  When recording a type, it is added to
   the slot whose index is the hash code.  Note that the hash table is
   used for several kinds of types (function types, array types and
   array index range types, for now).  While all these live in the
   same table, they are completely independent, and the hash code is
   computed differently for each of these.  */

static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;

/* Hash table and temporary node for larger integer const values.  */
static GTY (()) tree int_cst_node;

struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
{
  static hashval_t hash (tree t);
  static bool equal (tree x, tree y);
};

static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;

/* Class and variable for making sure that there is a single POLY_INT_CST
   for a given value.  */
struct poly_int_cst_hasher : ggc_cache_ptr_hash<tree_node>
{
  typedef std::pair<tree, const poly_wide_int *> compare_type;
  static hashval_t hash (tree t);
  static bool equal (tree x, const compare_type &y);
};

static GTY ((cache)) hash_table<poly_int_cst_hasher> *poly_int_cst_hash_table;

/* Hash table for optimization flags and target option flags.  Use the same
   hash table for both sets of options.  Nodes for building the current
   optimization and target option nodes.  The assumption is most of the time
   the options created will already be in the hash table, so we avoid
   allocating and freeing up a node repeatably.  */
static GTY (()) tree cl_optimization_node;
static GTY (()) tree cl_target_option_node;

struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
{
  static hashval_t hash (tree t);
  static bool equal (tree x, tree y);
};

static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
234
/* General tree->tree mapping structure for use in hash tables.  */

/* Map decls to their DECL_DEBUG_EXPR / DECL_VALUE_EXPR trees; entries
   are GC caches keyed by the source decl.  */
static GTY ((cache))
     hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;

static GTY ((cache))
     hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;

struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
{
  static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }

  static bool
  equal (tree_vec_map *a, tree_vec_map *b)
  {
    return a->base.from == b->base.from;
  }

  /* Entries stay only while the key decl is still live.  */
  static int
  keep_cache_entry (tree_vec_map *&m)
  {
    return ggc_marked_p (m->base.from);
  }
};

static GTY ((cache))
     hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
263
/* Forward declarations of local helpers defined later in this file.  */
static void set_type_quals (tree, int);
static void print_type_hash_statistics (void);
static void print_debug_expr_statistics (void);
static void print_value_expr_statistics (void);

static tree build_array_type_1 (tree, tree, bool, bool, bool);

/* Well-known trees shared across the compiler, indexed by
   tree_index / integer_type_kind (see tree.h).  */
tree global_trees[TI_MAX];
tree integer_types[itk_none];

bool int_n_enabled_p[NUM_INT_N_ENTS];
struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];

/* tree_contains_struct[CODE][TS] is nonzero if nodes with code CODE
   contain the structure TS; filled in by
   initialize_tree_contains_struct.  */
bool tree_contains_struct[MAX_TREE_CODES][64];
278
/* Number of operands for each OpenMP clause.
   Indexed by enum omp_clause_code; keep in the same order.  */
unsigned const char omp_clause_num_ops[] =
{
  0, /* OMP_CLAUSE_ERROR  */
  1, /* OMP_CLAUSE_PRIVATE  */
  1, /* OMP_CLAUSE_SHARED  */
  1, /* OMP_CLAUSE_FIRSTPRIVATE  */
  2, /* OMP_CLAUSE_LASTPRIVATE  */
  5, /* OMP_CLAUSE_REDUCTION  */
  5, /* OMP_CLAUSE_TASK_REDUCTION  */
  5, /* OMP_CLAUSE_IN_REDUCTION  */
  1, /* OMP_CLAUSE_COPYIN  */
  1, /* OMP_CLAUSE_COPYPRIVATE  */
  3, /* OMP_CLAUSE_LINEAR  */
  2, /* OMP_CLAUSE_ALIGNED  */
  1, /* OMP_CLAUSE_DEPEND  */
  1, /* OMP_CLAUSE_NONTEMPORAL  */
  1, /* OMP_CLAUSE_UNIFORM  */
  1, /* OMP_CLAUSE_TO_DECLARE  */
  1, /* OMP_CLAUSE_LINK  */
  2, /* OMP_CLAUSE_FROM  */
  2, /* OMP_CLAUSE_TO  */
  2, /* OMP_CLAUSE_MAP  */
  1, /* OMP_CLAUSE_USE_DEVICE_PTR  */
  1, /* OMP_CLAUSE_USE_DEVICE_ADDR  */
  1, /* OMP_CLAUSE_IS_DEVICE_PTR  */
  1, /* OMP_CLAUSE_INCLUSIVE  */
  1, /* OMP_CLAUSE_EXCLUSIVE  */
  2, /* OMP_CLAUSE__CACHE_  */
  2, /* OMP_CLAUSE_GANG  */
  1, /* OMP_CLAUSE_ASYNC  */
  1, /* OMP_CLAUSE_WAIT  */
  0, /* OMP_CLAUSE_AUTO  */
  0, /* OMP_CLAUSE_SEQ  */
  1, /* OMP_CLAUSE__LOOPTEMP_  */
  1, /* OMP_CLAUSE__REDUCTEMP_  */
  1, /* OMP_CLAUSE__CONDTEMP_  */
  1, /* OMP_CLAUSE__SCANTEMP_  */
  1, /* OMP_CLAUSE_IF  */
  1, /* OMP_CLAUSE_NUM_THREADS  */
  1, /* OMP_CLAUSE_SCHEDULE  */
  0, /* OMP_CLAUSE_NOWAIT  */
  1, /* OMP_CLAUSE_ORDERED  */
  0, /* OMP_CLAUSE_DEFAULT  */
  3, /* OMP_CLAUSE_COLLAPSE  */
  0, /* OMP_CLAUSE_UNTIED  */
  1, /* OMP_CLAUSE_FINAL  */
  0, /* OMP_CLAUSE_MERGEABLE  */
  1, /* OMP_CLAUSE_DEVICE  */
  1, /* OMP_CLAUSE_DIST_SCHEDULE  */
  0, /* OMP_CLAUSE_INBRANCH  */
  0, /* OMP_CLAUSE_NOTINBRANCH  */
  1, /* OMP_CLAUSE_NUM_TEAMS  */
  1, /* OMP_CLAUSE_THREAD_LIMIT  */
  0, /* OMP_CLAUSE_PROC_BIND  */
  1, /* OMP_CLAUSE_SAFELEN  */
  1, /* OMP_CLAUSE_SIMDLEN  */
  0, /* OMP_CLAUSE_DEVICE_TYPE  */
  0, /* OMP_CLAUSE_FOR  */
  0, /* OMP_CLAUSE_PARALLEL  */
  0, /* OMP_CLAUSE_SECTIONS  */
  0, /* OMP_CLAUSE_TASKGROUP  */
  1, /* OMP_CLAUSE_PRIORITY  */
  1, /* OMP_CLAUSE_GRAINSIZE  */
  1, /* OMP_CLAUSE_NUM_TASKS  */
  0, /* OMP_CLAUSE_NOGROUP  */
  0, /* OMP_CLAUSE_THREADS  */
  0, /* OMP_CLAUSE_SIMD  */
  1, /* OMP_CLAUSE_HINT  */
  0, /* OMP_CLAUSE_DEFAULTMAP  */
  0, /* OMP_CLAUSE_ORDER  */
  0, /* OMP_CLAUSE_BIND  */
  1, /* OMP_CLAUSE__SIMDUID_  */
  0, /* OMP_CLAUSE__SIMT_  */
  0, /* OMP_CLAUSE_INDEPENDENT  */
  1, /* OMP_CLAUSE_WORKER  */
  1, /* OMP_CLAUSE_VECTOR  */
  1, /* OMP_CLAUSE_NUM_GANGS  */
  1, /* OMP_CLAUSE_NUM_WORKERS  */
  1, /* OMP_CLAUSE_VECTOR_LENGTH  */
  3, /* OMP_CLAUSE_TILE  */
  2, /* OMP_CLAUSE__GRIDDIM_  */
  0, /* OMP_CLAUSE_IF_PRESENT  */
  0, /* OMP_CLAUSE_FINALIZE  */
};
364
/* Source-level spelling of each OpenMP/OpenACC clause, indexed by
   enum omp_clause_code (same order as omp_clause_num_ops above).
   Note OMP_CLAUSE_TO_DECLARE and OMP_CLAUSE_TO both print as "to":
   that is the spelling used in the respective pragmas.  */
const char * const omp_clause_code_name[] =
{
  "error_clause",
  "private",
  "shared",
  "firstprivate",
  "lastprivate",
  "reduction",
  "task_reduction",
  "in_reduction",
  "copyin",
  "copyprivate",
  "linear",
  "aligned",
  "depend",
  "nontemporal",
  "uniform",
  "to",
  "link",
  "from",
  "to",
  "map",
  "use_device_ptr",
  "use_device_addr",
  "is_device_ptr",
  "inclusive",
  "exclusive",
  "_cache_",
  "gang",
  "async",
  "wait",
  "auto",
  "seq",
  "_looptemp_",
  "_reductemp_",
  "_condtemp_",
  "_scantemp_",
  "if",
  "num_threads",
  "schedule",
  "nowait",
  "ordered",
  "default",
  "collapse",
  "untied",
  "final",
  "mergeable",
  "device",
  "dist_schedule",
  "inbranch",
  "notinbranch",
  "num_teams",
  "thread_limit",
  "proc_bind",
  "safelen",
  "simdlen",
  "device_type",
  "for",
  "parallel",
  "sections",
  "taskgroup",
  "priority",
  "grainsize",
  "num_tasks",
  "nogroup",
  "threads",
  "simd",
  "hint",
  "defaultmap",
  "order",
  "bind",
  "_simduid_",
  "_simt_",
  "independent",
  "worker",
  "vector",
  "num_gangs",
  "num_workers",
  "vector_length",
  "tile",
  "_griddim_",
  "if_present",
  "finalize",
};
449
/* Return the tree node structure used by tree code CODE.

   Dispatch first on the code's class: all declarations, types and
   expressions have a class-determined structure; only tcc_constant
   and tcc_exceptional need a per-code lookup.  */

static inline enum tree_node_structure_enum
tree_node_structure_for_code (enum tree_code code)
{
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      switch (code)
	{
	case CONST_DECL:	return TS_CONST_DECL;
	case DEBUG_EXPR_DECL:	return TS_DECL_WRTL;
	case FIELD_DECL:	return TS_FIELD_DECL;
	case FUNCTION_DECL:	return TS_FUNCTION_DECL;
	case LABEL_DECL:	return TS_LABEL_DECL;
	case PARM_DECL:		return TS_PARM_DECL;
	case RESULT_DECL:	return TS_RESULT_DECL;
	case TRANSLATION_UNIT_DECL: return TS_TRANSLATION_UNIT_DECL;
	case TYPE_DECL:		return TS_TYPE_DECL;
	case VAR_DECL:		return TS_VAR_DECL;
	default:		return TS_DECL_NON_COMMON;
	}

    case tcc_type:		return TS_TYPE_NON_COMMON;

    case tcc_binary:
    case tcc_comparison:
    case tcc_expression:
    case tcc_reference:
    case tcc_statement:
    case tcc_unary:
    case tcc_vl_exp:		return TS_EXP;

    default:  /* tcc_constant and tcc_exceptional */
      break;
    }

  switch (code)
    {
      /* tcc_constant cases.  */
    case COMPLEX_CST:		return TS_COMPLEX;
    case FIXED_CST:		return TS_FIXED_CST;
    case INTEGER_CST:		return TS_INT_CST;
    case POLY_INT_CST:		return TS_POLY_INT_CST;
    case REAL_CST:		return TS_REAL_CST;
    case STRING_CST:		return TS_STRING;
    case VECTOR_CST:		return TS_VECTOR;
    case VOID_CST:		return TS_TYPED;

      /* tcc_exceptional cases.  */
    case BLOCK:			return TS_BLOCK;
    case CONSTRUCTOR:		return TS_CONSTRUCTOR;
    case ERROR_MARK:		return TS_COMMON;
    case IDENTIFIER_NODE:	return TS_IDENTIFIER;
    case OMP_CLAUSE:		return TS_OMP_CLAUSE;
    case OPTIMIZATION_NODE:	return TS_OPTIMIZATION;
    case PLACEHOLDER_EXPR:	return TS_COMMON;
    case SSA_NAME:		return TS_SSA_NAME;
    case STATEMENT_LIST:	return TS_STATEMENT_LIST;
    case TARGET_OPTION_NODE:	return TS_TARGET_OPTION;
    case TREE_BINFO:		return TS_BINFO;
    case TREE_LIST:		return TS_LIST;
    case TREE_VEC:		return TS_VEC;

    default:
      gcc_unreachable ();
    }
}
519
/* Initialize tree_contains_struct to describe the hierarchy of tree
   nodes.  For every tree code we mark its own TS structure and then,
   by switching on that structure, every structure it derives from;
   each MARK_TS_* macro also marks the next structure up the chain.  */

static void
initialize_tree_contains_struct (void)
{
  unsigned i;

  for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
    {
      enum tree_code code;
      enum tree_node_structure_enum ts_code;

      code = (enum tree_code) i;
      ts_code = tree_node_structure_for_code (code);

      /* Mark the TS structure itself.  */
      tree_contains_struct[code][ts_code] = 1;

      /* Mark all the structures that TS is derived from.  */
      switch (ts_code)
	{
	case TS_TYPED:
	case TS_BLOCK:
	case TS_OPTIMIZATION:
	case TS_TARGET_OPTION:
	  MARK_TS_BASE (code);
	  break;

	case TS_COMMON:
	case TS_INT_CST:
	case TS_POLY_INT_CST:
	case TS_REAL_CST:
	case TS_FIXED_CST:
	case TS_VECTOR:
	case TS_STRING:
	case TS_COMPLEX:
	case TS_SSA_NAME:
	case TS_CONSTRUCTOR:
	case TS_EXP:
	case TS_STATEMENT_LIST:
	  MARK_TS_TYPED (code);
	  break;

	case TS_IDENTIFIER:
	case TS_DECL_MINIMAL:
	case TS_TYPE_COMMON:
	case TS_LIST:
	case TS_VEC:
	case TS_BINFO:
	case TS_OMP_CLAUSE:
	  MARK_TS_COMMON (code);
	  break;

	case TS_TYPE_WITH_LANG_SPECIFIC:
	  MARK_TS_TYPE_COMMON (code);
	  break;

	case TS_TYPE_NON_COMMON:
	  MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
	  break;

	case TS_DECL_COMMON:
	  MARK_TS_DECL_MINIMAL (code);
	  break;

	case TS_DECL_WRTL:
	case TS_CONST_DECL:
	  MARK_TS_DECL_COMMON (code);
	  break;

	case TS_DECL_NON_COMMON:
	  MARK_TS_DECL_WITH_VIS (code);
	  break;

	case TS_DECL_WITH_VIS:
	case TS_PARM_DECL:
	case TS_LABEL_DECL:
	case TS_RESULT_DECL:
	  MARK_TS_DECL_WRTL (code);
	  break;

	case TS_FIELD_DECL:
	  MARK_TS_DECL_COMMON (code);
	  break;

	case TS_VAR_DECL:
	  MARK_TS_DECL_WITH_VIS (code);
	  break;

	case TS_TYPE_DECL:
	case TS_FUNCTION_DECL:
	  MARK_TS_DECL_NON_COMMON (code);
	  break;

	case TS_TRANSLATION_UNIT_DECL:
	  MARK_TS_DECL_COMMON (code);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Basic consistency checks for attributes used in fold.  */
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
  gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
}
667
/* Init tree.c.  Called once at compiler startup; creates the GC-owned
   hash tables and scratch nodes used throughout this file, then fills
   in tree_contains_struct (including the frontend's additions).  */

void
init_ttree (void)
{
  /* Initialize the hash table of types.  */
  type_hash_table
    = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);

  debug_expr_for_decl
    = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);

  value_expr_for_decl
    = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);

  int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);

  poly_int_cst_hash_table = hash_table<poly_int_cst_hasher>::create_ggc (64);

  /* Scratch INTEGER_CST used when probing int_cst_hash_table.  */
  int_cst_node = make_int_cst (1, 1);

  cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);

  cl_optimization_node = make_node (OPTIMIZATION_NODE);
  cl_target_option_node = make_node (TARGET_OPTION_NODE);

  /* Initialize the tree_contains_struct array.  */
  initialize_tree_contains_struct ();
  lang_hooks.init_ts ();
}
699
700 \f
701 /* The name of the object as the assembler will see it (but before any
702 translations made by ASM_OUTPUT_LABELREF). Often this is the same
703 as DECL_NAME. It is an IDENTIFIER_NODE. */
704 tree
705 decl_assembler_name (tree decl)
706 {
707 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
708 lang_hooks.set_decl_assembler_name (decl);
709 return DECL_ASSEMBLER_NAME_RAW (decl);
710 }
711
712 /* The DECL_ASSEMBLER_NAME_RAW of DECL is being explicitly set to NAME
713 (either of which may be NULL). Inform the FE, if this changes the
714 name. */
715
716 void
717 overwrite_decl_assembler_name (tree decl, tree name)
718 {
719 if (DECL_ASSEMBLER_NAME_RAW (decl) != name)
720 lang_hooks.overwrite_decl_assembler_name (decl, name);
721 }
722
723 /* When the target supports COMDAT groups, this indicates which group the
724 DECL is associated with. This can be either an IDENTIFIER_NODE or a
725 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
726 tree
727 decl_comdat_group (const_tree node)
728 {
729 struct symtab_node *snode = symtab_node::get (node);
730 if (!snode)
731 return NULL;
732 return snode->get_comdat_group ();
733 }
734
735 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
736 tree
737 decl_comdat_group_id (const_tree node)
738 {
739 struct symtab_node *snode = symtab_node::get (node);
740 if (!snode)
741 return NULL;
742 return snode->get_comdat_group_id ();
743 }
744
745 /* When the target supports named section, return its name as IDENTIFIER_NODE
746 or NULL if it is in no section. */
747 const char *
748 decl_section_name (const_tree node)
749 {
750 struct symtab_node *snode = symtab_node::get (node);
751 if (!snode)
752 return NULL;
753 return snode->get_section ();
754 }
755
756 /* Set section name of NODE to VALUE (that is expected to be
757 identifier node) */
758 void
759 set_decl_section_name (tree node, const char *value)
760 {
761 struct symtab_node *snode;
762
763 if (value == NULL)
764 {
765 snode = symtab_node::get (node);
766 if (!snode)
767 return;
768 }
769 else if (VAR_P (node))
770 snode = varpool_node::get_create (node);
771 else
772 snode = cgraph_node::get_create (node);
773 snode->set_section (value);
774 }
775
776 /* Return TLS model of a variable NODE. */
777 enum tls_model
778 decl_tls_model (const_tree node)
779 {
780 struct varpool_node *snode = varpool_node::get (node);
781 if (!snode)
782 return TLS_MODEL_NONE;
783 return snode->tls_model;
784 }
785
786 /* Set TLS model of variable NODE to MODEL. */
787 void
788 set_decl_tls_model (tree node, enum tls_model model)
789 {
790 struct varpool_node *vnode;
791
792 if (model == TLS_MODEL_NONE)
793 {
794 vnode = varpool_node::get (node);
795 if (!vnode)
796 return;
797 }
798 else
799 vnode = varpool_node::get_create (node);
800 vnode->tls_model = model;
801 }
802
/* Compute the number of bytes occupied by a tree with code CODE.
   This function cannot be used for nodes that have variable sizes,
   including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR
   (use tree_size on an existing node instead).  Unknown codes beyond
   NUM_TREE_CODES are delegated to the frontend's tree_size hook.  */
size_t
tree_code_size (enum tree_code code)
{
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:  /* A decl node */
      switch (code)
	{
	case FIELD_DECL:	return sizeof (tree_field_decl);
	case PARM_DECL:		return sizeof (tree_parm_decl);
	case VAR_DECL:		return sizeof (tree_var_decl);
	case LABEL_DECL:	return sizeof (tree_label_decl);
	case RESULT_DECL:	return sizeof (tree_result_decl);
	case CONST_DECL:	return sizeof (tree_const_decl);
	case TYPE_DECL:		return sizeof (tree_type_decl);
	case FUNCTION_DECL:	return sizeof (tree_function_decl);
	case DEBUG_EXPR_DECL:	return sizeof (tree_decl_with_rtl);
	case TRANSLATION_UNIT_DECL: return sizeof (tree_translation_unit_decl);
	case NAMESPACE_DECL:
	case IMPORTED_DECL:
	case NAMELIST_DECL:	return sizeof (tree_decl_non_common);
	default:
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    case tcc_type:  /* a type node */
      switch (code)
	{
	case OFFSET_TYPE:
	case ENUMERAL_TYPE:
	case BOOLEAN_TYPE:
	case INTEGER_TYPE:
	case REAL_TYPE:
	case POINTER_TYPE:
	case REFERENCE_TYPE:
	case NULLPTR_TYPE:
	case FIXED_POINT_TYPE:
	case COMPLEX_TYPE:
	case VECTOR_TYPE:
	case ARRAY_TYPE:
	case RECORD_TYPE:
	case UNION_TYPE:
	case QUAL_UNION_TYPE:
	case VOID_TYPE:
	case FUNCTION_TYPE:
	case METHOD_TYPE:
	case LANG_TYPE:		return sizeof (tree_type_non_common);
	default:
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    case tcc_reference:   /* a reference */
    case tcc_expression:  /* an expression */
    case tcc_statement:   /* an expression with side effects */
    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
      /* tree_exp already embeds one operand slot; add the rest.  */
      return (sizeof (struct tree_exp)
	      + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));

    case tcc_constant:  /* a constant */
      switch (code)
	{
	case VOID_CST:		return sizeof (tree_typed);
	case INTEGER_CST:	gcc_unreachable ();
	case POLY_INT_CST:	return sizeof (tree_poly_int_cst);
	case REAL_CST:		return sizeof (tree_real_cst);
	case FIXED_CST:		return sizeof (tree_fixed_cst);
	case COMPLEX_CST:	return sizeof (tree_complex);
	case VECTOR_CST:	gcc_unreachable ();
	case STRING_CST:	gcc_unreachable ();
	default:
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    case tcc_exceptional:  /* something random, like an identifier.  */
      switch (code)
	{
	case IDENTIFIER_NODE:	return lang_hooks.identifier_size;
	case TREE_LIST:		return sizeof (tree_list);

	case ERROR_MARK:
	case PLACEHOLDER_EXPR:	return sizeof (tree_common);

	case TREE_VEC:		gcc_unreachable ();
	case OMP_CLAUSE:	gcc_unreachable ();

	case SSA_NAME:		return sizeof (tree_ssa_name);

	case STATEMENT_LIST:	return sizeof (tree_statement_list);
	case BLOCK:		return sizeof (struct tree_block);
	case CONSTRUCTOR:	return sizeof (tree_constructor);
	case OPTIMIZATION_NODE:	return sizeof (tree_optimization_option);
	case TARGET_OPTION_NODE: return sizeof (tree_target_option);

	default:
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    default:
      gcc_unreachable ();
    }
}
913
/* Compute the number of bytes occupied by NODE.  This routine only
   looks at TREE_CODE, except for those nodes that have variable sizes
   (where the trailing element count must be read from NODE itself).  */
size_t
tree_size (const_tree node)
{
  const enum tree_code code = TREE_CODE (node);
  switch (code)
    {
    case INTEGER_CST:
      /* One HOST_WIDE_INT element is embedded in tree_int_cst.  */
      return (sizeof (struct tree_int_cst)
	      + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));

    case TREE_BINFO:
      return (offsetof (struct tree_binfo, base_binfos)
	      + vec<tree, va_gc>
		  ::embedded_size (BINFO_N_BASE_BINFOS (node)));

    case TREE_VEC:
      return (sizeof (struct tree_vec)
	      + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));

    case VECTOR_CST:
      /* Only the encoded elements are stored, not the full vector.  */
      return (sizeof (struct tree_vector)
	      + (vector_cst_encoded_nelts (node) - 1) * sizeof (tree));

    case STRING_CST:
      /* +1 for the trailing NUL not counted in TREE_STRING_LENGTH.  */
      return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;

    case OMP_CLAUSE:
      return (sizeof (struct tree_omp_clause)
	      + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
	        * sizeof (tree));

    default:
      if (TREE_CODE_CLASS (code) == tcc_vl_exp)
	return (sizeof (struct tree_exp)
		+ (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
      else
	return tree_code_size (code);
    }
}
955
/* Return tree node kind (for allocation statistics) based on tree CODE.
   Most kinds follow directly from the code class; tcc_exceptional
   codes get a finer per-code classification.  */

static tree_node_kind
get_stats_node_kind (enum tree_code code)
{
  enum tree_code_class type = TREE_CODE_CLASS (code);

  switch (type)
    {
    case tcc_declaration:  /* A decl node */
      return d_kind;
    case tcc_type:  /* a type node */
      return t_kind;
    case tcc_statement:  /* an expression with side effects */
      return s_kind;
    case tcc_reference:  /* a reference */
      return r_kind;
    case tcc_expression:  /* an expression */
    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:  /* a unary arithmetic expression */
    case tcc_binary:  /* a binary arithmetic expression */
      return e_kind;
    case tcc_constant:  /* a constant */
      return c_kind;
    case tcc_exceptional:  /* something random, like an identifier.  */
      switch (code)
	{
	case IDENTIFIER_NODE:
	  return id_kind;
	case TREE_VEC:
	  return vec_kind;
	case TREE_BINFO:
	  return binfo_kind;
	case SSA_NAME:
	  return ssa_name_kind;
	case BLOCK:
	  return b_kind;
	case CONSTRUCTOR:
	  return constr_kind;
	case OMP_CLAUSE:
	  return omp_clause_kind;
	default:
	  return x_kind;
	}
      break;
    case tcc_vl_exp:
      return e_kind;
    default:
      gcc_unreachable ();
    }
}
1007
1008 /* Record interesting allocation statistics for a tree node with CODE
1009 and LENGTH. */
1010
1011 static void
1012 record_node_allocation_statistics (enum tree_code code, size_t length)
1013 {
1014 if (!GATHER_STATISTICS)
1015 return;
1016
1017 tree_node_kind kind = get_stats_node_kind (code);
1018
1019 tree_code_counts[(int) code]++;
1020 tree_node_counts[(int) kind]++;
1021 tree_node_sizes[(int) kind] += length;
1022 }
1023
1024 /* Allocate and return a new UID from the DECL_UID namespace. */
1025
1026 int
1027 allocate_decl_uid (void)
1028 {
1029 return next_decl_uid++;
1030 }
1031
/* Return a newly allocated node of code CODE.  For decl and type
   nodes, some other fields are initialized.  The rest of the node is
   initialized to zero.  This function cannot be used for TREE_VEC,
   INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
   tree_code_size.

   Achoo!  I got a code in the node.  */

tree
make_node (enum tree_code code MEM_STAT_DECL)
{
  tree t;
  enum tree_code_class type = TREE_CODE_CLASS (code);
  size_t length = tree_code_size (code);

  record_node_allocation_statistics (code, length);

  /* ggc_alloc_cleared_* zero-fills, so any field not set below is 0.  */
  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
  TREE_SET_CODE (t, code);

  switch (type)
    {
    case tcc_statement:
      /* Statements have side effects by definition, except debug
	 markers.  */
      if (code != DEBUG_BEGIN_STMT)
	TREE_SIDE_EFFECTS (t) = 1;
      break;

    case tcc_declaration:
      if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
	{
	  if (code == FUNCTION_DECL)
	    {
	      SET_DECL_ALIGN (t, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY));
	      SET_DECL_MODE (t, FUNCTION_MODE);
	    }
	  else
	    SET_DECL_ALIGN (t, 1);
	}
      DECL_SOURCE_LOCATION (t) = input_location;
      if (TREE_CODE (t) == DEBUG_EXPR_DECL)
	/* Debug decls use negative UIDs to catch erroneous uses.  */
	DECL_UID (t) = --next_debug_decl_uid;
      else
	{
	  DECL_UID (t) = allocate_decl_uid ();
	  SET_DECL_PT_UID (t, -1);
	}
      if (TREE_CODE (t) == LABEL_DECL)
	LABEL_DECL_UID (t) = -1;

      break;

    case tcc_type:
      TYPE_UID (t) = next_type_uid++;
      SET_TYPE_ALIGN (t, BITS_PER_UNIT);
      TYPE_USER_ALIGN (t) = 0;
      /* A fresh type is its own main variant and canonical type.  */
      TYPE_MAIN_VARIANT (t) = t;
      TYPE_CANONICAL (t) = t;

      /* Default to no attributes for type, but let target change that.  */
      TYPE_ATTRIBUTES (t) = NULL_TREE;
      targetm.set_default_type_attributes (t);

      /* We have not yet computed the alias set for this type.  */
      TYPE_ALIAS_SET (t) = -1;
      break;

    case tcc_constant:
      TREE_CONSTANT (t) = 1;
      break;

    case tcc_expression:
      switch (code)
	{
	case INIT_EXPR:
	case MODIFY_EXPR:
	case VA_ARG_EXPR:
	case PREDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  /* All of these have side-effects, no matter what their
	     operands are.  */
	  TREE_SIDE_EFFECTS (t) = 1;
	  break;

	default:
	  break;
	}
      break;

    case tcc_exceptional:
      switch (code)
	{
	case TARGET_OPTION_NODE:
	  TREE_TARGET_OPTION(t)
	    = ggc_cleared_alloc<struct cl_target_option> ();
	  break;

	case OPTIMIZATION_NODE:
	  TREE_OPTIMIZATION (t)
	    = ggc_cleared_alloc<struct cl_optimization> ();
	  break;

	default:
	  break;
	}
      break;

    default:
      /* Other classes need no special treatment.  */
      break;
    }

  return t;
}
1147
1148 /* Free tree node. */
1149
1150 void
1151 free_node (tree node)
1152 {
1153 enum tree_code code = TREE_CODE (node);
1154 if (GATHER_STATISTICS)
1155 {
1156 enum tree_node_kind kind = get_stats_node_kind (code);
1157
1158 gcc_checking_assert (tree_code_counts[(int) TREE_CODE (node)] != 0);
1159 gcc_checking_assert (tree_node_counts[(int) kind] != 0);
1160 gcc_checking_assert (tree_node_sizes[(int) kind] >= tree_size (node));
1161
1162 tree_code_counts[(int) TREE_CODE (node)]--;
1163 tree_node_counts[(int) kind]--;
1164 tree_node_sizes[(int) kind] -= tree_size (node);
1165 }
1166 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1167 vec_free (CONSTRUCTOR_ELTS (node));
1168 else if (code == BLOCK)
1169 vec_free (BLOCK_NONLOCALIZED_VARS (node));
1170 else if (code == TREE_BINFO)
1171 vec_free (BINFO_BASE_ACCESSES (node));
1172 else if (code == OPTIMIZATION_NODE)
1173 cl_optimization_option_free (TREE_OPTIMIZATION (node));
1174 else if (code == TARGET_OPTION_NODE)
1175 cl_target_option_free (TREE_TARGET_OPTION (node));
1176 ggc_free (node);
1177 }
1178 \f
/* Return a new node with the same contents as NODE except that its
   TREE_CHAIN, if it has one, is zero and it has a fresh uid.  */

tree
copy_node (tree node MEM_STAT_DECL)
{
  tree t;
  enum tree_code code = TREE_CODE (node);
  size_t length;

  /* STATEMENT_LISTs own side storage and cannot be bitwise-copied.  */
  gcc_assert (code != STATEMENT_LIST);

  length = tree_size (node);
  record_node_allocation_statistics (code, length);
  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
  memcpy (t, node, length);

  /* The copy must not share the original's chain, nor inherit
     bookkeeping bits that describe the original's lifecycle.  */
  if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
    TREE_CHAIN (t) = 0;
  TREE_ASM_WRITTEN (t) = 0;
  TREE_VISITED (t) = 0;

  if (TREE_CODE_CLASS (code) == tcc_declaration)
    {
      /* Give the copy its own identity: a fresh UID from the
	 appropriate namespace (debug decls count downward).  */
      if (code == DEBUG_EXPR_DECL)
	DECL_UID (t) = --next_debug_decl_uid;
      else
	{
	  DECL_UID (t) = allocate_decl_uid ();
	  if (DECL_PT_UID_SET_P (node))
	    /* Keep points-to info shared with the original decl.  */
	    SET_DECL_PT_UID (t, DECL_PT_UID (node));
	}
      if ((TREE_CODE (node) == PARM_DECL || VAR_P (node))
	  && DECL_HAS_VALUE_EXPR_P (node))
	{
	  /* Value exprs live in a side table keyed by decl, so the
	     copy needs its own entry (and flag) to keep sharing it.  */
	  SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
	  DECL_HAS_VALUE_EXPR_P (t) = 1;
	}
      /* DECL_DEBUG_EXPR is copied explicitly by callers.  */
      if (VAR_P (node))
	{
	  DECL_HAS_DEBUG_EXPR_P (t) = 0;
	  t->decl_with_vis.symtab_node = NULL;
	}
      if (VAR_P (node) && DECL_HAS_INIT_PRIORITY_P (node))
	{
	  /* Init priority also lives in a side table; re-register.  */
	  SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
	  DECL_HAS_INIT_PRIORITY_P (t) = 1;
	}
      if (TREE_CODE (node) == FUNCTION_DECL)
	{
	  /* The function body and symtab entry belong to the
	     original, never to a bitwise copy.  */
	  DECL_STRUCT_FUNCTION (t) = NULL;
	  t->decl_with_vis.symtab_node = NULL;
	}
    }
  else if (TREE_CODE_CLASS (code) == tcc_type)
    {
      TYPE_UID (t) = next_type_uid++;
      /* The following is so that the debug code for
	 the copy is different from the original type.
	 The two statements usually duplicate each other
	 (because they clear fields of the same union),
	 but the optimizer should catch that.  */
      TYPE_SYMTAB_ADDRESS (t) = 0;
      TYPE_SYMTAB_DIE (t) = 0;

      /* Do not copy the values cache.  */
      if (TYPE_CACHED_VALUES_P (t))
	{
	  TYPE_CACHED_VALUES_P (t) = 0;
	  TYPE_CACHED_VALUES (t) = NULL_TREE;
	}
    }
  else if (code == TARGET_OPTION_NODE)
    {
      /* Deep-copy the out-of-line option blob.  */
      TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
      memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
	      sizeof (struct cl_target_option));
    }
  else if (code == OPTIMIZATION_NODE)
    {
      /* Likewise for optimization options.  */
      TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
      memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
	      sizeof (struct cl_optimization));
    }

  return t;
}
1267
1268 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1269 For example, this can copy a list made of TREE_LIST nodes. */
1270
1271 tree
1272 copy_list (tree list)
1273 {
1274 tree head;
1275 tree prev, next;
1276
1277 if (list == 0)
1278 return 0;
1279
1280 head = prev = copy_node (list);
1281 next = TREE_CHAIN (list);
1282 while (next)
1283 {
1284 TREE_CHAIN (prev) = copy_node (next);
1285 prev = TREE_CHAIN (prev);
1286 next = TREE_CHAIN (next);
1287 }
1288 return head;
1289 }
1290
1291 \f
1292 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1293 INTEGER_CST with value CST and type TYPE. */
1294
1295 static unsigned int
1296 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1297 {
1298 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1299 /* We need extra HWIs if CST is an unsigned integer with its
1300 upper bit set. */
1301 if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
1302 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1303 return cst.get_len ();
1304 }
1305
/* Return a new INTEGER_CST with value CST and type TYPE.  The new
   node stores CST's HWI blocks plus, for unsigned values with the top
   bit set, explicit zero-extension blocks (see
   get_int_cst_ext_nunits).  */

static tree
build_new_int_cst (tree type, const wide_int &cst)
{
  unsigned int len = cst.get_len ();
  unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
  tree nt = make_int_cst (len, ext_len);

  if (len < ext_len)
    {
      /* EXT_LEN > LEN: CST is unsigned with its top bit set.  Fill
	 the extension blocks: the topmost one gets the value's high
	 bits zero-extended from the partial precision, lower ones are
	 all-ones (the sign-extended representation of the blocks).  */
      --ext_len;
      TREE_INT_CST_ELT (nt, ext_len)
	= zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
      for (unsigned int i = len; i < ext_len; ++i)
	TREE_INT_CST_ELT (nt, i) = -1;
    }
  else if (TYPE_UNSIGNED (type)
	   && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
    {
      /* The topmost block is only partially occupied by the value's
	 precision; store it zero-extended so that hashing and
	 equality can compare whole HWIs without masking.  */
      len--;
      TREE_INT_CST_ELT (nt, len)
	= zext_hwi (cst.elt (len),
		    cst.get_precision () % HOST_BITS_PER_WIDE_INT);
    }

  /* Copy the (remaining) value blocks verbatim.  */
  for (unsigned int i = 0; i < len; i++)
    TREE_INT_CST_ELT (nt, i) = cst.elt (i);
  TREE_TYPE (nt) = type;
  return nt;
}
1337
1338 /* Return a new POLY_INT_CST with coefficients COEFFS and type TYPE. */
1339
1340 static tree
1341 build_new_poly_int_cst (tree type, tree (&coeffs)[NUM_POLY_INT_COEFFS]
1342 CXX_MEM_STAT_INFO)
1343 {
1344 size_t length = sizeof (struct tree_poly_int_cst);
1345 record_node_allocation_statistics (POLY_INT_CST, length);
1346
1347 tree t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1348
1349 TREE_SET_CODE (t, POLY_INT_CST);
1350 TREE_CONSTANT (t) = 1;
1351 TREE_TYPE (t) = type;
1352 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1353 POLY_INT_CST_COEFF (t, i) = coeffs[i];
1354 return t;
1355 }
1356
1357 /* Create a constant tree that contains CST sign-extended to TYPE. */
1358
1359 tree
1360 build_int_cst (tree type, poly_int64 cst)
1361 {
1362 /* Support legacy code. */
1363 if (!type)
1364 type = integer_type_node;
1365
1366 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1367 }
1368
1369 /* Create a constant tree that contains CST zero-extended to TYPE. */
1370
1371 tree
1372 build_int_cstu (tree type, poly_uint64 cst)
1373 {
1374 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1375 }
1376
1377 /* Create a constant tree that contains CST sign-extended to TYPE. */
1378
1379 tree
1380 build_int_cst_type (tree type, poly_int64 cst)
1381 {
1382 gcc_assert (type);
1383 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1384 }
1385
1386 /* Constructs tree in type TYPE from with value given by CST. Signedness
1387 of CST is assumed to be the same as the signedness of TYPE. */
1388
1389 tree
1390 double_int_to_tree (tree type, double_int cst)
1391 {
1392 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1393 }
1394
/* We force the wide_int CST to the range of the type TYPE by sign or
   zero extending it.  OVERFLOWABLE indicates if we are interested in
   overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  We set
   TREE_OVERFLOW on the result if
     OVERFLOWED is nonzero,
     or OVERFLOWABLE is >0 and signed overflow occurs
     or OVERFLOWABLE is <0 and any overflow occurs.
   We return a new tree node for the extended wide_int.  The node
   is shared if no overflow flags are set.  */


tree
force_fit_type (tree type, const poly_wide_int_ref &cst,
		int overflowable, bool overflowed)
{
  signop sign = TYPE_SIGN (type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || !wi::fits_to_tree_p (cst, type))
    {
      if (overflowed
	  || overflowable < 0
	  || (overflowable > 0 && sign == SIGNED))
	{
	  /* Truncate/extend CST to the type's precision, then build an
	     unshared constant so the overflow bit does not leak into
	     the shared caches.  */
	  poly_wide_int tmp = poly_wide_int::from (cst, TYPE_PRECISION (type),
						   sign);
	  tree t;
	  if (tmp.is_constant ())
	    t = build_new_int_cst (type, tmp.coeffs[0]);
	  else
	    {
	      /* Polynomial value: mark every coefficient as well as
		 the POLY_INT_CST itself.  */
	      tree coeffs[NUM_POLY_INT_COEFFS];
	      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
		{
		  coeffs[i] = build_new_int_cst (type, tmp.coeffs[i]);
		  TREE_OVERFLOW (coeffs[i]) = 1;
		}
	      t = build_new_poly_int_cst (type, coeffs);
	    }
	  TREE_OVERFLOW (t) = 1;
	  return t;
	}
    }

  /* Else build a shared node.  */
  return wide_int_to_tree (type, cst);
}
1446
1447 /* These are the hash table functions for the hash table of INTEGER_CST
1448 nodes of a sizetype. */
1449
1450 /* Return the hash code X, an INTEGER_CST. */
1451
1452 hashval_t
1453 int_cst_hasher::hash (tree x)
1454 {
1455 const_tree const t = x;
1456 hashval_t code = TYPE_UID (TREE_TYPE (t));
1457 int i;
1458
1459 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1460 code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1461
1462 return code;
1463 }
1464
1465 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1466 is the same as that given by *Y, which is the same. */
1467
1468 bool
1469 int_cst_hasher::equal (tree x, tree y)
1470 {
1471 const_tree const xt = x;
1472 const_tree const yt = y;
1473
1474 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1475 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1476 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1477 return false;
1478
1479 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1480 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1481 return false;
1482
1483 return true;
1484 }
1485
/* Create an INT_CST node of TYPE and value CST.
   The returned node is always shared.  For small integers we use a
   per-type vector cache, for larger ones we use a single hash table.
   The value is extended from its precision according to the sign of
   the type to be a multiple of HOST_BITS_PER_WIDE_INT.  This defines
   the upper bits and ensures that hashing and value equality based
   upon the underlying HOST_WIDE_INTs works without masking.  */

static tree
wide_int_to_tree_1 (tree type, const wide_int_ref &pcst)
{
  tree t;
  /* IX >= 0 selects a slot in the per-type small-value cache;
     LIMIT is that cache's size.  */
  int ix = -1;
  int limit = 0;

  gcc_assert (type);
  unsigned int prec = TYPE_PRECISION (type);
  signop sgn = TYPE_SIGN (type);

  /* Verify that everything is canonical: a multi-block value must not
     carry a redundant all-zeros or all-ones top block.  */
  int l = pcst.get_len ();
  if (l > 1)
    {
      if (pcst.elt (l - 1) == 0)
	gcc_checking_assert (pcst.elt (l - 2) < 0);
      if (pcst.elt (l - 1) == HOST_WIDE_INT_M1)
	gcc_checking_assert (pcst.elt (l - 2) >= 0);
    }

  wide_int cst = wide_int::from (pcst, prec, sgn);
  unsigned int ext_len = get_int_cst_ext_nunits (type, cst);

  if (ext_len == 1)
    {
      /* We just need to store a single HOST_WIDE_INT.  */
      HOST_WIDE_INT hwi;
      if (TYPE_UNSIGNED (type))
	hwi = cst.to_uhwi ();
      else
	hwi = cst.to_shwi ();

      /* Decide whether HWI falls in TYPE's small-value cache.  */
      switch (TREE_CODE (type))
	{
	case NULLPTR_TYPE:
	  gcc_assert (hwi == 0);
	  /* Fallthru.  */

	case POINTER_TYPE:
	case REFERENCE_TYPE:
	  /* Cache NULL pointer and zero bounds.  */
	  if (hwi == 0)
	    {
	      limit = 1;
	      ix = 0;
	    }
	  break;

	case BOOLEAN_TYPE:
	  /* Cache false or true.  */
	  limit = 2;
	  if (IN_RANGE (hwi, 0, 1))
	    ix = hwi;
	  break;

	case INTEGER_TYPE:
	case OFFSET_TYPE:
	  if (TYPE_SIGN (type) == UNSIGNED)
	    {
	      /* Cache [0, N).  */
	      limit = param_integer_share_limit;
	      if (IN_RANGE (hwi, 0, param_integer_share_limit - 1))
		ix = hwi;
	    }
	  else
	    {
	      /* Cache [-1, N).  */
	      limit = param_integer_share_limit + 1;
	      if (IN_RANGE (hwi, -1, param_integer_share_limit - 1))
		ix = hwi + 1;
	    }
	  break;

	case ENUMERAL_TYPE:
	  break;

	default:
	  gcc_unreachable ();
	}

      if (ix >= 0)
	{
	  /* Look for it in the type's vector of small shared ints.  */
	  if (!TYPE_CACHED_VALUES_P (type))
	    {
	      TYPE_CACHED_VALUES_P (type) = 1;
	      TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
	    }

	  t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
	  if (t)
	    /* Make sure no one is clobbering the shared constant.  */
	    gcc_checking_assert (TREE_TYPE (t) == type
				 && TREE_INT_CST_NUNITS (t) == 1
				 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
				 && TREE_INT_CST_EXT_NUNITS (t) == 1
				 && TREE_INT_CST_ELT (t, 0) == hwi);
	  else
	    {
	      /* Create a new shared int.  */
	      t = build_new_int_cst (type, cst);
	      TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
	    }
	}
      else
	{
	  /* Use the cache of larger shared ints, using int_cst_node as
	     a temporary.  */

	  TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
	  TREE_TYPE (int_cst_node) = type;

	  tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
	  t = *slot;
	  if (!t)
	    {
	      /* Insert this one into the hash table.  */
	      t = int_cst_node;
	      *slot = t;
	      /* Make a new node for next time round.  */
	      int_cst_node = make_int_cst (1, 1);
	    }
	}
    }
  else
    {
      /* The value either hashes properly or we drop it on the floor
	 for the gc to take care of.  There will not be enough of them
	 to worry about.  */

      tree nt = build_new_int_cst (type, cst);
      tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
      t = *slot;
      if (!t)
	{
	  /* Insert this one into the hash table.  */
	  t = nt;
	  *slot = t;
	}
      else
	/* An equal constant was already interned; discard the copy.  */
	ggc_free (nt);
    }

  return t;
}
1640
1641 hashval_t
1642 poly_int_cst_hasher::hash (tree t)
1643 {
1644 inchash::hash hstate;
1645
1646 hstate.add_int (TYPE_UID (TREE_TYPE (t)));
1647 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1648 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
1649
1650 return hstate.end ();
1651 }
1652
1653 bool
1654 poly_int_cst_hasher::equal (tree x, const compare_type &y)
1655 {
1656 if (TREE_TYPE (x) != y.first)
1657 return false;
1658 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1659 if (wi::to_wide (POLY_INT_CST_COEFF (x, i)) != y.second->coeffs[i])
1660 return false;
1661 return true;
1662 }
1663
1664 /* Build a POLY_INT_CST node with type TYPE and with the elements in VALUES.
1665 The elements must also have type TYPE. */
1666
1667 tree
1668 build_poly_int_cst (tree type, const poly_wide_int_ref &values)
1669 {
1670 unsigned int prec = TYPE_PRECISION (type);
1671 gcc_assert (prec <= values.coeffs[0].get_precision ());
1672 poly_wide_int c = poly_wide_int::from (values, prec, SIGNED);
1673
1674 inchash::hash h;
1675 h.add_int (TYPE_UID (type));
1676 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1677 h.add_wide_int (c.coeffs[i]);
1678 poly_int_cst_hasher::compare_type comp (type, &c);
1679 tree *slot = poly_int_cst_hash_table->find_slot_with_hash (comp, h.end (),
1680 INSERT);
1681 if (*slot == NULL_TREE)
1682 {
1683 tree coeffs[NUM_POLY_INT_COEFFS];
1684 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1685 coeffs[i] = wide_int_to_tree_1 (type, c.coeffs[i]);
1686 *slot = build_new_poly_int_cst (type, coeffs);
1687 }
1688 return *slot;
1689 }
1690
1691 /* Create a constant tree with value VALUE in type TYPE. */
1692
1693 tree
1694 wide_int_to_tree (tree type, const poly_wide_int_ref &value)
1695 {
1696 if (value.is_constant ())
1697 return wide_int_to_tree_1 (type, value.coeffs[0]);
1698 return build_poly_int_cst (type, value);
1699 }
1700
/* Enter the INTEGER_CST T into the shared-constant caches: the
   per-type small-value vector when T's value qualifies, otherwise the
   global hash table.  The caching policy mirrors wide_int_to_tree_1,
   which reads these caches.  T must not have TREE_OVERFLOW set.  */

void
cache_integer_cst (tree t)
{
  tree type = TREE_TYPE (t);
  /* IX >= 0 selects a slot in the per-type cache of size LIMIT.  */
  int ix = -1;
  int limit = 0;
  int prec = TYPE_PRECISION (type);

  gcc_assert (!TREE_OVERFLOW (t));

  switch (TREE_CODE (type))
    {
    case NULLPTR_TYPE:
      gcc_assert (integer_zerop (t));
      /* Fallthru.  */

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Cache NULL pointer.  */
      if (integer_zerop (t))
	{
	  limit = 1;
	  ix = 0;
	}
      break;

    case BOOLEAN_TYPE:
      /* Cache false or true.  */
      limit = 2;
      if (wi::ltu_p (wi::to_wide (t), 2))
	ix = TREE_INT_CST_ELT (t, 0);
      break;

    case INTEGER_TYPE:
    case OFFSET_TYPE:
      if (TYPE_UNSIGNED (type))
	{
	  /* Cache 0..N */
	  limit = param_integer_share_limit;

	  /* This is a little hokie, but if the prec is smaller than
	     what is necessary to hold param_integer_share_limit, then the
	     obvious test will not get the correct answer.  */
	  if (prec < HOST_BITS_PER_WIDE_INT)
	    {
	      if (tree_to_uhwi (t)
		  < (unsigned HOST_WIDE_INT) param_integer_share_limit)
		ix = tree_to_uhwi (t);
	    }
	  else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
	    ix = tree_to_uhwi (t);
	}
      else
	{
	  /* Cache -1..N */
	  limit = param_integer_share_limit + 1;

	  if (integer_minus_onep (t))
	    ix = 0;
	  else if (!wi::neg_p (wi::to_wide (t)))
	    {
	      /* Non-negative value: slot is value + 1 (slot 0 is -1).  */
	      if (prec < HOST_BITS_PER_WIDE_INT)
		{
		  if (tree_to_shwi (t) < param_integer_share_limit)
		    ix = tree_to_shwi (t) + 1;
		}
	      else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
		ix = tree_to_shwi (t) + 1;
	    }
	}
      break;

    case ENUMERAL_TYPE:
      /* Enumeral constants are never cached in the small vector.  */
      break;

    default:
      gcc_unreachable ();
    }

  if (ix >= 0)
    {
      /* Look for it in the type's vector of small shared ints.  */
      if (!TYPE_CACHED_VALUES_P (type))
	{
	  TYPE_CACHED_VALUES_P (type) = 1;
	  TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
	}

      /* The slot must be free: T is the first constant of this value.  */
      gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
      TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
    }
  else
    {
      /* Use the cache of larger shared ints.  */
      tree *slot = int_cst_hash_table->find_slot (t, INSERT);
      /* If there is already an entry for the number verify it's the
	 same.  */
      if (*slot)
	gcc_assert (wi::to_wide (tree (*slot)) == wi::to_wide (t));
      else
	/* Otherwise insert this one into the hash table.  */
	*slot = t;
    }
}
1805
1806
1807 /* Builds an integer constant in TYPE such that lowest BITS bits are ones
1808 and the rest are zeros. */
1809
1810 tree
1811 build_low_bits_mask (tree type, unsigned bits)
1812 {
1813 gcc_assert (bits <= TYPE_PRECISION (type));
1814
1815 return wide_int_to_tree (type, wi::mask (bits, false,
1816 TYPE_PRECISION (type)));
1817 }
1818
1819 /* Checks that X is integer constant that can be expressed in (unsigned)
1820 HOST_WIDE_INT without loss of precision. */
1821
1822 bool
1823 cst_and_fits_in_hwi (const_tree x)
1824 {
1825 return (TREE_CODE (x) == INTEGER_CST
1826 && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
1827 }
1828
1829 /* Build a newly constructed VECTOR_CST with the given values of
1830 (VECTOR_CST_)LOG2_NPATTERNS and (VECTOR_CST_)NELTS_PER_PATTERN. */
1831
1832 tree
1833 make_vector (unsigned log2_npatterns,
1834 unsigned int nelts_per_pattern MEM_STAT_DECL)
1835 {
1836 gcc_assert (IN_RANGE (nelts_per_pattern, 1, 3));
1837 tree t;
1838 unsigned npatterns = 1 << log2_npatterns;
1839 unsigned encoded_nelts = npatterns * nelts_per_pattern;
1840 unsigned length = (sizeof (struct tree_vector)
1841 + (encoded_nelts - 1) * sizeof (tree));
1842
1843 record_node_allocation_statistics (VECTOR_CST, length);
1844
1845 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1846
1847 TREE_SET_CODE (t, VECTOR_CST);
1848 TREE_CONSTANT (t) = 1;
1849 VECTOR_CST_LOG2_NPATTERNS (t) = log2_npatterns;
1850 VECTOR_CST_NELTS_PER_PATTERN (t) = nelts_per_pattern;
1851
1852 return t;
1853 }
1854
/* Return a new VECTOR_CST node whose type is TYPE and whose values
   are extracted from V, a vector of CONSTRUCTOR_ELT.  */

tree
build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
{
  /* An empty CONSTRUCTOR stands for an all-zeros vector.  */
  if (vec_safe_length (v) == 0)
    return build_zero_cst (type);

  unsigned HOST_WIDE_INT idx, nelts;
  tree value;

  /* We can't construct a VECTOR_CST for a variable number of elements.  */
  nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
  tree_vector_builder vec (type, nelts, 1);
  FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
    {
      if (TREE_CODE (value) == VECTOR_CST)
	{
	  /* A nested vector contributes all of its elements.
	     If NELTS is constant then this must be too.  */
	  unsigned int sub_nelts = VECTOR_CST_NELTS (value).to_constant ();
	  for (unsigned i = 0; i < sub_nelts; ++i)
	    vec.quick_push (VECTOR_CST_ELT (value, i));
	}
      else
	vec.quick_push (value);
    }
  /* Elements the CONSTRUCTOR left unspecified default to zero.  */
  while (vec.length () < nelts)
    vec.quick_push (build_zero_cst (TREE_TYPE (type)));

  return vec.build ();
}
1887
/* Build a vector of type VECTYPE where all the elements are SCs.
   Returns a VECTOR_CST when SC is constant, otherwise a
   VEC_DUPLICATE_EXPR (variable-length vectors) or a CONSTRUCTOR.  */
tree
build_vector_from_val (tree vectype, tree sc)
{
  unsigned HOST_WIDE_INT i, nunits;

  if (sc == error_mark_node)
    return sc;

  /* Verify that the vector type is suitable for SC.  Note that there
     is some inconsistency in the type-system with respect to restrict
     qualifications of pointers.  Vector types always have a main-variant
     element type and the qualification is applied to the vector-type.
     So TREE_TYPE (vector-type) does not return a properly qualified
     vector element-type.  */
  gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
					   TREE_TYPE (vectype)));

  if (CONSTANT_CLASS_P (sc))
    {
      /* One pattern, one element per pattern: the canonical encoding
	 of a uniform constant vector.  */
      tree_vector_builder v (vectype, 1, 1);
      v.quick_push (sc);
      return v.build ();
    }
  else if (!TYPE_VECTOR_SUBPARTS (vectype).is_constant (&nunits))
    /* Variable-length vector: fold to a VEC_DUPLICATE_EXPR.  */
    return fold_build1 (VEC_DUPLICATE_EXPR, vectype, sc);
  else
    {
      /* Non-constant SC with a fixed element count: build a
	 CONSTRUCTOR repeating SC.  */
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, nunits);
      for (i = 0; i < nunits; ++i)
	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
      return build_constructor (vectype, v);
    }
}
1923
1924 /* If TYPE is not a vector type, just return SC, otherwise return
1925 build_vector_from_val (TYPE, SC). */
1926
1927 tree
1928 build_uniform_cst (tree type, tree sc)
1929 {
1930 if (!VECTOR_TYPE_P (type))
1931 return sc;
1932
1933 return build_vector_from_val (type, sc);
1934 }
1935
1936 /* Build a vector series of type TYPE in which element I has the value
1937 BASE + I * STEP. The result is a constant if BASE and STEP are constant
1938 and a VEC_SERIES_EXPR otherwise. */
1939
1940 tree
1941 build_vec_series (tree type, tree base, tree step)
1942 {
1943 if (integer_zerop (step))
1944 return build_vector_from_val (type, base);
1945 if (TREE_CODE (base) == INTEGER_CST && TREE_CODE (step) == INTEGER_CST)
1946 {
1947 tree_vector_builder builder (type, 1, 3);
1948 tree elt1 = wide_int_to_tree (TREE_TYPE (base),
1949 wi::to_wide (base) + wi::to_wide (step));
1950 tree elt2 = wide_int_to_tree (TREE_TYPE (base),
1951 wi::to_wide (elt1) + wi::to_wide (step));
1952 builder.quick_push (base);
1953 builder.quick_push (elt1);
1954 builder.quick_push (elt2);
1955 return builder.build ();
1956 }
1957 return build2 (VEC_SERIES_EXPR, type, base, step);
1958 }
1959
/* Return a vector with the same number of units and number of bits
   as VEC_TYPE, but in which the elements are a linear series of unsigned
   integers { BASE, BASE + STEP, BASE + STEP * 2, ... }.  */

tree
build_index_vector (tree vec_type, poly_uint64 base, poly_uint64 step)
{
  tree index_vec_type = vec_type;
  tree index_elt_type = TREE_TYPE (vec_type);
  poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vec_type);
  if (!INTEGRAL_TYPE_P (index_elt_type) || !TYPE_UNSIGNED (index_elt_type))
    {
      /* The element type is not an unsigned integer: substitute an
	 unsigned integer type of the same bit width, so the result
	 still matches VEC_TYPE's total size.  */
      index_elt_type = build_nonstandard_integer_type
	(GET_MODE_BITSIZE (SCALAR_TYPE_MODE (index_elt_type)), true);
      index_vec_type = build_vector_type (index_elt_type, nunits);
    }

  /* Encode the linear series by its first three elements.  */
  tree_vector_builder v (index_vec_type, 1, 3);
  for (unsigned int i = 0; i < 3; ++i)
    v.quick_push (build_int_cstu (index_elt_type, base + i * step));
  return v.build ();
}
1982
/* Return a VECTOR_CST of type VEC_TYPE in which the first NUM_A
   elements are A and the rest are B.  */

tree
build_vector_a_then_b (tree vec_type, unsigned int num_a, tree a, tree b)
{
  gcc_assert (known_le (num_a, TYPE_VECTOR_SUBPARTS (vec_type)));
  unsigned int count = constant_lower_bound (TYPE_VECTOR_SUBPARTS (vec_type));
  /* Optimize the constant case: with an even, fixed element count we
     can use half as many patterns (each pattern then supplies two of
     the encoded elements below).  */
  if ((count & 1) == 0 && TYPE_VECTOR_SUBPARTS (vec_type).is_constant ())
    count /= 2;
  /* COUNT patterns, two elements per pattern; element I of the
     encoding is A while I < NUM_A and B afterwards.  */
  tree_vector_builder builder (vec_type, count, 2);
  for (unsigned int i = 0; i < count * 2; ++i)
    builder.quick_push (i < num_a ? a : b);
  return builder.build ();
}
1999
2000 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
2001 calculate TREE_CONSTANT and TREE_SIDE_EFFECTS. */
2002
2003 void
2004 recompute_constructor_flags (tree c)
2005 {
2006 unsigned int i;
2007 tree val;
2008 bool constant_p = true;
2009 bool side_effects_p = false;
2010 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2011
2012 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2013 {
2014 /* Mostly ctors will have elts that don't have side-effects, so
2015 the usual case is to scan all the elements. Hence a single
2016 loop for both const and side effects, rather than one loop
2017 each (with early outs). */
2018 if (!TREE_CONSTANT (val))
2019 constant_p = false;
2020 if (TREE_SIDE_EFFECTS (val))
2021 side_effects_p = true;
2022 }
2023
2024 TREE_SIDE_EFFECTS (c) = side_effects_p;
2025 TREE_CONSTANT (c) = constant_p;
2026 }
2027
2028 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
2029 CONSTRUCTOR C. */
2030
2031 void
2032 verify_constructor_flags (tree c)
2033 {
2034 unsigned int i;
2035 tree val;
2036 bool constant_p = TREE_CONSTANT (c);
2037 bool side_effects_p = TREE_SIDE_EFFECTS (c);
2038 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2039
2040 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2041 {
2042 if (constant_p && !TREE_CONSTANT (val))
2043 internal_error ("non-constant element in constant CONSTRUCTOR");
2044 if (!side_effects_p && TREE_SIDE_EFFECTS (val))
2045 internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
2046 }
2047 }
2048
2049 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2050 are in the vec pointed to by VALS. */
2051 tree
2052 build_constructor (tree type, vec<constructor_elt, va_gc> *vals MEM_STAT_DECL)
2053 {
2054 tree c = make_node (CONSTRUCTOR PASS_MEM_STAT);
2055
2056 TREE_TYPE (c) = type;
2057 CONSTRUCTOR_ELTS (c) = vals;
2058
2059 recompute_constructor_flags (c);
2060
2061 return c;
2062 }
2063
2064 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
2065 INDEX and VALUE. */
2066 tree
2067 build_constructor_single (tree type, tree index, tree value)
2068 {
2069 vec<constructor_elt, va_gc> *v;
2070 constructor_elt elt = {index, value};
2071
2072 vec_alloc (v, 1);
2073 v->quick_push (elt);
2074
2075 return build_constructor (type, v);
2076 }
2077
2078
2079 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2080 are in a list pointed to by VALS. */
2081 tree
2082 build_constructor_from_list (tree type, tree vals)
2083 {
2084 tree t;
2085 vec<constructor_elt, va_gc> *v = NULL;
2086
2087 if (vals)
2088 {
2089 vec_alloc (v, list_length (vals));
2090 for (t = vals; t; t = TREE_CHAIN (t))
2091 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
2092 }
2093
2094 return build_constructor (type, v);
2095 }
2096
2097 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
2098 of elements, provided as index/value pairs. */
2099
2100 tree
2101 build_constructor_va (tree type, int nelts, ...)
2102 {
2103 vec<constructor_elt, va_gc> *v = NULL;
2104 va_list p;
2105
2106 va_start (p, nelts);
2107 vec_alloc (v, nelts);
2108 while (nelts--)
2109 {
2110 tree index = va_arg (p, tree);
2111 tree value = va_arg (p, tree);
2112 CONSTRUCTOR_APPEND_ELT (v, index, value);
2113 }
2114 va_end (p);
2115 return build_constructor (type, v);
2116 }
2117
2118 /* Return a node of type TYPE for which TREE_CLOBBER_P is true. */
2119
2120 tree
2121 build_clobber (tree type)
2122 {
2123 tree clobber = build_constructor (type, NULL);
2124 TREE_THIS_VOLATILE (clobber) = true;
2125 return clobber;
2126 }
2127
2128 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
2129
2130 tree
2131 build_fixed (tree type, FIXED_VALUE_TYPE f)
2132 {
2133 tree v;
2134 FIXED_VALUE_TYPE *fp;
2135
2136 v = make_node (FIXED_CST);
2137 fp = ggc_alloc<fixed_value> ();
2138 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
2139
2140 TREE_TYPE (v) = type;
2141 TREE_FIXED_CST_PTR (v) = fp;
2142 return v;
2143 }
2144
2145 /* Return a new REAL_CST node whose type is TYPE and value is D. */
2146
2147 tree
2148 build_real (tree type, REAL_VALUE_TYPE d)
2149 {
2150 tree v;
2151 REAL_VALUE_TYPE *dp;
2152 int overflow = 0;
2153
2154 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
2155 Consider doing it via real_convert now. */
2156
2157 v = make_node (REAL_CST);
2158 dp = ggc_alloc<real_value> ();
2159 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
2160
2161 TREE_TYPE (v) = type;
2162 TREE_REAL_CST_PTR (v) = dp;
2163 TREE_OVERFLOW (v) = overflow;
2164 return v;
2165 }
2166
2167 /* Like build_real, but first truncate D to the type. */
2168
2169 tree
2170 build_real_truncate (tree type, REAL_VALUE_TYPE d)
2171 {
2172 return build_real (type, real_value_truncate (TYPE_MODE (type), d));
2173 }
2174
2175 /* Return a new REAL_CST node whose type is TYPE
2176 and whose value is the integer value of the INTEGER_CST node I. */
2177
2178 REAL_VALUE_TYPE
2179 real_value_from_int_cst (const_tree type, const_tree i)
2180 {
2181 REAL_VALUE_TYPE d;
2182
2183 /* Clear all bits of the real value type so that we can later do
2184 bitwise comparisons to see if two values are the same. */
2185 memset (&d, 0, sizeof d);
2186
2187 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, wi::to_wide (i),
2188 TYPE_SIGN (TREE_TYPE (i)));
2189 return d;
2190 }
2191
2192 /* Given a tree representing an integer constant I, return a tree
2193 representing the same value as a floating-point constant of type TYPE. */
2194
2195 tree
2196 build_real_from_int_cst (tree type, const_tree i)
2197 {
2198 tree v;
2199 int overflow = TREE_OVERFLOW (i);
2200
2201 v = build_real (type, real_value_from_int_cst (type, i));
2202
2203 TREE_OVERFLOW (v) |= overflow;
2204 return v;
2205 }
2206
/* Return a newly constructed STRING_CST node whose value is
   the LEN characters at STR.
   Note that for a C string literal, LEN should include the trailing NUL.
   The TREE_TYPE is not initialized.  */

tree
build_string (int len, const char *str)
{
  tree s;
  size_t length;

  /* Do not waste bytes provided by padding of struct tree_string.
     One extra byte is added for the forced NUL terminator below.  */
  length = len + offsetof (struct tree_string, str) + 1;

  record_node_allocation_statistics (STRING_CST, length);

  s = (tree) ggc_internal_alloc (length);

  /* Only the common typed header is zeroed; the string payload is
     fully written just below.  */
  memset (s, 0, sizeof (struct tree_typed));
  TREE_SET_CODE (s, STRING_CST);
  TREE_CONSTANT (s) = 1;
  TREE_STRING_LENGTH (s) = len;
  memcpy (s->string.str, str, len);
  /* Always NUL-terminate, regardless of whether STR already was.  */
  s->string.str[len] = '\0';

  return s;
}
2234
2235 /* Return a newly constructed COMPLEX_CST node whose value is
2236 specified by the real and imaginary parts REAL and IMAG.
2237 Both REAL and IMAG should be constant nodes. TYPE, if specified,
2238 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
2239
2240 tree
2241 build_complex (tree type, tree real, tree imag)
2242 {
2243 gcc_assert (CONSTANT_CLASS_P (real));
2244 gcc_assert (CONSTANT_CLASS_P (imag));
2245
2246 tree t = make_node (COMPLEX_CST);
2247
2248 TREE_REALPART (t) = real;
2249 TREE_IMAGPART (t) = imag;
2250 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
2251 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
2252 return t;
2253 }
2254
2255 /* Build a complex (inf +- 0i), such as for the result of cproj.
2256 TYPE is the complex tree type of the result. If NEG is true, the
2257 imaginary zero is negative. */
2258
2259 tree
2260 build_complex_inf (tree type, bool neg)
2261 {
2262 REAL_VALUE_TYPE rinf, rzero = dconst0;
2263
2264 real_inf (&rinf);
2265 rzero.sign = neg;
2266 return build_complex (type, build_real (TREE_TYPE (type), rinf),
2267 build_real (TREE_TYPE (type), rzero));
2268 }
2269
2270 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2271 element is set to 1. In particular, this is 1 + i for complex types. */
2272
2273 tree
2274 build_each_one_cst (tree type)
2275 {
2276 if (TREE_CODE (type) == COMPLEX_TYPE)
2277 {
2278 tree scalar = build_one_cst (TREE_TYPE (type));
2279 return build_complex (type, scalar, scalar);
2280 }
2281 else
2282 return build_one_cst (type);
2283 }
2284
/* Return a constant of arithmetic type TYPE which is the
   multiplicative identity of the set TYPE.  */

tree
build_one_cst (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      return build_int_cst (type, 1);

    case REAL_TYPE:
      return build_real (type, dconst1);

    case FIXED_POINT_TYPE:
      /* We can only generate 1 for accum types.  */
      gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
      return build_fixed (type, FCONST1 (TYPE_MODE (type)));

    case VECTOR_TYPE:
      {
	/* A vector one is the one of the element type duplicated
	   across all lanes.  */
	tree scalar = build_one_cst (TREE_TYPE (type));

	return build_vector_from_val (type, scalar);
      }

    case COMPLEX_TYPE:
      /* The complex multiplicative identity is 1 + 0i.  */
      return build_complex (type,
			    build_one_cst (TREE_TYPE (type)),
			    build_zero_cst (TREE_TYPE (type)));

    default:
      gcc_unreachable ();
    }
}
2322
2323 /* Return an integer of type TYPE containing all 1's in as much precision as
2324 it contains, or a complex or vector whose subparts are such integers. */
2325
2326 tree
2327 build_all_ones_cst (tree type)
2328 {
2329 if (TREE_CODE (type) == COMPLEX_TYPE)
2330 {
2331 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2332 return build_complex (type, scalar, scalar);
2333 }
2334 else
2335 return build_minus_one_cst (type);
2336 }
2337
/* Return a constant of arithmetic type TYPE which is the
   opposite of the multiplicative identity of the set TYPE.  */

tree
build_minus_one_cst (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      return build_int_cst (type, -1);

    case REAL_TYPE:
      return build_real (type, dconstm1);

    case FIXED_POINT_TYPE:
      /* We can only generate -1 for accum types.  */
      gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
      return build_fixed (type,
			  fixed_from_double_int (double_int_minus_one,
						 SCALAR_TYPE_MODE (type)));

    case VECTOR_TYPE:
      {
	/* A vector -1 is the element-type -1 duplicated across lanes.  */
	tree scalar = build_minus_one_cst (TREE_TYPE (type));

	return build_vector_from_val (type, scalar);
      }

    case COMPLEX_TYPE:
      /* The complex value is -1 + 0i.  */
      return build_complex (type,
			    build_minus_one_cst (TREE_TYPE (type)),
			    build_zero_cst (TREE_TYPE (type)));

    default:
      gcc_unreachable ();
    }
}
2377
/* Build 0 constant of type TYPE.  This is used by constructor folding
   and thus the constant should be represented in memory by
   zero(es).  */

tree
build_zero_cst (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE: case NULLPTR_TYPE:
      return build_int_cst (type, 0);

    case REAL_TYPE:
      return build_real (type, dconst0);

    case FIXED_POINT_TYPE:
      return build_fixed (type, FCONST0 (TYPE_MODE (type)));

    case VECTOR_TYPE:
      {
	/* A zero vector is the element-type zero in every lane.  */
	tree scalar = build_zero_cst (TREE_TYPE (type));

	return build_vector_from_val (type, scalar);
      }

    case COMPLEX_TYPE:
      {
	/* Complex zero is 0 + 0i.  */
	tree zero = build_zero_cst (TREE_TYPE (type));

	return build_complex (type, zero, zero);
      }

    default:
      /* For aggregates, an empty CONSTRUCTOR denotes all-zero storage;
	 other scalar types are handled by conversion from zero.  */
      if (!AGGREGATE_TYPE_P (type))
	return fold_convert (type, integer_zero_node);
      return build_constructor (type, NULL);
    }
}
2418
2419
/* Build a BINFO with LEN language slots.  */

tree
make_tree_binfo (unsigned base_binfos MEM_STAT_DECL)
{
  tree t;
  /* The node is sized to hold the embedded base-binfo vector tail.  */
  size_t length = (offsetof (struct tree_binfo, base_binfos)
		   + vec<tree, va_gc>::embedded_size (base_binfos));

  record_node_allocation_statistics (TREE_BINFO, length);

  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);

  /* Zero only the fixed header; the trailing vector storage is set up
     by embedded_init below.  */
  memset (t, 0, offsetof (struct tree_binfo, base_binfos));

  TREE_SET_CODE (t, TREE_BINFO);

  BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);

  return t;
}
2441
2442 /* Create a CASE_LABEL_EXPR tree node and return it. */
2443
2444 tree
2445 build_case_label (tree low_value, tree high_value, tree label_decl)
2446 {
2447 tree t = make_node (CASE_LABEL_EXPR);
2448
2449 TREE_TYPE (t) = void_type_node;
2450 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2451
2452 CASE_LOW (t) = low_value;
2453 CASE_HIGH (t) = high_value;
2454 CASE_LABEL (t) = label_decl;
2455 CASE_CHAIN (t) = NULL_TREE;
2456
2457 return t;
2458 }
2459
/* Build a newly constructed INTEGER_CST node.  LEN and EXT_LEN are the
   values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
   The latter determines the length of the HOST_WIDE_INT vector.  */

tree
make_int_cst (int len, int ext_len MEM_STAT_DECL)
{
  tree t;
  /* One HOST_WIDE_INT element is already part of struct tree_int_cst.  */
  int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
		+ sizeof (struct tree_int_cst));

  gcc_assert (len);
  record_node_allocation_statistics (INTEGER_CST, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, INTEGER_CST);
  TREE_INT_CST_NUNITS (t) = len;
  TREE_INT_CST_EXT_NUNITS (t) = ext_len;
  /* to_offset can only be applied to trees that are offset_int-sized
     or smaller.  EXT_LEN is correct if it fits, otherwise the constant
     must be exactly the precision of offset_int and so LEN is correct.  */
  if (ext_len <= OFFSET_INT_ELTS)
    TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
  else
    TREE_INT_CST_OFFSET_NUNITS (t) = len;

  TREE_CONSTANT (t) = 1;

  return t;
}
2491
2492 /* Build a newly constructed TREE_VEC node of length LEN. */
2493
2494 tree
2495 make_tree_vec (int len MEM_STAT_DECL)
2496 {
2497 tree t;
2498 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2499
2500 record_node_allocation_statistics (TREE_VEC, length);
2501
2502 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2503
2504 TREE_SET_CODE (t, TREE_VEC);
2505 TREE_VEC_LENGTH (t) = len;
2506
2507 return t;
2508 }
2509
/* Grow a TREE_VEC node to new length LEN.  Returns the (possibly
   moved) node; callers must use the returned pointer.  */

tree
grow_tree_vec (tree v, int len MEM_STAT_DECL)
{
  gcc_assert (TREE_CODE (v) == TREE_VEC);

  int oldlen = TREE_VEC_LENGTH (v);
  /* Only growing is supported.  */
  gcc_assert (len > oldlen);

  size_t oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
  size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);

  /* Record only the delta, since the old size was already counted.  */
  record_node_allocation_statistics (TREE_VEC, length - oldlength);

  v = (tree) ggc_realloc (v, length PASS_MEM_STAT);

  TREE_VEC_LENGTH (v) = len;

  return v;
}
2531 \f
2532 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2533 fixed, and scalar, complex or vector. */
2534
2535 bool
2536 zerop (const_tree expr)
2537 {
2538 return (integer_zerop (expr)
2539 || real_zerop (expr)
2540 || fixed_zerop (expr));
2541 }
2542
2543 /* Return 1 if EXPR is the integer constant zero or a complex constant
2544 of zero, or a location wrapper for such a constant. */
2545
2546 bool
2547 integer_zerop (const_tree expr)
2548 {
2549 STRIP_ANY_LOCATION_WRAPPER (expr);
2550
2551 switch (TREE_CODE (expr))
2552 {
2553 case INTEGER_CST:
2554 return wi::to_wide (expr) == 0;
2555 case COMPLEX_CST:
2556 return (integer_zerop (TREE_REALPART (expr))
2557 && integer_zerop (TREE_IMAGPART (expr)));
2558 case VECTOR_CST:
2559 return (VECTOR_CST_NPATTERNS (expr) == 1
2560 && VECTOR_CST_DUPLICATE_P (expr)
2561 && integer_zerop (VECTOR_CST_ENCODED_ELT (expr, 0)));
2562 default:
2563 return false;
2564 }
2565 }
2566
2567 /* Return 1 if EXPR is the integer constant one or the corresponding
2568 complex constant, or a location wrapper for such a constant. */
2569
2570 bool
2571 integer_onep (const_tree expr)
2572 {
2573 STRIP_ANY_LOCATION_WRAPPER (expr);
2574
2575 switch (TREE_CODE (expr))
2576 {
2577 case INTEGER_CST:
2578 return wi::eq_p (wi::to_widest (expr), 1);
2579 case COMPLEX_CST:
2580 return (integer_onep (TREE_REALPART (expr))
2581 && integer_zerop (TREE_IMAGPART (expr)));
2582 case VECTOR_CST:
2583 return (VECTOR_CST_NPATTERNS (expr) == 1
2584 && VECTOR_CST_DUPLICATE_P (expr)
2585 && integer_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2586 default:
2587 return false;
2588 }
2589 }
2590
2591 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2592 return 1 if every piece is the integer constant one.
2593 Also return 1 for location wrappers for such a constant. */
2594
2595 bool
2596 integer_each_onep (const_tree expr)
2597 {
2598 STRIP_ANY_LOCATION_WRAPPER (expr);
2599
2600 if (TREE_CODE (expr) == COMPLEX_CST)
2601 return (integer_onep (TREE_REALPART (expr))
2602 && integer_onep (TREE_IMAGPART (expr)));
2603 else
2604 return integer_onep (expr);
2605 }
2606
2607 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2608 it contains, or a complex or vector whose subparts are such integers,
2609 or a location wrapper for such a constant. */
2610
2611 bool
2612 integer_all_onesp (const_tree expr)
2613 {
2614 STRIP_ANY_LOCATION_WRAPPER (expr);
2615
2616 if (TREE_CODE (expr) == COMPLEX_CST
2617 && integer_all_onesp (TREE_REALPART (expr))
2618 && integer_all_onesp (TREE_IMAGPART (expr)))
2619 return true;
2620
2621 else if (TREE_CODE (expr) == VECTOR_CST)
2622 return (VECTOR_CST_NPATTERNS (expr) == 1
2623 && VECTOR_CST_DUPLICATE_P (expr)
2624 && integer_all_onesp (VECTOR_CST_ENCODED_ELT (expr, 0)));
2625
2626 else if (TREE_CODE (expr) != INTEGER_CST)
2627 return false;
2628
2629 return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED)
2630 == wi::to_wide (expr));
2631 }
2632
2633 /* Return 1 if EXPR is the integer constant minus one, or a location wrapper
2634 for such a constant. */
2635
2636 bool
2637 integer_minus_onep (const_tree expr)
2638 {
2639 STRIP_ANY_LOCATION_WRAPPER (expr);
2640
2641 if (TREE_CODE (expr) == COMPLEX_CST)
2642 return (integer_all_onesp (TREE_REALPART (expr))
2643 && integer_zerop (TREE_IMAGPART (expr)));
2644 else
2645 return integer_all_onesp (expr);
2646 }
2647
2648 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2649 one bit on), or a location wrapper for such a constant. */
2650
2651 bool
2652 integer_pow2p (const_tree expr)
2653 {
2654 STRIP_ANY_LOCATION_WRAPPER (expr);
2655
2656 if (TREE_CODE (expr) == COMPLEX_CST
2657 && integer_pow2p (TREE_REALPART (expr))
2658 && integer_zerop (TREE_IMAGPART (expr)))
2659 return true;
2660
2661 if (TREE_CODE (expr) != INTEGER_CST)
2662 return false;
2663
2664 return wi::popcount (wi::to_wide (expr)) == 1;
2665 }
2666
2667 /* Return 1 if EXPR is an integer constant other than zero or a
2668 complex constant other than zero, or a location wrapper for such a
2669 constant. */
2670
2671 bool
2672 integer_nonzerop (const_tree expr)
2673 {
2674 STRIP_ANY_LOCATION_WRAPPER (expr);
2675
2676 return ((TREE_CODE (expr) == INTEGER_CST
2677 && wi::to_wide (expr) != 0)
2678 || (TREE_CODE (expr) == COMPLEX_CST
2679 && (integer_nonzerop (TREE_REALPART (expr))
2680 || integer_nonzerop (TREE_IMAGPART (expr)))));
2681 }
2682
2683 /* Return 1 if EXPR is the integer constant one. For vector,
2684 return 1 if every piece is the integer constant minus one
2685 (representing the value TRUE).
2686 Also return 1 for location wrappers for such a constant. */
2687
2688 bool
2689 integer_truep (const_tree expr)
2690 {
2691 STRIP_ANY_LOCATION_WRAPPER (expr);
2692
2693 if (TREE_CODE (expr) == VECTOR_CST)
2694 return integer_all_onesp (expr);
2695 return integer_onep (expr);
2696 }
2697
2698 /* Return 1 if EXPR is the fixed-point constant zero, or a location wrapper
2699 for such a constant. */
2700
2701 bool
2702 fixed_zerop (const_tree expr)
2703 {
2704 STRIP_ANY_LOCATION_WRAPPER (expr);
2705
2706 return (TREE_CODE (expr) == FIXED_CST
2707 && TREE_FIXED_CST (expr).data.is_zero ());
2708 }
2709
2710 /* Return the power of two represented by a tree node known to be a
2711 power of two. */
2712
2713 int
2714 tree_log2 (const_tree expr)
2715 {
2716 if (TREE_CODE (expr) == COMPLEX_CST)
2717 return tree_log2 (TREE_REALPART (expr));
2718
2719 return wi::exact_log2 (wi::to_wide (expr));
2720 }
2721
/* Similar, but return the largest integer Y such that 2 ** Y is less
   than or equal to EXPR.  */

int
tree_floor_log2 (const_tree expr)
{
  /* NOTE(review): for COMPLEX_CST this calls tree_log2 (exact log2) on
     the real part rather than a floor variant — presumably callers only
     pass exact powers of two here; confirm before changing.  */
  if (TREE_CODE (expr) == COMPLEX_CST)
    return tree_log2 (TREE_REALPART (expr));

  return wi::floor_log2 (wi::to_wide (expr));
}
2733
/* Return number of known trailing zero bits in EXPR, or, if the value of
   EXPR is known to be zero, the precision of it's type.  */

unsigned int
tree_ctz (const_tree expr)
{
  /* Only integral and pointer values have meaningful trailing zeros.  */
  if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
      && !POINTER_TYPE_P (TREE_TYPE (expr)))
    return 0;

  unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      ret1 = wi::ctz (wi::to_wide (expr));
      return MIN (ret1, prec);
    case SSA_NAME:
      /* Use whatever nonzero-bits information the SSA pass recorded.  */
      ret1 = wi::ctz (get_nonzero_bits (expr));
      return MIN (ret1, prec);
    case PLUS_EXPR:
    case MINUS_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      /* These preserve only the smaller of the operands' counts.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      if (ret1 == 0)
	return ret1;
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      return MIN (ret1, ret2);
    case POINTER_PLUS_EXPR:
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      /* Second operand is sizetype, which could be in theory
	 wider than pointer's precision.  Make sure we never
	 return more than prec.  */
      ret2 = MIN (ret2, prec);
      return MIN (ret1, ret2);
    case BIT_AND_EXPR:
      /* AND can only clear bits, so the larger count survives.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      return MAX (ret1, ret2);
    case MULT_EXPR:
      /* Multiplication adds the operands' trailing-zero counts.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      return MIN (ret1 + ret2, prec);
    case LSHIFT_EXPR:
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
	  && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
	{
	  /* A known in-range shift count adds that many zeros.  */
	  ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
	  return MIN (ret1 + ret2, prec);
	}
      return ret1;
    case RSHIFT_EXPR:
      if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
	  && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
	{
	  /* A known in-range shift count removes that many zeros.  */
	  ret1 = tree_ctz (TREE_OPERAND (expr, 0));
	  ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
	  if (ret1 > ret2)
	    return ret1 - ret2;
	}
      return 0;
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* Division by a positive power of two acts like a right shift.  */
      if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
	  && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
	{
	  int l = tree_log2 (TREE_OPERAND (expr, 1));
	  if (l >= 0)
	    {
	      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
	      ret2 = l;
	      if (ret1 > ret2)
		return ret1 - ret2;
	    }
	}
      return 0;
    CASE_CONVERT:
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      /* An operand known to be zero stays zero after conversion.  */
      if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
	ret1 = prec;
      return MIN (ret1, prec);
    case SAVE_EXPR:
      return tree_ctz (TREE_OPERAND (expr, 0));
    case COND_EXPR:
      /* Both arms must provide the trailing zeros.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 1));
      if (ret1 == 0)
	return 0;
      ret2 = tree_ctz (TREE_OPERAND (expr, 2));
      return MIN (ret1, ret2);
    case COMPOUND_EXPR:
      return tree_ctz (TREE_OPERAND (expr, 1));
    case ADDR_EXPR:
      /* Derive trailing zeros from the known pointer alignment.  */
      ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
      if (ret1 > BITS_PER_UNIT)
	{
	  ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
	  return MIN (ret1, prec);
	}
      return 0;
    default:
      return 0;
    }
}
2844
/* Return 1 if EXPR is the real constant zero.  Trailing zeroes matter for
   decimal float constants, so don't return 1 for them.
   Also return 1 for location wrappers around such a constant.  */

bool
real_zerop (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  switch (TREE_CODE (expr))
    {
    case REAL_CST:
      return real_equal (&TREE_REAL_CST (expr), &dconst0)
	     && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
    case COMPLEX_CST:
      return real_zerop (TREE_REALPART (expr))
	     && real_zerop (TREE_IMAGPART (expr));
    case VECTOR_CST:
      {
	/* Don't simply check for a duplicate because the predicate
	   accepts both +0.0 and -0.0.  */
	unsigned count = vector_cst_encoded_nelts (expr);
	for (unsigned int i = 0; i < count; ++i)
	  if (!real_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
	    return false;
	return true;
      }
    default:
      return false;
    }
}
2876
2877 /* Return 1 if EXPR is the real constant one in real or complex form.
2878 Trailing zeroes matter for decimal float constants, so don't return
2879 1 for them.
2880 Also return 1 for location wrappers around such a constant. */
2881
2882 bool
2883 real_onep (const_tree expr)
2884 {
2885 STRIP_ANY_LOCATION_WRAPPER (expr);
2886
2887 switch (TREE_CODE (expr))
2888 {
2889 case REAL_CST:
2890 return real_equal (&TREE_REAL_CST (expr), &dconst1)
2891 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2892 case COMPLEX_CST:
2893 return real_onep (TREE_REALPART (expr))
2894 && real_zerop (TREE_IMAGPART (expr));
2895 case VECTOR_CST:
2896 return (VECTOR_CST_NPATTERNS (expr) == 1
2897 && VECTOR_CST_DUPLICATE_P (expr)
2898 && real_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2899 default:
2900 return false;
2901 }
2902 }
2903
2904 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2905 matter for decimal float constants, so don't return 1 for them.
2906 Also return 1 for location wrappers around such a constant. */
2907
2908 bool
2909 real_minus_onep (const_tree expr)
2910 {
2911 STRIP_ANY_LOCATION_WRAPPER (expr);
2912
2913 switch (TREE_CODE (expr))
2914 {
2915 case REAL_CST:
2916 return real_equal (&TREE_REAL_CST (expr), &dconstm1)
2917 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2918 case COMPLEX_CST:
2919 return real_minus_onep (TREE_REALPART (expr))
2920 && real_zerop (TREE_IMAGPART (expr));
2921 case VECTOR_CST:
2922 return (VECTOR_CST_NPATTERNS (expr) == 1
2923 && VECTOR_CST_DUPLICATE_P (expr)
2924 && real_minus_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2925 default:
2926 return false;
2927 }
2928 }
2929
2930 /* Nonzero if EXP is a constant or a cast of a constant. */
2931
2932 bool
2933 really_constant_p (const_tree exp)
2934 {
2935 /* This is not quite the same as STRIP_NOPS. It does more. */
2936 while (CONVERT_EXPR_P (exp)
2937 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2938 exp = TREE_OPERAND (exp, 0);
2939 return TREE_CONSTANT (exp);
2940 }
2941
/* Return true if T holds a polynomial pointer difference, storing it in
   *VALUE if so.  A true return means that T's precision is no greater
   than 64 bits, which is the largest address space we support, so *VALUE
   never loses precision.  However, the signedness of the result does
   not necessarily match the signedness of T: sometimes an unsigned type
   like sizetype is used to encode a value that is actually negative.  */

bool
ptrdiff_tree_p (const_tree t, poly_int64_pod *value)
{
  if (!t)
    return false;
  if (TREE_CODE (t) == INTEGER_CST)
    {
      if (!cst_and_fits_in_hwi (t))
	return false;
      *value = int_cst_value (t);
      return true;
    }
  if (POLY_INT_CST_P (t))
    {
      /* Check every coefficient before writing any of them, so *VALUE
	 is untouched on failure.  */
      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	if (!cst_and_fits_in_hwi (POLY_INT_CST_COEFF (t, i)))
	  return false;
      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	value->coeffs[i] = int_cst_value (POLY_INT_CST_COEFF (t, i));
      return true;
    }
  return false;
}
2972
2973 poly_int64
2974 tree_to_poly_int64 (const_tree t)
2975 {
2976 gcc_assert (tree_fits_poly_int64_p (t));
2977 if (POLY_INT_CST_P (t))
2978 return poly_int_cst_value (t).force_shwi ();
2979 return TREE_INT_CST_LOW (t);
2980 }
2981
2982 poly_uint64
2983 tree_to_poly_uint64 (const_tree t)
2984 {
2985 gcc_assert (tree_fits_poly_uint64_p (t));
2986 if (POLY_INT_CST_P (t))
2987 return poly_int_cst_value (t).force_uhwi ();
2988 return TREE_INT_CST_LOW (t);
2989 }
2990 \f
2991 /* Return first list element whose TREE_VALUE is ELEM.
2992 Return 0 if ELEM is not in LIST. */
2993
2994 tree
2995 value_member (tree elem, tree list)
2996 {
2997 while (list)
2998 {
2999 if (elem == TREE_VALUE (list))
3000 return list;
3001 list = TREE_CHAIN (list);
3002 }
3003 return NULL_TREE;
3004 }
3005
3006 /* Return first list element whose TREE_PURPOSE is ELEM.
3007 Return 0 if ELEM is not in LIST. */
3008
3009 tree
3010 purpose_member (const_tree elem, tree list)
3011 {
3012 while (list)
3013 {
3014 if (elem == TREE_PURPOSE (list))
3015 return list;
3016 list = TREE_CHAIN (list);
3017 }
3018 return NULL_TREE;
3019 }
3020
3021 /* Return true if ELEM is in V. */
3022
3023 bool
3024 vec_member (const_tree elem, vec<tree, va_gc> *v)
3025 {
3026 unsigned ix;
3027 tree t;
3028 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
3029 if (elem == t)
3030 return true;
3031 return false;
3032 }
3033
3034 /* Returns element number IDX (zero-origin) of chain CHAIN, or
3035 NULL_TREE. */
3036
3037 tree
3038 chain_index (int idx, tree chain)
3039 {
3040 for (; chain && idx > 0; --idx)
3041 chain = TREE_CHAIN (chain);
3042 return chain;
3043 }
3044
3045 /* Return nonzero if ELEM is part of the chain CHAIN. */
3046
3047 bool
3048 chain_member (const_tree elem, const_tree chain)
3049 {
3050 while (chain)
3051 {
3052 if (elem == chain)
3053 return true;
3054 chain = DECL_CHAIN (chain);
3055 }
3056
3057 return false;
3058 }
3059
/* Return the length of a chain of nodes chained through TREE_CHAIN.
   We expect a null pointer to mark the end of the chain.
   This is the Lisp primitive `length'.  */

int
list_length (const_tree t)
{
  const_tree p = t;
#ifdef ENABLE_TREE_CHECKING
  /* Q trails P at half speed; if the chain is circular P will
     eventually catch up with Q and the assert fires.  */
  const_tree q = t;
#endif
  int len = 0;

  while (p)
    {
      p = TREE_CHAIN (p);
#ifdef ENABLE_TREE_CHECKING
      if (len % 2)
	q = TREE_CHAIN (q);
      gcc_assert (p != q);
#endif
      len++;
    }

  return len;
}
3086
3087 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3088 UNION_TYPE TYPE, or NULL_TREE if none. */
3089
3090 tree
3091 first_field (const_tree type)
3092 {
3093 tree t = TYPE_FIELDS (type);
3094 while (t && TREE_CODE (t) != FIELD_DECL)
3095 t = TREE_CHAIN (t);
3096 return t;
3097 }
3098
3099 /* Returns the last FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3100 UNION_TYPE TYPE, or NULL_TREE if none. */
3101
3102 tree
3103 last_field (const_tree type)
3104 {
3105 tree last = NULL_TREE;
3106
3107 for (tree fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
3108 {
3109 if (TREE_CODE (fld) != FIELD_DECL)
3110 continue;
3111
3112 last = fld;
3113 }
3114
3115 return last;
3116 }
3117
/* Concatenate two chains of nodes (chained through TREE_CHAIN)
   by modifying the last node in chain 1 to point to chain 2.
   This is the Lisp primitive `nconc'.  Destructive: OP1's last
   node is rewritten in place.  */

tree
chainon (tree op1, tree op2)
{
  tree t1;

  if (!op1)
    return op2;
  if (!op2)
    return op1;

  /* Walk to the last node of OP1, then splice OP2 on.  */
  for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
    continue;
  TREE_CHAIN (t1) = op2;

#ifdef ENABLE_TREE_CHECKING
  {
    /* Guard against creating a cycle: OP2 must not already contain
       the node we just linked to it.  */
    tree t2;
    for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
      gcc_assert (t2 != t1);
  }
#endif

  return op1;
}
3146
3147 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
3148
3149 tree
3150 tree_last (tree chain)
3151 {
3152 tree next;
3153 if (chain)
3154 while ((next = TREE_CHAIN (chain)))
3155 chain = next;
3156 return chain;
3157 }
3158
3159 /* Reverse the order of elements in the chain T,
3160 and return the new head of the chain (old last element). */
3161
3162 tree
3163 nreverse (tree t)
3164 {
3165 tree prev = 0, decl, next;
3166 for (decl = t; decl; decl = next)
3167 {
3168 /* We shouldn't be using this function to reverse BLOCK chains; we
3169 have blocks_nreverse for that. */
3170 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
3171 next = TREE_CHAIN (decl);
3172 TREE_CHAIN (decl) = prev;
3173 prev = decl;
3174 }
3175 return prev;
3176 }
3177 \f
3178 /* Return a newly created TREE_LIST node whose
3179 purpose and value fields are PARM and VALUE. */
3180
3181 tree
3182 build_tree_list (tree parm, tree value MEM_STAT_DECL)
3183 {
3184 tree t = make_node (TREE_LIST PASS_MEM_STAT);
3185 TREE_PURPOSE (t) = parm;
3186 TREE_VALUE (t) = value;
3187 return t;
3188 }
3189
3190 /* Build a chain of TREE_LIST nodes from a vector. */
3191
3192 tree
3193 build_tree_list_vec (const vec<tree, va_gc> *vec MEM_STAT_DECL)
3194 {
3195 tree ret = NULL_TREE;
3196 tree *pp = &ret;
3197 unsigned int i;
3198 tree t;
3199 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
3200 {
3201 *pp = build_tree_list (NULL, t PASS_MEM_STAT);
3202 pp = &TREE_CHAIN (*pp);
3203 }
3204 return ret;
3205 }
3206
/* Return a newly created TREE_LIST node whose
   purpose and value fields are PURPOSE and VALUE
   and whose TREE_CHAIN is CHAIN.  */

tree
tree_cons (tree purpose, tree value, tree chain MEM_STAT_DECL)
{
  tree node;

  node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
  /* Only the common header needs clearing; every list-specific field
     is assigned explicitly below.  */
  memset (node, 0, sizeof (struct tree_common));

  record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));

  TREE_SET_CODE (node, TREE_LIST);
  TREE_CHAIN (node) = chain;
  TREE_PURPOSE (node) = purpose;
  TREE_VALUE (node) = value;
  return node;
}
3227
3228 /* Return the values of the elements of a CONSTRUCTOR as a vector of
3229 trees. */
3230
3231 vec<tree, va_gc> *
3232 ctor_to_vec (tree ctor)
3233 {
3234 vec<tree, va_gc> *vec;
3235 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
3236 unsigned int ix;
3237 tree val;
3238
3239 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
3240 vec->quick_push (val);
3241
3242 return vec;
3243 }
3244 \f
3245 /* Return the size nominally occupied by an object of type TYPE
3246 when it resides in memory. The value is measured in units of bytes,
3247 and its data type is that normally used for type sizes
3248 (which is the first type created by make_signed_type or
3249 make_unsigned_type). */
3250
3251 tree
3252 size_in_bytes_loc (location_t loc, const_tree type)
3253 {
3254 tree t;
3255
3256 if (type == error_mark_node)
3257 return integer_zero_node;
3258
3259 type = TYPE_MAIN_VARIANT (type);
3260 t = TYPE_SIZE_UNIT (type);
3261
3262 if (t == 0)
3263 {
3264 lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type);
3265 return size_zero_node;
3266 }
3267
3268 return t;
3269 }
3270
3271 /* Return the size of TYPE (in bytes) as a wide integer
3272 or return -1 if the size can vary or is larger than an integer. */
3273
3274 HOST_WIDE_INT
3275 int_size_in_bytes (const_tree type)
3276 {
3277 tree t;
3278
3279 if (type == error_mark_node)
3280 return 0;
3281
3282 type = TYPE_MAIN_VARIANT (type);
3283 t = TYPE_SIZE_UNIT (type);
3284
3285 if (t && tree_fits_uhwi_p (t))
3286 return TREE_INT_CST_LOW (t);
3287 else
3288 return -1;
3289 }
3290
3291 /* Return the maximum size of TYPE (in bytes) as a wide integer
3292 or return -1 if the size can vary or is larger than an integer. */
3293
3294 HOST_WIDE_INT
3295 max_int_size_in_bytes (const_tree type)
3296 {
3297 HOST_WIDE_INT size = -1;
3298 tree size_tree;
3299
3300 /* If this is an array type, check for a possible MAX_SIZE attached. */
3301
3302 if (TREE_CODE (type) == ARRAY_TYPE)
3303 {
3304 size_tree = TYPE_ARRAY_MAX_SIZE (type);
3305
3306 if (size_tree && tree_fits_uhwi_p (size_tree))
3307 size = tree_to_uhwi (size_tree);
3308 }
3309
3310 /* If we still haven't been able to get a size, see if the language
3311 can compute a maximum size. */
3312
3313 if (size == -1)
3314 {
3315 size_tree = lang_hooks.types.max_size (type);
3316
3317 if (size_tree && tree_fits_uhwi_p (size_tree))
3318 size = tree_to_uhwi (size_tree);
3319 }
3320
3321 return size;
3322 }
3323 \f
/* Return the bit position of FIELD, in bits from the start of the record.
   This is a tree of type bitsizetype.  Combines the byte-aligned
   DECL_FIELD_OFFSET with the residual DECL_FIELD_BIT_OFFSET.  */

tree
bit_position (const_tree field)
{
  return bit_from_pos (DECL_FIELD_OFFSET (field),
		       DECL_FIELD_BIT_OFFSET (field));
}
3333 \f
3334 /* Return the byte position of FIELD, in bytes from the start of the record.
3335 This is a tree of type sizetype. */
3336
3337 tree
3338 byte_position (const_tree field)
3339 {
3340 return byte_from_pos (DECL_FIELD_OFFSET (field),
3341 DECL_FIELD_BIT_OFFSET (field));
3342 }
3343
/* Likewise, but return as an integer.  It must be representable in
   that way (since it could be a signed value, we don't have the
   option of returning -1 like int_size_in_bytes can).  */

HOST_WIDE_INT
int_byte_position (const_tree field)
{
  return tree_to_shwi (byte_position (field));
}
3353 \f
3354 /* Return, as a tree node, the number of elements for TYPE (which is an
3355 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3356
3357 tree
3358 array_type_nelts (const_tree type)
3359 {
3360 tree index_type, min, max;
3361
3362 /* If they did it with unspecified bounds, then we should have already
3363 given an error about it before we got here. */
3364 if (! TYPE_DOMAIN (type))
3365 return error_mark_node;
3366
3367 index_type = TYPE_DOMAIN (type);
3368 min = TYPE_MIN_VALUE (index_type);
3369 max = TYPE_MAX_VALUE (index_type);
3370
3371 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3372 if (!max)
3373 return error_mark_node;
3374
3375 return (integer_zerop (min)
3376 ? max
3377 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
3378 }
3379 \f
/* If arg is static -- a reference to an object in static storage -- then
   return the object.  This is not the same as the C meaning of `static'.
   If arg isn't static, return NULL.  */

tree
staticp (tree arg)
{
  switch (TREE_CODE (arg))
    {
    case FUNCTION_DECL:
      /* Nested functions are static, even though taking their address will
	 involve a trampoline as we unnest the nested function and create
	 the trampoline on the tree level.  */
      return arg;

    case VAR_DECL:
      /* Thread-local and dllimport'd variables live at addresses that are
	 not fixed at link time, so they do not count as static here.  */
      return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
	      && ! DECL_THREAD_LOCAL_P (arg)
	      && ! DECL_DLLIMPORT_P (arg)
	      ? arg : NULL);

    case CONST_DECL:
      return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
	      ? arg : NULL);

    case CONSTRUCTOR:
      return TREE_STATIC (arg) ? arg : NULL;

    case LABEL_DECL:
    case STRING_CST:
      return arg;

    case COMPONENT_REF:
      /* If the thing being referenced is not a field, then it is
	 something language specific.  */
      gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);

      /* If we are referencing a bitfield, we can't evaluate an
	 ADDR_EXPR at compile time and so it isn't a constant.  */
      if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
	return NULL;

      return staticp (TREE_OPERAND (arg, 0));

    case BIT_FIELD_REF:
      return NULL;

    case INDIRECT_REF:
      /* A dereference is static only if the address being dereferenced
	 is itself a constant.  */
      return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Static only when both the element size and the index are
	 compile-time constants; then recurse on the base.  */
      if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
	return staticp (TREE_OPERAND (arg, 0));
      else
	return NULL;

    case COMPOUND_LITERAL_EXPR:
      /* A compound literal is static iff its underlying decl is.  */
      return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;

    default:
      return NULL;
    }
}
3445
3446 \f
3447
3448
3449 /* Return whether OP is a DECL whose address is function-invariant. */
3450
3451 bool
3452 decl_address_invariant_p (const_tree op)
3453 {
3454 /* The conditions below are slightly less strict than the one in
3455 staticp. */
3456
3457 switch (TREE_CODE (op))
3458 {
3459 case PARM_DECL:
3460 case RESULT_DECL:
3461 case LABEL_DECL:
3462 case FUNCTION_DECL:
3463 return true;
3464
3465 case VAR_DECL:
3466 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3467 || DECL_THREAD_LOCAL_P (op)
3468 || DECL_CONTEXT (op) == current_function_decl
3469 || decl_function_context (op) == current_function_decl)
3470 return true;
3471 break;
3472
3473 case CONST_DECL:
3474 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3475 || decl_function_context (op) == current_function_decl)
3476 return true;
3477 break;
3478
3479 default:
3480 break;
3481 }
3482
3483 return false;
3484 }
3485
3486 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3487
3488 bool
3489 decl_address_ip_invariant_p (const_tree op)
3490 {
3491 /* The conditions below are slightly less strict than the one in
3492 staticp. */
3493
3494 switch (TREE_CODE (op))
3495 {
3496 case LABEL_DECL:
3497 case FUNCTION_DECL:
3498 case STRING_CST:
3499 return true;
3500
3501 case VAR_DECL:
3502 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3503 && !DECL_DLLIMPORT_P (op))
3504 || DECL_THREAD_LOCAL_P (op))
3505 return true;
3506 break;
3507
3508 case CONST_DECL:
3509 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3510 return true;
3511 break;
3512
3513 default:
3514 break;
3515 }
3516
3517 return false;
3518 }
3519
3520
/* Return true if T is function-invariant (internal function, does
   not handle arithmetic; that's handled in skip_simple_arithmetic and
   tree_invariant_p).  */

static bool
tree_invariant_p_1 (tree t)
{
  tree op;

  /* Constants and read-only trees without side-effects are invariant.  */
  if (TREE_CONSTANT (t)
      || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case SAVE_EXPR:
      return true;

    case ADDR_EXPR:
      /* Walk down the chain of component references; the address is
	 invariant only if every variable piece along the way is.  */
      op = TREE_OPERAND (t, 0);
      while (handled_component_p (op))
	{
	  switch (TREE_CODE (op))
	    {
	    case ARRAY_REF:
	    case ARRAY_RANGE_REF:
	      /* The index must be invariant, and the optional lower-bound
		 and element-size operands (2 and 3) must be absent.  */
	      if (!tree_invariant_p (TREE_OPERAND (op, 1))
		  || TREE_OPERAND (op, 2) != NULL_TREE
		  || TREE_OPERAND (op, 3) != NULL_TREE)
		return false;
	      break;

	    case COMPONENT_REF:
	      /* Operand 2 is an optional field offset; reject if present.  */
	      if (TREE_OPERAND (op, 2) != NULL_TREE)
		return false;
	      break;

	    default:;
	    }
	  op = TREE_OPERAND (op, 0);
	}

      /* Finally the base object itself must have an invariant address.  */
      return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);

    default:
      break;
    }

  return false;
}
3571
3572 /* Return true if T is function-invariant. */
3573
3574 bool
3575 tree_invariant_p (tree t)
3576 {
3577 tree inner = skip_simple_arithmetic (t);
3578 return tree_invariant_p_1 (inner);
3579 }
3580
/* Wrap a SAVE_EXPR around EXPR, if appropriate.
   Do this to any expression which may be used in more than one place,
   but must be evaluated only once.

   Normally, expand_expr would reevaluate the expression each time.
   Calling save_expr produces something that is evaluated and recorded
   the first time expand_expr is called on it.  Subsequent calls to
   expand_expr just reuse the recorded value.

   The call to expand_expr that generates code that actually computes
   the value is the first call *at compile time*.  Subsequent calls
   *at compile time* generate code to use the saved value.
   This produces correct result provided that *at run time* control
   always flows through the insns made by the first expand_expr
   before reaching the other places where the save_expr was evaluated.
   You, the caller of save_expr, must make sure this is so.

   Constants, and certain read-only nodes, are returned with no
   SAVE_EXPR because that is safe.  Expressions containing placeholders
   are not touched; see tree.def for an explanation of what these
   are used for.  */

tree
save_expr (tree expr)
{
  tree inner;

  /* If the tree evaluates to a constant, then we don't want to hide that
     fact (i.e. this allows further folding, and direct checks for constants).
     However, a read-only object that has side effects cannot be bypassed.
     Since it is no problem to reevaluate literals, we just return the
     literal node.  */
  inner = skip_simple_arithmetic (expr);
  if (TREE_CODE (inner) == ERROR_MARK)
    return inner;

  /* Invariant expressions need no wrapping; re-evaluation is free.  */
  if (tree_invariant_p_1 (inner))
    return expr;

  /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
     it means that the size or offset of some field of an object depends on
     the value within another field.

     Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
     and some variable since it would then need to be both evaluated once and
     evaluated more than once.  Front-ends must assure this case cannot
     happen by surrounding any such subexpressions in their own SAVE_EXPR
     and forcing evaluation at the proper time.  */
  if (contains_placeholder_p (inner))
    return expr;

  expr = build1_loc (EXPR_LOCATION (expr), SAVE_EXPR, TREE_TYPE (expr), expr);

  /* This expression might be placed ahead of a jump to ensure that the
     value was computed on both sides of the jump.  So make sure it isn't
     eliminated as dead.  */
  TREE_SIDE_EFFECTS (expr) = 1;
  return expr;
}
3640
3641 /* Look inside EXPR into any simple arithmetic operations. Return the
3642 outermost non-arithmetic or non-invariant node. */
3643
3644 tree
3645 skip_simple_arithmetic (tree expr)
3646 {
3647 /* We don't care about whether this can be used as an lvalue in this
3648 context. */
3649 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3650 expr = TREE_OPERAND (expr, 0);
3651
3652 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3653 a constant, it will be more efficient to not make another SAVE_EXPR since
3654 it will allow better simplification and GCSE will be able to merge the
3655 computations if they actually occur. */
3656 while (true)
3657 {
3658 if (UNARY_CLASS_P (expr))
3659 expr = TREE_OPERAND (expr, 0);
3660 else if (BINARY_CLASS_P (expr))
3661 {
3662 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3663 expr = TREE_OPERAND (expr, 0);
3664 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3665 expr = TREE_OPERAND (expr, 1);
3666 else
3667 break;
3668 }
3669 else
3670 break;
3671 }
3672
3673 return expr;
3674 }
3675
3676 /* Look inside EXPR into simple arithmetic operations involving constants.
3677 Return the outermost non-arithmetic or non-constant node. */
3678
3679 tree
3680 skip_simple_constant_arithmetic (tree expr)
3681 {
3682 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3683 expr = TREE_OPERAND (expr, 0);
3684
3685 while (true)
3686 {
3687 if (UNARY_CLASS_P (expr))
3688 expr = TREE_OPERAND (expr, 0);
3689 else if (BINARY_CLASS_P (expr))
3690 {
3691 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3692 expr = TREE_OPERAND (expr, 0);
3693 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3694 expr = TREE_OPERAND (expr, 1);
3695 else
3696 break;
3697 }
3698 else
3699 break;
3700 }
3701
3702 return expr;
3703 }
3704
3705 /* Return which tree structure is used by T. */
3706
3707 enum tree_node_structure_enum
3708 tree_node_structure (const_tree t)
3709 {
3710 const enum tree_code code = TREE_CODE (t);
3711 return tree_node_structure_for_code (code);
3712 }
3713
3714 /* Set various status flags when building a CALL_EXPR object T. */
3715
3716 static void
3717 process_call_operands (tree t)
3718 {
3719 bool side_effects = TREE_SIDE_EFFECTS (t);
3720 bool read_only = false;
3721 int i = call_expr_flags (t);
3722
3723 /* Calls have side-effects, except those to const or pure functions. */
3724 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3725 side_effects = true;
3726 /* Propagate TREE_READONLY of arguments for const functions. */
3727 if (i & ECF_CONST)
3728 read_only = true;
3729
3730 if (!side_effects || read_only)
3731 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3732 {
3733 tree op = TREE_OPERAND (t, i);
3734 if (op && TREE_SIDE_EFFECTS (op))
3735 side_effects = true;
3736 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3737 read_only = false;
3738 }
3739
3740 TREE_SIDE_EFFECTS (t) = side_effects;
3741 TREE_READONLY (t) = read_only;
3742 }
3743 \f
/* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
   size or offset that depends on a field within a record.  */

bool
contains_placeholder_p (const_tree exp)
{
  enum tree_code code;

  if (!exp)
    return 0;

  code = TREE_CODE (exp);
  if (code == PLACEHOLDER_EXPR)
    return 1;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
	 position computations since they will be converted into a
	 WITH_RECORD_EXPR involving the reference, which is assumed
	 to be valid here.  Only the base object (operand 0) matters.  */
      return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));

    case tcc_exceptional:
      if (code == TREE_LIST)
	return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
		|| CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
      break;

    case tcc_unary:
    case tcc_binary:
    case tcc_comparison:
    case tcc_expression:
      switch (code)
	{
	case COMPOUND_EXPR:
	  /* Ignoring the first operand isn't quite right, but works best.  */
	  return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));

	case COND_EXPR:
	  return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));

	case SAVE_EXPR:
	  /* The save_expr function never wraps anything containing
	     a PLACEHOLDER_EXPR.  */
	  return 0;

	default:
	  break;
	}

      /* Generic arity-based recursion for the remaining operations.  */
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
	case 2:
	  return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
	default:
	  return 0;
	}

    case tcc_vl_exp:
      switch (code)
	{
	case CALL_EXPR:
	  {
	    /* Only the argument list of a call is inspected.  */
	    const_tree arg;
	    const_call_expr_arg_iterator iter;
	    FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
	      if (CONTAINS_PLACEHOLDER_P (arg))
		return 1;
	    return 0;
	  }
	default:
	  return 0;
	}

    default:
      return 0;
    }
  return 0;
}
3830
/* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
   directly.  This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
   field positions.  */

static bool
type_contains_placeholder_1 (const_tree type)
{
  /* If the size contains a placeholder or the parent type (component type in
     the case of arrays) type involves a placeholder, this type does.  */
  if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
      || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
      || (!POINTER_TYPE_P (type)
	  && TREE_TYPE (type)
	  && type_contains_placeholder_p (TREE_TYPE (type))))
    return true;

  /* Now do type-specific checks.  Note that the last part of the check above
     greatly limits what we have to do below.  */
  switch (TREE_CODE (type))
    {
    case VOID_TYPE:
    case COMPLEX_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case OFFSET_TYPE:
    case REFERENCE_TYPE:
    case METHOD_TYPE:
    case FUNCTION_TYPE:
    case VECTOR_TYPE:
    case NULLPTR_TYPE:
      /* Nothing beyond size/component type to check for these.  */
      return false;

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      /* Here we just check the bounds.  */
      return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
	      || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));

    case ARRAY_TYPE:
      /* We have already checked the component type above, so just check
	 the domain type.  Flexible array members have a null domain.  */
      return TYPE_DOMAIN (type) ?
	type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree field;

	/* Check each field's offset, qualifier (QUAL_UNION only) and type.  */
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (TREE_CODE (field) == FIELD_DECL
	      && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
		  || (TREE_CODE (type) == QUAL_UNION_TYPE
		      && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
		  || type_contains_placeholder_p (TREE_TYPE (field))))
	    return true;

	return false;
      }

    default:
      gcc_unreachable ();
    }
}
3898
3899 /* Wrapper around above function used to cache its result. */
3900
3901 bool
3902 type_contains_placeholder_p (tree type)
3903 {
3904 bool result;
3905
3906 /* If the contains_placeholder_bits field has been initialized,
3907 then we know the answer. */
3908 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3909 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3910
3911 /* Indicate that we've seen this type node, and the answer is false.
3912 This is what we want to return if we run into recursion via fields. */
3913 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3914
3915 /* Compute the real value. */
3916 result = type_contains_placeholder_1 (type);
3917
3918 /* Store the real value. */
3919 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3920
3921 return result;
3922 }
3923 \f
/* Push tree EXP onto vector QUEUE if it is not already present.  */

static void
push_without_duplicates (tree exp, vec<tree> *queue)
{
  unsigned int i;
  tree iter;

  /* Linear scan for an existing structurally-equal entry;
     simple_cst_equal returns 1 only on a definite match.  */
  FOR_EACH_VEC_ELT (*queue, i, iter)
    if (simple_cst_equal (iter, exp) == 1)
      break;

  /* NOTE(review): the emptiness test relies on FOR_EACH_VEC_ELT leaving
     ITER null when the loop runs off the end of the vector — confirm
     against vec::iterate in vec.h.  */
  if (!iter)
    queue->safe_push (exp);
}
3939
/* Given a tree EXP, find all occurrences of references to fields
   in a PLACEHOLDER_EXPR and place them in vector REFS without
   duplicates.  Also record VAR_DECLs and CONST_DECLs.  Note that
   we assume here that EXP contains only arithmetic expressions
   or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
   argument list.  */

void
find_placeholder_in_expr (tree exp, vec<tree> *refs)
{
  enum tree_code code = TREE_CODE (exp);
  tree inner;
  int i;

  /* We handle TREE_LIST and COMPONENT_REF separately.  */
  if (code == TREE_LIST)
    {
      FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
      FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
    }
  else if (code == COMPONENT_REF)
    {
      /* Walk to the base object of the reference chain.  */
      for (inner = TREE_OPERAND (exp, 0);
	   REFERENCE_CLASS_P (inner);
	   inner = TREE_OPERAND (inner, 0))
	;

      /* If the base is a placeholder, record the whole reference;
	 otherwise keep looking inside the base expression.  */
      if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
	push_without_duplicates (exp, refs);
      else
	FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
    }
  else
    switch (TREE_CODE_CLASS (code))
      {
      case tcc_constant:
	break;

      case tcc_declaration:
	/* Variables allocated to static storage can stay.  */
	if (!TREE_STATIC (exp))
	  push_without_duplicates (exp, refs);
	break;

      case tcc_expression:
	/* This is the pattern built in ada/make_aligning_type.  */
	if (code == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
	  {
	    push_without_duplicates (exp, refs);
	    break;
	  }

	/* Fall through.  */

      case tcc_exceptional:
      case tcc_unary:
      case tcc_binary:
      case tcc_comparison:
      case tcc_reference:
	/* Recurse into every operand.  */
	for (i = 0; i < TREE_CODE_LENGTH (code); i++)
	  FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
	break;

      case tcc_vl_exp:
	/* Variable-length expressions: operand 0 is bookkeeping, so
	   recursion starts at operand 1.  */
	for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
	  FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
	break;

      default:
	gcc_unreachable ();
      }
}
4013
/* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
   return a tree with all occurrences of references to F in a
   PLACEHOLDER_EXPR replaced by R.  Also handle VAR_DECLs and
   CONST_DECLs.  Note that we assume here that EXP contains only
   arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
   occurring only in their argument list.  */

tree
substitute_in_expr (tree exp, tree f, tree r)
{
  enum tree_code code = TREE_CODE (exp);
  tree op0, op1, op2, op3;
  tree new_tree;

  /* We handle TREE_LIST and COMPONENT_REF separately.  */
  if (code == TREE_LIST)
    {
      op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
      op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
      if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
	return exp;

      return tree_cons (TREE_PURPOSE (exp), op1, op0);
    }
  else if (code == COMPONENT_REF)
    {
      tree inner;

      /* If this expression is getting a value from a PLACEHOLDER_EXPR
	 and it is the right field, replace it with R.  */
      for (inner = TREE_OPERAND (exp, 0);
	   REFERENCE_CLASS_P (inner);
	   inner = TREE_OPERAND (inner, 0))
	;

      /* The field.  */
      op1 = TREE_OPERAND (exp, 1);

      if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
	return r;

      /* If this expression hasn't been completed yet, leave it alone.  */
      if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
	return exp;

      op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
      if (op0 == TREE_OPERAND (exp, 0))
	return exp;

      new_tree
	= fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
    }
  else
    switch (TREE_CODE_CLASS (code))
      {
      case tcc_constant:
	return exp;

      case tcc_declaration:
	if (exp == f)
	  return r;
	else
	  return exp;

      case tcc_expression:
	if (exp == f)
	  return r;

	/* Fall through.  */

      case tcc_exceptional:
      case tcc_unary:
      case tcc_binary:
      case tcc_comparison:
      case tcc_reference:
	/* Recurse into each operand; return EXP unchanged when no
	   operand changed so sharing is preserved.  */
	switch (TREE_CODE_LENGTH (code))
	  {
	  case 0:
	    return exp;

	  case 1:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    if (op0 == TREE_OPERAND (exp, 0))
	      return exp;

	    new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
	    break;

	  case 2:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
	      return exp;

	    new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
	    break;

	  case 3:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
	    op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2))
	      return exp;

	    new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
	    break;

	  case 4:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
	    op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
	    op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2)
		&& op3 == TREE_OPERAND (exp, 3))
	      return exp;

	    new_tree
	      = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
	    break;

	  default:
	    gcc_unreachable ();
	  }
	break;

      case tcc_vl_exp:
	{
	  int i;

	  new_tree = NULL_TREE;

	  /* If we are trying to replace F with a constant or with another
	     instance of one of the arguments of the call, inline back
	     functions which do nothing else than computing a value from
	     the arguments they are passed.  This makes it possible to
	     fold partially or entirely the replacement expression.  */
	  if (code == CALL_EXPR)
	    {
	      bool maybe_inline = false;
	      if (CONSTANT_CLASS_P (r))
		maybe_inline = true;
	      else
		/* Call arguments start at operand 3.  */
		for (i = 3; i < TREE_OPERAND_LENGTH (exp); i++)
		  if (operand_equal_p (TREE_OPERAND (exp, i), r, 0))
		    {
		      maybe_inline = true;
		      break;
		    }
	      if (maybe_inline)
		{
		  tree t = maybe_inline_call_in_expr (exp);
		  if (t)
		    return SUBSTITUTE_IN_EXPR (t, f, r);
		}
	    }

	  /* Substitute in every operand past the bookkeeping slot 0,
	     copying the node lazily on the first change.  */
	  for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
	    {
	      tree op = TREE_OPERAND (exp, i);
	      tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
	      if (new_op != op)
		{
		  if (!new_tree)
		    new_tree = copy_node (exp);
		  TREE_OPERAND (new_tree, i) = new_op;
		}
	    }

	  if (new_tree)
	    {
	      new_tree = fold (new_tree);
	      if (TREE_CODE (new_tree) == CALL_EXPR)
		process_call_operands (new_tree);
	    }
	  else
	    return exp;
	}
	break;

      default:
	gcc_unreachable ();
      }

  /* Propagate flags the substitution may have dropped.  */
  TREE_READONLY (new_tree) |= TREE_READONLY (exp);

  if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
    TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);

  return new_tree;
}
4209
/* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
   for it within OBJ, a tree that is an object or a chain of references.  */

tree
substitute_placeholder_in_expr (tree exp, tree obj)
{
  enum tree_code code = TREE_CODE (exp);
  tree op0, op1, op2, op3;
  tree new_tree;

  /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
     in the chain of OBJ.  */
  if (code == PLACEHOLDER_EXPR)
    {
      tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
      tree elt;

      /* First pass: look for an element of the chain whose type matches
	 the placeholder's type directly.  */
      for (elt = obj; elt != 0;
	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
		   || TREE_CODE (elt) == COND_EXPR)
		  ? TREE_OPERAND (elt, 1)
		  : (REFERENCE_CLASS_P (elt)
		     || UNARY_CLASS_P (elt)
		     || BINARY_CLASS_P (elt)
		     || VL_EXP_CLASS_P (elt)
		     || EXPRESSION_CLASS_P (elt))
		  ? TREE_OPERAND (elt, 0) : 0))
	if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
	  return elt;

      /* Second pass: accept a pointer to the needed type and build a
	 dereference of it.  */
      for (elt = obj; elt != 0;
	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
		   || TREE_CODE (elt) == COND_EXPR)
		  ? TREE_OPERAND (elt, 1)
		  : (REFERENCE_CLASS_P (elt)
		     || UNARY_CLASS_P (elt)
		     || BINARY_CLASS_P (elt)
		     || VL_EXP_CLASS_P (elt)
		     || EXPRESSION_CLASS_P (elt))
		  ? TREE_OPERAND (elt, 0) : 0))
	if (POINTER_TYPE_P (TREE_TYPE (elt))
	    && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
		== need_type))
	  return fold_build1 (INDIRECT_REF, need_type, elt);

      /* If we didn't find it, return the original PLACEHOLDER_EXPR.  If it
	 survives until RTL generation, there will be an error.  */
      return exp;
    }

  /* TREE_LIST is special because we need to look at TREE_VALUE
     and TREE_CHAIN, not TREE_OPERANDS.  */
  else if (code == TREE_LIST)
    {
      op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
      op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
      if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
	return exp;

      return tree_cons (TREE_PURPOSE (exp), op1, op0);
    }
  else
    switch (TREE_CODE_CLASS (code))
      {
      case tcc_constant:
      case tcc_declaration:
	return exp;

      case tcc_exceptional:
      case tcc_unary:
      case tcc_binary:
      case tcc_comparison:
      case tcc_expression:
      case tcc_reference:
      case tcc_statement:
	/* Recurse into each operand; return EXP unchanged when no
	   operand changed so sharing is preserved.  */
	switch (TREE_CODE_LENGTH (code))
	  {
	  case 0:
	    return exp;

	  case 1:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    if (op0 == TREE_OPERAND (exp, 0))
	      return exp;

	    new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
	    break;

	  case 2:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
	      return exp;

	    new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
	    break;

	  case 3:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
	    op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2))
	      return exp;

	    new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
	    break;

	  case 4:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
	    op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
	    op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2)
		&& op3 == TREE_OPERAND (exp, 3))
	      return exp;

	    new_tree
	      = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
	    break;

	  default:
	    gcc_unreachable ();
	  }
	break;

      case tcc_vl_exp:
	{
	  int i;

	  new_tree = NULL_TREE;

	  /* Substitute in every operand past the bookkeeping slot 0,
	     copying the node lazily on the first change.  */
	  for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
	    {
	      tree op = TREE_OPERAND (exp, i);
	      tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
	      if (new_op != op)
		{
		  if (!new_tree)
		    new_tree = copy_node (exp);
		  TREE_OPERAND (new_tree, i) = new_op;
		}
	    }

	  if (new_tree)
	    {
	      new_tree = fold (new_tree);
	      if (TREE_CODE (new_tree) == CALL_EXPR)
		process_call_operands (new_tree);
	    }
	  else
	    return exp;
	}
	break;

      default:
	gcc_unreachable ();
      }

  /* Propagate flags the substitution may have dropped.  */
  TREE_READONLY (new_tree) |= TREE_READONLY (exp);

  if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
    TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);

  return new_tree;
}
4380 \f
4381
/* Subroutine of stabilize_reference; this is called for subtrees of
   references.  Any expression with side-effects must be put in a SAVE_EXPR
   to ensure that it is only evaluated once.

   We don't put SAVE_EXPR nodes around everything, because assigning very
   simple expressions to temporaries causes us to miss good opportunities
   for optimizations.  Among other things, the opportunity to fold in the
   addition of a constant into an addressing mode often gets lost, e.g.
   "y[i+1] += x;".  In general, we take the approach that we should not make
   an assignment unless we are forced into it - i.e., that any non-side effect
   operator should be allowed, and that cse should take care of coalescing
   multiple utterances of the same expression should that prove fruitful.  */

static tree
stabilize_reference_1 (tree e)
{
  tree result;
  enum tree_code code = TREE_CODE (e);

  /* We cannot ignore const expressions because it might be a reference
     to a const array but whose index contains side-effects.  But we can
     ignore things that are actual constant or that already have been
     handled by this function.  */

  if (tree_invariant_p (e))
    return e;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_exceptional:
      /* Always wrap STATEMENT_LIST into SAVE_EXPR, even if it doesn't
	 have side-effects.  */
      if (code == STATEMENT_LIST)
	return save_expr (e);
      /* FALLTHRU */
    case tcc_type:
    case tcc_declaration:
    case tcc_comparison:
    case tcc_statement:
    case tcc_expression:
    case tcc_reference:
    case tcc_vl_exp:
      /* If the expression has side-effects, then encase it in a SAVE_EXPR
	 so that it will only be evaluated once.  */
      /* The reference (r) and comparison (<) classes could be handled as
	 below, but it is generally faster to only evaluate them once.  */
      if (TREE_SIDE_EFFECTS (e))
	return save_expr (e);
      return e;

    case tcc_constant:
      /* Constants need no processing.  In fact, we should never reach
	 here.  */
      return e;

    case tcc_binary:
      /* Division is slow and tends to be compiled with jumps,
	 especially the division by powers of 2 that is often
	 found inside of an array reference.  So do it just once.  */
      if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
	  || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
	  || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
	  || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
	return save_expr (e);
      /* Recursively stabilize each operand.  */
      result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
			 stabilize_reference_1 (TREE_OPERAND (e, 1)));
      break;

    case tcc_unary:
      /* Recursively stabilize each operand.  */
      result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
      break;

    default:
      gcc_unreachable ();
    }

  /* build_nt does not set flags; copy them over from the original node.  */
  TREE_TYPE (result) = TREE_TYPE (e);
  TREE_READONLY (result) = TREE_READONLY (e);
  TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);

  return result;
}
4467
4468 /* Stabilize a reference so that we can use it any number of times
4469 without causing its operands to be evaluated more than once.
4470 Returns the stabilized reference. This works by means of save_expr,
4471 so see the caveats in the comments about save_expr.
4472
4473 Also allows conversion expressions whose operands are references.
4474 Any other kind of expression is returned unchanged. */
4475
tree
stabilize_reference (tree ref)
{
  tree result;
  enum tree_code code = TREE_CODE (ref);

  switch (code)
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* No action is needed in this case.  */
      return ref;

    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
      break;

    case INDIRECT_REF:
      result = build_nt (INDIRECT_REF,
			 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
      break;

    case COMPONENT_REF:
      result = build_nt (COMPONENT_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 TREE_OPERAND (ref, 1), NULL_TREE);
      break;

    case BIT_FIELD_REF:
      result = build_nt (BIT_FIELD_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
      REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
      break;

    case ARRAY_REF:
      /* The index gets the full stabilize_reference_1 treatment (it may
	 have side-effects), the base only recurses through this function.  */
      result = build_nt (ARRAY_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
			 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
      break;

    case ARRAY_RANGE_REF:
      result = build_nt (ARRAY_RANGE_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
			 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
      break;

    case COMPOUND_EXPR:
      /* We cannot wrap the first expression in a SAVE_EXPR, as then
	 it wouldn't be ignored.  This matters when dealing with
	 volatiles.  */
      return stabilize_reference_1 (ref);

    /* If arg isn't a kind of lvalue we recognize, make no change.
       Caller should recognize the error for an invalid lvalue.  */
    default:
      return ref;

    case ERROR_MARK:
      return error_mark_node;
    }

  /* Copy the type and flags of REF onto the rebuilt reference.  */
  TREE_TYPE (result) = TREE_TYPE (ref);
  TREE_READONLY (result) = TREE_READONLY (ref);
  TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);

  return result;
}
4550 \f
4551 /* Low-level constructors for expressions. */
4552
4553 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4554 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4555
void
recompute_tree_invariant_for_addr_expr (tree t)
{
  tree node;
  bool tc = true, se = false;

  gcc_assert (TREE_CODE (t) == ADDR_EXPR);

  /* We started out assuming this address is both invariant and constant, but
     does not have side effects.  Now go down any handled components and see if
     any of them involve offsets that are either non-constant or non-invariant.
     Also check for side-effects.

     ??? Note that this code makes no attempt to deal with the case where
     taking the address of something causes a copy due to misalignment.  */

  /* Clear TC if NODE is non-constant; set SE if it has side-effects.  */
#define UPDATE_FLAGS(NODE)  \
do { tree _node = (NODE); \
     if (_node && !TREE_CONSTANT (_node)) tc = false; \
     if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)

  for (node = TREE_OPERAND (t, 0); handled_component_p (node);
       node = TREE_OPERAND (node, 0))
    {
      /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
	 array reference (probably made temporarily by the G++ front end),
	 so ignore all the operands.  */
      if ((TREE_CODE (node) == ARRAY_REF
	   || TREE_CODE (node) == ARRAY_RANGE_REF)
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
	{
	  /* Operands 1-3 are index, lower bound and element size.  */
	  UPDATE_FLAGS (TREE_OPERAND (node, 1));
	  if (TREE_OPERAND (node, 2))
	    UPDATE_FLAGS (TREE_OPERAND (node, 2));
	  if (TREE_OPERAND (node, 3))
	    UPDATE_FLAGS (TREE_OPERAND (node, 3));
	}
      /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
	 FIELD_DECL, apparently.  The G++ front end can put something else
	 there, at least temporarily.  */
      else if (TREE_CODE (node) == COMPONENT_REF
	       && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
	{
	  if (TREE_OPERAND (node, 2))
	    UPDATE_FLAGS (TREE_OPERAND (node, 2));
	}
    }

  node = lang_hooks.expr_to_decl (node, &tc, &se);

  /* Now see what's inside.  If it's an INDIRECT_REF, copy our properties from
     the address, since &(*a)->b is a form of addition.  If it's a constant, the
     address is constant too.  If it's a decl, its address is constant if the
     decl is static.  Everything else is not constant and, furthermore,
     taking the address of a volatile variable is not volatile.  */
  if (TREE_CODE (node) == INDIRECT_REF
      || TREE_CODE (node) == MEM_REF)
    UPDATE_FLAGS (TREE_OPERAND (node, 0));
  else if (CONSTANT_CLASS_P (node))
    ;
  else if (DECL_P (node))
    tc &= (staticp (node) != NULL_TREE);
  else
    {
      tc = false;
      se |= TREE_SIDE_EFFECTS (node);
    }


  TREE_CONSTANT (t) = tc;
  TREE_SIDE_EFFECTS (t) = se;
#undef UPDATE_FLAGS
}
4629
4630 /* Build an expression of code CODE, data type TYPE, and operands as
4631 specified. Expressions and reference nodes can be created this way.
4632 Constants, decls, types and misc nodes cannot be.
4633
4634 We define 5 non-variadic functions, from 0 to 4 arguments. This is
4635 enough for all extant tree codes. */
4636
4637 tree
4638 build0 (enum tree_code code, tree tt MEM_STAT_DECL)
4639 {
4640 tree t;
4641
4642 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4643
4644 t = make_node (code PASS_MEM_STAT);
4645 TREE_TYPE (t) = tt;
4646
4647 return t;
4648 }
4649
/* Specialization of the build functions for one-operand expressions:
   allocates a tree_exp node directly and derives the constant,
   read-only, side-effect and volatile flags from NODE per CODE.  */

tree
build1 (enum tree_code code, tree type, tree node MEM_STAT_DECL)
{
  int length = sizeof (struct tree_exp);
  tree t;

  record_node_allocation_statistics (code, length);

  gcc_assert (TREE_CODE_LENGTH (code) == 1);

  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);

  /* Only the common part needs clearing; the rest is set below.  */
  memset (t, 0, sizeof (struct tree_common));

  TREE_SET_CODE (t, code);

  TREE_TYPE (t) = type;
  SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
  TREE_OPERAND (t, 0) = node;
  if (node && !TYPE_P (node))
    {
      TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
      TREE_READONLY (t) = TREE_READONLY (node);
    }

  if (TREE_CODE_CLASS (code) == tcc_statement)
    {
      if (code != DEBUG_BEGIN_STMT)
	TREE_SIDE_EFFECTS (t) = 1;
    }
  else switch (code)
    {
    case VA_ARG_EXPR:
      /* All of these have side-effects, no matter what their
	 operands are.  */
      TREE_SIDE_EFFECTS (t) = 1;
      TREE_READONLY (t) = 0;
      break;

    case INDIRECT_REF:
      /* Whether a dereference is readonly has nothing to do with whether
	 its operand is readonly.  */
      TREE_READONLY (t) = 0;
      break;

    case ADDR_EXPR:
      if (node)
	recompute_tree_invariant_for_addr_expr (t);
      break;

    default:
      if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
	  && node && !TYPE_P (node)
	  && TREE_CONSTANT (node))
	TREE_CONSTANT (t) = 1;
      if (TREE_CODE_CLASS (code) == tcc_reference
	  && node && TREE_THIS_VOLATILE (node))
	TREE_THIS_VOLATILE (t) = 1;
      break;
    }

  return t;
}
4713
/* Helper for build2 through build5: store ARG##N as operand N of T and
   fold its flags into the accumulators.  Relies on locals named t,
   arg##N, side_effects, read_only and constant being in scope at the
   point of use.  */
#define PROCESS_ARG(N)				\
  do {						\
    TREE_OPERAND (t, N) = arg##N;		\
    if (arg##N &&!TYPE_P (arg##N))		\
      {						\
	if (TREE_SIDE_EFFECTS (arg##N))		\
	  side_effects = 1;			\
	if (!TREE_READONLY (arg##N)		\
	    && !CONSTANT_CLASS_P (arg##N))	\
	  (void) (read_only = 0);		\
	if (!TREE_CONSTANT (arg##N))		\
	  (void) (constant = 0);		\
      }						\
  } while (0)
4728
/* Build a two-operand expression of code CODE and type TT.  Pointer
   arithmetic codes are sanity-checked up front; flags are derived from
   ARG0/ARG1 via PROCESS_ARG.  */

tree
build2 (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects, div_by_zero;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 2);

  if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
      && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
      /* When sizetype precision doesn't match that of pointers
	 we need to be able to build explicit extensions or truncations
	 of the offset argument.  */
      && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
    gcc_assert (TREE_CODE (arg0) == INTEGER_CST
		&& TREE_CODE (arg1) == INTEGER_CST);

  if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
    gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
		&& ptrofftype_p (TREE_TYPE (arg1)));

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
     result based on those same flags for the arguments.  But if the
     arguments aren't really even `tree' expressions, we shouldn't be trying
     to do this.  */

  /* Expressions without side effects may be constant if their
     arguments are as well.  */
  constant = (TREE_CODE_CLASS (code) == tcc_comparison
	      || TREE_CODE_CLASS (code) == tcc_binary);
  read_only = 1;
  side_effects = TREE_SIDE_EFFECTS (t);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      div_by_zero = integer_zerop (arg1);
      break;
    default:
      div_by_zero = false;
    }

  PROCESS_ARG (0);
  PROCESS_ARG (1);

  TREE_SIDE_EFFECTS (t) = side_effects;
  if (code == MEM_REF)
    {
      /* A MEM_REF takes its read-only/volatile flags from the object
	 pointed to, when that is visible through an ADDR_EXPR.  */
      if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
	{
	  tree o = TREE_OPERAND (arg0, 0);
	  TREE_READONLY (t) = TREE_READONLY (o);
	  TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
	}
    }
  else
    {
      TREE_READONLY (t) = read_only;
      /* Don't mark X / 0 as constant.  */
      TREE_CONSTANT (t) = constant && !div_by_zero;
      TREE_THIS_VOLATILE (t)
	= (TREE_CODE_CLASS (code) == tcc_reference
	   && arg0 && TREE_THIS_VOLATILE (arg0));
    }

  return t;
}
4807
4808
/* Build a three-operand expression of code CODE and type TT.  */

tree
build3 (enum tree_code code, tree tt, tree arg0, tree arg1,
	tree arg2 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 3);
  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  read_only = 1;

  /* As a special exception, if COND_EXPR has NULL branches, we
     assume that it is a gimple statement and always consider
     it to have side effects.  */
  if (code == COND_EXPR
      && tt == void_type_node
      && arg1 == NULL_TREE
      && arg2 == NULL_TREE)
    side_effects = true;
  else
    side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);

  if (code == COND_EXPR)
    TREE_READONLY (t) = read_only;

  TREE_SIDE_EFFECTS (t) = side_effects;
  /* A reference is volatile if its base operand is.  */
  TREE_THIS_VOLATILE (t)
    = (TREE_CODE_CLASS (code) == tcc_reference
       && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}
4849
/* Build a four-operand expression of code CODE and type TT.  */

tree
build4 (enum tree_code code, tree tt, tree arg0, tree arg1,
	tree arg2, tree arg3 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 4);

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);
  PROCESS_ARG (3);

  TREE_SIDE_EFFECTS (t) = side_effects;
  /* A reference is volatile if its base operand is.  */
  TREE_THIS_VOLATILE (t)
    = (TREE_CODE_CLASS (code) == tcc_reference
       && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}
4876
/* Build a five-operand expression of code CODE and type TT.  */

tree
build5 (enum tree_code code, tree tt, tree arg0, tree arg1,
	tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 5);

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);
  PROCESS_ARG (3);
  PROCESS_ARG (4);

  TREE_SIDE_EFFECTS (t) = side_effects;
  if (code == TARGET_MEM_REF)
    {
      /* Like MEM_REF in build2: take read-only/volatile from the
	 pointed-to object when it is visible through an ADDR_EXPR.  */
      if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
	{
	  tree o = TREE_OPERAND (arg0, 0);
	  TREE_READONLY (t) = TREE_READONLY (o);
	  TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
	}
    }
  else
    TREE_THIS_VOLATILE (t)
      = (TREE_CODE_CLASS (code) == tcc_reference
	 && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}
4914
/* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4916 on the pointer PTR. */
4917
tree
build_simple_mem_ref_loc (location_t loc, tree ptr)
{
  poly_int64 offset = 0;
  tree ptype = TREE_TYPE (ptr);
  tree tem;
  /* For convenience allow addresses that collapse to a simple base
     and offset.  */
  if (TREE_CODE (ptr) == ADDR_EXPR
      && (handled_component_p (TREE_OPERAND (ptr, 0))
	  || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
    {
      ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
      gcc_assert (ptr);
      if (TREE_CODE (ptr) == MEM_REF)
	{
	  /* Fold the inner MEM_REF's offset into ours, use its base.  */
	  offset += mem_ref_offset (ptr).force_shwi ();
	  ptr = TREE_OPERAND (ptr, 0);
	}
      else
	ptr = build_fold_addr_expr (ptr);
      gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
    }
  /* Note the MEM_REF offset operand keeps the original pointer type.  */
  tem = build2 (MEM_REF, TREE_TYPE (ptype),
		ptr, build_int_cst (ptype, offset));
  SET_EXPR_LOCATION (tem, loc);
  return tem;
}
4946
4947 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4948
4949 poly_offset_int
4950 mem_ref_offset (const_tree t)
4951 {
4952 return poly_offset_int::from (wi::to_poly_wide (TREE_OPERAND (t, 1)),
4953 SIGNED);
4954 }
4955
4956 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4957 offsetted by OFFSET units. */
4958
4959 tree
4960 build_invariant_address (tree type, tree base, poly_int64 offset)
4961 {
4962 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4963 build_fold_addr_expr (base),
4964 build_int_cst (ptr_type_node, offset));
4965 tree addr = build1 (ADDR_EXPR, type, ref);
4966 recompute_tree_invariant_for_addr_expr (addr);
4967 return addr;
4968 }
4969
4970 /* Similar except don't specify the TREE_TYPE
4971 and leave the TREE_SIDE_EFFECTS as 0.
4972 It is permissible for arguments to be null,
4973 or even garbage if their values do not matter. */
4974
4975 tree
4976 build_nt (enum tree_code code, ...)
4977 {
4978 tree t;
4979 int length;
4980 int i;
4981 va_list p;
4982
4983 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4984
4985 va_start (p, code);
4986
4987 t = make_node (code);
4988 length = TREE_CODE_LENGTH (code);
4989
4990 for (i = 0; i < length; i++)
4991 TREE_OPERAND (t, i) = va_arg (p, tree);
4992
4993 va_end (p);
4994 return t;
4995 }
4996
4997 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4998 tree vec. */
4999
5000 tree
5001 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
5002 {
5003 tree ret, t;
5004 unsigned int ix;
5005
5006 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
5007 CALL_EXPR_FN (ret) = fn;
5008 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
5009 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
5010 CALL_EXPR_ARG (ret, ix) = t;
5011 return ret;
5012 }
5013 \f
5014 /* Create a DECL_... node of code CODE, name NAME (if non-null)
5015 and data type TYPE.
5016 We do NOT enter this node in any sort of symbol table.
5017
5018 LOC is the location of the decl.
5019
5020 layout_decl is used to set up the decl's storage layout.
5021 Other slots are initialized to 0 or null pointers. */
5022
5023 tree
5024 build_decl (location_t loc, enum tree_code code, tree name,
5025 tree type MEM_STAT_DECL)
5026 {
5027 tree t;
5028
5029 t = make_node (code PASS_MEM_STAT);
5030 DECL_SOURCE_LOCATION (t) = loc;
5031
5032 /* if (type == error_mark_node)
5033 type = integer_type_node; */
5034 /* That is not done, deliberately, so that having error_mark_node
5035 as the type can suppress useless errors in the use of this variable. */
5036
5037 DECL_NAME (t) = name;
5038 TREE_TYPE (t) = type;
5039
5040 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
5041 layout_decl (t, 0);
5042
5043 return t;
5044 }
5045
5046 /* Builds and returns function declaration with NAME and TYPE. */
5047
5048 tree
5049 build_fn_decl (const char *name, tree type)
5050 {
5051 tree id = get_identifier (name);
5052 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
5053
5054 DECL_EXTERNAL (decl) = 1;
5055 TREE_PUBLIC (decl) = 1;
5056 DECL_ARTIFICIAL (decl) = 1;
5057 TREE_NOTHROW (decl) = 1;
5058
5059 return decl;
5060 }
5061
5062 vec<tree, va_gc> *all_translation_units;
5063
5064 /* Builds a new translation-unit decl with name NAME, queues it in the
5065 global list of translation-unit decls and returns it. */
5066
5067 tree
5068 build_translation_unit_decl (tree name)
5069 {
5070 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
5071 name, NULL_TREE);
5072 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
5073 vec_safe_push (all_translation_units, tu);
5074 return tu;
5075 }
5076
5077 \f
5078 /* BLOCK nodes are used to represent the structure of binding contours
5079 and declarations, once those contours have been exited and their contents
5080 compiled. This information is used for outputting debugging info. */
5081
5082 tree
5083 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
5084 {
5085 tree block = make_node (BLOCK);
5086
5087 BLOCK_VARS (block) = vars;
5088 BLOCK_SUBBLOCKS (block) = subblocks;
5089 BLOCK_SUPERCONTEXT (block) = supercontext;
5090 BLOCK_CHAIN (block) = chain;
5091 return block;
5092 }
5093
5094 \f
5095 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
5096
5097 LOC is the location to use in tree T. */
5098
5099 void
5100 protected_set_expr_location (tree t, location_t loc)
5101 {
5102 if (CAN_HAVE_LOCATION_P (t))
5103 SET_EXPR_LOCATION (t, loc);
5104 else if (t && TREE_CODE (t) == STATEMENT_LIST)
5105 {
5106 t = expr_single (t);
5107 if (t && CAN_HAVE_LOCATION_P (t))
5108 SET_EXPR_LOCATION (t, loc);
5109 }
5110 }
5111
5112 /* Like PROTECTED_SET_EXPR_LOCATION, but only do that if T has
5113 UNKNOWN_LOCATION. */
5114
5115 void
5116 protected_set_expr_location_if_unset (tree t, location_t loc)
5117 {
5118 t = expr_single (t);
5119 if (t && !EXPR_HAS_LOCATION (t))
5120 protected_set_expr_location (t, loc);
5121 }
5122
5123 /* Data used when collecting DECLs and TYPEs for language data removal. */
5124
class free_lang_data_d
{
public:
  /* Pre-reserve some room in the decl and type vectors.  */
  free_lang_data_d () : decls (100), types (100) {}

  /* Worklist to avoid excessive recursion.  */
  auto_vec<tree> worklist;

  /* Set of traversed objects.  Used to avoid duplicate visits.  */
  hash_set<tree> pset;

  /* Array of symbols to process with free_lang_data_in_decl.  */
  auto_vec<tree> decls;

  /* Array of types to process with free_lang_data_in_type.  */
  auto_vec<tree> types;
};
5142
5143
5144 /* Add type or decl T to one of the list of tree nodes that need their
5145 language data removed. The lists are held inside FLD. */
5146
static void
add_tree_to_fld_list (tree t, class free_lang_data_d *fld)
{
  if (DECL_P (t))
    fld->decls.safe_push (t);
  else if (TYPE_P (t))
    fld->types.safe_push (t);
  else
    /* Only decls and types are expected here.  */
    gcc_unreachable ();
}
5157
5158 /* Push tree node T into FLD->WORKLIST. */
5159
5160 static inline void
5161 fld_worklist_push (tree t, class free_lang_data_d *fld)
5162 {
5163 if (t && !is_lang_specific (t) && !fld->pset.contains (t))
5164 fld->worklist.safe_push ((t));
5165 }
5166
5167
5168 \f
5169 /* Return simplified TYPE_NAME of TYPE. */
5170
static tree
fld_simplified_type_name (tree type)
{
  if (!TYPE_NAME (type) || TREE_CODE (TYPE_NAME (type)) != TYPE_DECL)
    return TYPE_NAME (type);
  /* Drop TYPE_DECLs in TYPE_NAME in favor of the identifier in the
     TYPE_DECL if the type doesn't have linkage.
     NOTE(review): the original comment here ended "this must match fld_"
     and appears truncated; presumably it refers to the variant matching
     in fld_type_variant_equal_p, which calls this function on both
     sides -- confirm.  */
  if (type != TYPE_MAIN_VARIANT (type)
      || (!DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (type))
	  && (TREE_CODE (type) != RECORD_TYPE
	      || !TYPE_BINFO (type)
	      || !BINFO_VTABLE (TYPE_BINFO (type)))))
    return DECL_NAME (TYPE_NAME (type));
  return TYPE_NAME (type);
}
5187
/* Do same comparison as check_qualified_type skipping lang part of type
5189 and be more permissive about type names: we only care that names are
5190 same (for diagnostics) and that ODR names are the same.
5191 If INNER_TYPE is non-NULL, be sure that TREE_TYPE match it. */
5192
5193 static bool
5194 fld_type_variant_equal_p (tree t, tree v, tree inner_type)
5195 {
5196 if (TYPE_QUALS (t) != TYPE_QUALS (v)
5197 /* We want to match incomplete variants with complete types.
5198 In this case we need to ignore alignment. */
5199 || ((!RECORD_OR_UNION_TYPE_P (t) || COMPLETE_TYPE_P (v))
5200 && (TYPE_ALIGN (t) != TYPE_ALIGN (v)
5201 || TYPE_USER_ALIGN (t) != TYPE_USER_ALIGN (v)))
5202 || fld_simplified_type_name (t) != fld_simplified_type_name (v)
5203 || !attribute_list_equal (TYPE_ATTRIBUTES (t),
5204 TYPE_ATTRIBUTES (v))
5205 || (inner_type && TREE_TYPE (v) != inner_type))
5206 return false;
5207
5208 return true;
5209 }
5210
5211 /* Find variant of FIRST that match T and create new one if necessary.
5212 Set TREE_TYPE to INNER_TYPE if non-NULL. */
5213
static tree
fld_type_variant (tree first, tree t, class free_lang_data_d *fld,
		  tree inner_type = NULL)
{
  /* If T already belongs to FIRST's variant chain, nothing to do.  */
  if (first == TYPE_MAIN_VARIANT (t))
    return t;
  /* Reuse an existing equivalent variant when possible.  */
  for (tree v = first; v; v = TYPE_NEXT_VARIANT (v))
    if (fld_type_variant_equal_p (t, v, inner_type))
      return v;
  /* Otherwise create a fresh variant and copy T's qualifiers, name,
     attributes and canonical type onto it.  */
  tree v = build_variant_type_copy (first);
  TYPE_READONLY (v) = TYPE_READONLY (t);
  TYPE_VOLATILE (v) = TYPE_VOLATILE (t);
  TYPE_ATOMIC (v) = TYPE_ATOMIC (t);
  TYPE_RESTRICT (v) = TYPE_RESTRICT (t);
  TYPE_ADDR_SPACE (v) = TYPE_ADDR_SPACE (t);
  TYPE_NAME (v) = TYPE_NAME (t);
  TYPE_ATTRIBUTES (v) = TYPE_ATTRIBUTES (t);
  TYPE_CANONICAL (v) = TYPE_CANONICAL (t);
  /* Variants of incomplete types should have alignment
     set to BITS_PER_UNIT.  Do not copy the actual alignment.  */
  if (!RECORD_OR_UNION_TYPE_P (v) || COMPLETE_TYPE_P (v))
    {
      SET_TYPE_ALIGN (v, TYPE_ALIGN (t));
      TYPE_USER_ALIGN (v) = TYPE_USER_ALIGN (t);
    }
  if (inner_type)
    TREE_TYPE (v) = inner_type;
  gcc_checking_assert (fld_type_variant_equal_p (t,v, inner_type));
  if (!fld->pset.add (v))
    add_tree_to_fld_list (v, fld);
  return v;
}
5246
5247 /* Map complete types to incomplete types. */
5248
5249 static hash_map<tree, tree> *fld_incomplete_types;
5250
5251 /* Map types to simplified types. */
5252
5253 static hash_map<tree, tree> *fld_simplified_types;
5254
5255 /* Produce variant of T whose TREE_TYPE is T2. If it is main variant,
5256 use MAP to prevent duplicates. */
5257
static tree
fld_process_array_type (tree t, tree t2, hash_map<tree, tree> *map,
			class free_lang_data_d *fld)
{
  /* Nothing to do when the element type is unchanged.  */
  if (TREE_TYPE (t) == t2)
    return t;

  /* For a non-main variant, rebuild the main variant first and then
     find or create the matching variant of it.  */
  if (TYPE_MAIN_VARIANT (t) != t)
    {
      return fld_type_variant
	       (fld_process_array_type (TYPE_MAIN_VARIANT (t),
					TYPE_MAIN_VARIANT (t2), map, fld),
		t, fld, t2);
    }

  /* Main variant: build (or reuse from MAP) an array of T2.  */
  bool existed;
  tree &array
     = map->get_or_insert (t, &existed);
  if (!existed)
    {
      array
	= build_array_type_1 (t2, TYPE_DOMAIN (t), TYPE_TYPELESS_STORAGE (t),
			      false, false);
      TYPE_CANONICAL (array) = TYPE_CANONICAL (t);
      if (!fld->pset.add (array))
	add_tree_to_fld_list (array, fld);
    }
  return array;
}
5287
5288 /* Return CTX after removal of contexts that are not relevant */
5289
5290 static tree
5291 fld_decl_context (tree ctx)
5292 {
5293 /* Variably modified types are needed for tree_is_indexable to decide
5294 whether the type needs to go to local or global section.
5295 This code is semi-broken but for now it is easiest to keep contexts
5296 as expected. */
5297 if (ctx && TYPE_P (ctx)
5298 && !variably_modified_type_p (ctx, NULL_TREE))
5299 {
5300 while (ctx && TYPE_P (ctx))
5301 ctx = TYPE_CONTEXT (ctx);
5302 }
5303 return ctx;
5304 }
5305
/* For T being an aggregate type, try to turn it into an incomplete variant.
5307 Return T if no simplification is possible. */
5308
static tree
fld_incomplete_type_of (tree t, class free_lang_data_d *fld)
{
  if (!t)
    return NULL;
  if (POINTER_TYPE_P (t))
    {
      /* Simplify the pointed-to type; if that changed, rebuild the
	 pointer/reference type on top of the simplified pointee.  */
      tree t2 = fld_incomplete_type_of (TREE_TYPE (t), fld);
      if (t2 != TREE_TYPE (t))
	{
	  tree first;
	  if (TREE_CODE (t) == POINTER_TYPE)
	    first = build_pointer_type_for_mode (t2, TYPE_MODE (t),
						 TYPE_REF_CAN_ALIAS_ALL (t));
	  else
	    first = build_reference_type_for_mode (t2, TYPE_MODE (t),
						   TYPE_REF_CAN_ALIAS_ALL (t));
	  gcc_assert (TYPE_CANONICAL (t2) != t2
		      && TYPE_CANONICAL (t2) == TYPE_CANONICAL (TREE_TYPE (t)));
	  if (!fld->pset.add (first))
	    add_tree_to_fld_list (first, fld);
	  return fld_type_variant (first, t, fld);
	}
      return t;
    }
  if (TREE_CODE (t) == ARRAY_TYPE)
    return fld_process_array_type (t,
				   fld_incomplete_type_of (TREE_TYPE (t), fld),
				   fld_incomplete_types, fld);
  /* Only complete aggregates and enums can usefully be made incomplete.  */
  if ((!RECORD_OR_UNION_TYPE_P (t) && TREE_CODE (t) != ENUMERAL_TYPE)
      || !COMPLETE_TYPE_P (t))
    return t;
  if (TYPE_MAIN_VARIANT (t) == t)
    {
      bool existed;
      tree &copy
	= fld_incomplete_types->get_or_insert (t, &existed);

      if (!existed)
	{
	  copy = build_distinct_type_copy (t);

	  /* It is possible that type was not seen by free_lang_data yet.  */
	  if (!fld->pset.add (copy))
	    add_tree_to_fld_list (copy, fld);
	  /* Strip everything that makes the type complete.  */
	  TYPE_SIZE (copy) = NULL;
	  TYPE_USER_ALIGN (copy) = 0;
	  TYPE_SIZE_UNIT (copy) = NULL;
	  TYPE_CANONICAL (copy) = TYPE_CANONICAL (t);
	  TREE_ADDRESSABLE (copy) = 0;
	  if (AGGREGATE_TYPE_P (t))
	    {
	      SET_TYPE_MODE (copy, VOIDmode);
	      SET_TYPE_ALIGN (copy, BITS_PER_UNIT);
	      TYPE_TYPELESS_STORAGE (copy) = 0;
	      TYPE_FIELDS (copy) = NULL;
	      TYPE_BINFO (copy) = NULL;
	      TYPE_FINAL_P (copy) = 0;
	      TYPE_EMPTY_P (copy) = 0;
	    }
	  else
	    {
	      TYPE_VALUES (copy) = NULL;
	      ENUM_IS_OPAQUE (copy) = 0;
	      ENUM_IS_SCOPED (copy) = 0;
	    }

	  /* Build copy of TYPE_DECL in TYPE_NAME if necessary.
	     This is needed for ODR violation warnings to come out right (we
	     want duplicate TYPE_DECLs whenever the type is duplicated because
	     of ODR violation.  Because lang data in the TYPE_DECL may not
	     have been freed yet, rebuild it from scratch and copy relevant
	     fields.  */
	  TYPE_NAME (copy) = fld_simplified_type_name (copy);
	  tree name = TYPE_NAME (copy);

	  if (name && TREE_CODE (name) == TYPE_DECL)
	    {
	      gcc_checking_assert (TREE_TYPE (name) == t);
	      tree name2 = build_decl (DECL_SOURCE_LOCATION (name), TYPE_DECL,
				       DECL_NAME (name), copy);
	      if (DECL_ASSEMBLER_NAME_SET_P (name))
		SET_DECL_ASSEMBLER_NAME (name2, DECL_ASSEMBLER_NAME (name));
	      SET_DECL_ALIGN (name2, 0);
	      DECL_CONTEXT (name2) = fld_decl_context
					(DECL_CONTEXT (name));
	      TYPE_NAME (copy) = name2;
	    }
	}
      return copy;
    }
  /* Non-main variant: simplify the main variant and pick its matching
     variant.  */
  return (fld_type_variant
	  (fld_incomplete_type_of (TYPE_MAIN_VARIANT (t), fld), t, fld));
}
5403
5404 /* Simplify type T for scenarios where we do not need complete pointer
5405 types. */
5406
static tree
fld_simplified_type (tree t, class free_lang_data_d *fld)
{
  if (!t)
    return t;
  if (POINTER_TYPE_P (t))
    return fld_incomplete_type_of (t, fld);
  /* FIXME: This triggers verification error, see PR88140.  The "&& 0"
     deliberately disables the array-type path until that is fixed.  */
  if (TREE_CODE (t) == ARRAY_TYPE && 0)
    return fld_process_array_type (t, fld_simplified_type (TREE_TYPE (t), fld),
				   fld_simplified_types, fld);
  return t;
}
5420
5421 /* Reset the expression *EXPR_P, a size or position.
5422
5423 ??? We could reset all non-constant sizes or positions. But it's cheap
5424 enough to not do so and refrain from adding workarounds to dwarf2out.c.
5425
5426 We need to reset self-referential sizes or positions because they cannot
5427 be gimplified and thus can contain a CALL_EXPR after the gimplification
5428 is finished, which will run afoul of LTO streaming. And they need to be
5429 reset to something essentially dummy but not constant, so as to preserve
5430 the properties of the object they are attached to. */
5431
5432 static inline void
5433 free_lang_data_in_one_sizepos (tree *expr_p)
5434 {
5435 tree expr = *expr_p;
5436 if (CONTAINS_PLACEHOLDER_P (expr))
5437 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
5438 }
5439
5440
5441 /* Reset all the fields in a binfo node BINFO. We only keep
5442 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
5443
5444 static void
5445 free_lang_data_in_binfo (tree binfo)
5446 {
5447 unsigned i;
5448 tree t;
5449
5450 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
5451
5452 BINFO_VIRTUALS (binfo) = NULL_TREE;
5453 BINFO_BASE_ACCESSES (binfo) = NULL;
5454 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
5455 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
5456 BINFO_VPTR_FIELD (binfo) = NULL_TREE;
5457 TREE_PUBLIC (binfo) = 0;
5458
5459 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
5460 free_lang_data_in_binfo (t);
5461 }
5462
5463
/* Reset all language specific information still present in TYPE.
   FLD carries the pass-wide state; its pset records nodes already
   visited so removed variants and requalified argument types are not
   processed twice.  */

static void
free_lang_data_in_type (tree type, class free_lang_data_d *fld)
{
  gcc_assert (TYPE_P (type));

  /* Give the FE a chance to remove its own data first.  */
  lang_hooks.free_lang_data (type);

  /* The language flag bits have frontend-private meanings; none of
     them survive into the middle end.  */
  TREE_LANG_FLAG_0 (type) = 0;
  TREE_LANG_FLAG_1 (type) = 0;
  TREE_LANG_FLAG_2 (type) = 0;
  TREE_LANG_FLAG_3 (type) = 0;
  TREE_LANG_FLAG_4 (type) = 0;
  TREE_LANG_FLAG_5 (type) = 0;
  TREE_LANG_FLAG_6 (type) = 0;

  TYPE_NEEDS_CONSTRUCTING (type) = 0;

  /* Purge non-marked variants from the variants chain, so that they
     don't reappear in the IL after free_lang_data.  */
  while (TYPE_NEXT_VARIANT (type)
	 && !fld->pset.contains (TYPE_NEXT_VARIANT (type)))
    {
      tree t = TYPE_NEXT_VARIANT (type);
      TYPE_NEXT_VARIANT (type) = TYPE_NEXT_VARIANT (t);
      /* Turn the removed types into distinct types.  */
      TYPE_MAIN_VARIANT (t) = t;
      TYPE_NEXT_VARIANT (t) = NULL_TREE;
    }

  if (TREE_CODE (type) == FUNCTION_TYPE)
    {
      TREE_TYPE (type) = fld_simplified_type (TREE_TYPE (type), fld);
      /* Remove the const and volatile qualifiers from arguments.  The
	 C++ front end removes them, but the C front end does not,
	 leading to false ODR violation errors when merging two
	 instances of the same function signature compiled by
	 different front ends.  */
      for (tree p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
	{
	  TREE_VALUE (p) = fld_simplified_type (TREE_VALUE (p), fld);
	  tree arg_type = TREE_VALUE (p);

	  if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
	    {
	      int quals = TYPE_QUALS (arg_type)
			  & ~TYPE_QUAL_CONST
			  & ~TYPE_QUAL_VOLATILE;
	      TREE_VALUE (p) = build_qualified_type (arg_type, quals);
	      /* The requalified type is freshly reachable; recurse into
		 it unless it was already visited.  */
	      if (!fld->pset.add (TREE_VALUE (p)))
		free_lang_data_in_type (TREE_VALUE (p), fld);
	    }
	  /* C++ FE uses TREE_PURPOSE to store initial values.  */
	  TREE_PURPOSE (p) = NULL;
	}
    }
  else if (TREE_CODE (type) == METHOD_TYPE)
    {
      TREE_TYPE (type) = fld_simplified_type (TREE_TYPE (type), fld);
      for (tree p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
	{
	  /* C++ FE uses TREE_PURPOSE to store initial values.  */
	  TREE_VALUE (p) = fld_simplified_type (TREE_VALUE (p), fld);
	  TREE_PURPOSE (p) = NULL;
	}
    }
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      /* Remove members that are not FIELD_DECLs from the field list
	 of an aggregate.  These occur in C++.  */
      for (tree *prev = &TYPE_FIELDS (type), member; (member = *prev);)
	if (TREE_CODE (member) == FIELD_DECL)
	  prev = &DECL_CHAIN (member);
	else
	  *prev = DECL_CHAIN (member);

      TYPE_VFIELD (type) = NULL_TREE;

      if (TYPE_BINFO (type))
	{
	  free_lang_data_in_binfo (TYPE_BINFO (type));
	  /* We need to preserve link to bases and virtual table for all
	     polymorphic types to make devirtualization machinery working.  */
	  if (!BINFO_VTABLE (TYPE_BINFO (type)))
	    TYPE_BINFO (type) = NULL;
	}
    }
  else if (INTEGRAL_TYPE_P (type)
	   || SCALAR_FLOAT_TYPE_P (type)
	   || FIXED_POINT_TYPE_P (type))
    {
      if (TREE_CODE (type) == ENUMERAL_TYPE)
	{
	  ENUM_IS_OPAQUE (type) = 0;
	  ENUM_IS_SCOPED (type) = 0;
	  /* Type values are used only for C++ ODR checking.  Drop them
	     for all type variants and non-ODR types.
	     For ODR types the data is freed in free_odr_warning_data.  */
	  if (TYPE_MAIN_VARIANT (type) != type
	      || !type_with_linkage_p (type))
	    TYPE_VALUES (type) = NULL;
	  else
	    /* Simplify representation by recording only values rather
	       than const decls.  */
	    for (tree e = TYPE_VALUES (type); e; e = TREE_CHAIN (e))
	      if (TREE_CODE (TREE_VALUE (e)) == CONST_DECL)
		TREE_VALUE (e) = DECL_INITIAL (TREE_VALUE (e));
	}
      free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
      free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
    }

  TYPE_LANG_SLOT_1 (type) = NULL_TREE;

  free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
  free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));

  if (TYPE_CONTEXT (type)
      && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
    {
      /* BLOCK contexts are not streamed; hoist the context to the
	 innermost enclosing non-BLOCK scope (see the matching
	 adjustment in find_decls_types_r).  */
      tree ctx = TYPE_CONTEXT (type);
      do
	{
	  ctx = BLOCK_SUPERCONTEXT (ctx);
	}
      while (ctx && TREE_CODE (ctx) == BLOCK);
      TYPE_CONTEXT (type) = ctx;
    }

  TYPE_STUB_DECL (type) = NULL;
  TYPE_NAME (type) = fld_simplified_type_name (type);
}
5598
5599
/* Return true if DECL may need an assembler name to be set.  Used by
   assign_assembler_name_if_needed before language data is freed (the
   mangler needs frontend data that free_lang_data_in_decl destroys).  */

static inline bool
need_assembler_name_p (tree decl)
{
  /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
     Rule merging.  This makes type_odr_p to return true on those types during
     LTO and by comparing the mangled name, we can say what types are intended
     to be equivalent across compilation unit.

     We do not store names of type_in_anonymous_namespace_p.

     Record, union and enumeration type have linkage that allows use
     to check type_in_anonymous_namespace_p.  We do not mangle compound types
     that always can be compared structurally.

     Similarly for builtin types, we compare properties of their main variant.
     A special case are integer types where mangling do make differences
     between char/signed char/unsigned char etc.  Storing name for these makes
     e.g.  -fno-signed-char/-fsigned-char mismatches to be handled well.
     See cp/mangle.c:write_builtin_type for details.  */

  if (TREE_CODE (decl) == TYPE_DECL)
    {
      /* Only the main-variant, non-artificial TYPE_DECL that actually
	 names its type gets a mangled name, and only for ODR-relevant
	 or linkage-bearing types.  */
      if (DECL_NAME (decl)
	  && decl == TYPE_NAME (TREE_TYPE (decl))
	  && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
	  && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
	  && ((TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
	       && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE)
	      || TYPE_CXX_ODR_P (TREE_TYPE (decl)))
	  && (type_with_linkage_p (TREE_TYPE (decl))
	      || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
	  && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
	return !DECL_ASSEMBLER_NAME_SET_P (decl);
      return false;
    }
  /* Only FUNCTION_DECLs and VAR_DECLs are considered.  */
  if (!VAR_OR_FUNCTION_DECL_P (decl))
    return false;

  /* If DECL already has its assembler name set, it does not need a
     new one.  */
  if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
      || DECL_ASSEMBLER_NAME_SET_P (decl))
    return false;

  /* Abstract decls do not need an assembler name.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* For VAR_DECLs, only static, public and external symbols need an
     assembler name.  */
  if (VAR_P (decl)
      && !TREE_STATIC (decl)
      && !TREE_PUBLIC (decl)
      && !DECL_EXTERNAL (decl))
    return false;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      /* Do not set assembler name on builtins.  Allow RTL expansion to
	 decide whether to expand inline or via a regular call.  */
      if (fndecl_built_in_p (decl)
	  && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
	return false;

      /* Functions represented in the callgraph need an assembler name.  */
      if (cgraph_node::get (decl) != NULL)
	return true;

      /* Unused and not public functions don't need an assembler name.  */
      if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
	return false;
    }

  return true;
}
5678
5679
/* Reset all language specific information still present in symbol
   DECL.  FLD carries the pass-wide state used for type
   simplification.  Must run after assembler names were assigned
   (see free_lang_data_in_cgraph) since this destroys data the
   mangler needs.  */

static void
free_lang_data_in_decl (tree decl, class free_lang_data_d *fld)
{
  gcc_assert (DECL_P (decl));

  /* Give the FE a chance to remove its own data first.  */
  lang_hooks.free_lang_data (decl);

  /* Frontend-private flag bits do not survive into the middle end.  */
  TREE_LANG_FLAG_0 (decl) = 0;
  TREE_LANG_FLAG_1 (decl) = 0;
  TREE_LANG_FLAG_2 (decl) = 0;
  TREE_LANG_FLAG_3 (decl) = 0;
  TREE_LANG_FLAG_4 (decl) = 0;
  TREE_LANG_FLAG_5 (decl) = 0;
  TREE_LANG_FLAG_6 (decl) = 0;

  /* Self-referential sizes/offsets cannot survive LTO streaming;
     replace them with placeholders.  */
  free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
  free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
  if (TREE_CODE (decl) == FIELD_DECL)
    {
      DECL_FCONTEXT (decl) = NULL;
      free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
      if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
	DECL_QUALIFIER (decl) = NULL_TREE;
    }

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      struct cgraph_node *node;
      /* Frontends do not set TREE_ADDRESSABLE on public variables even though
	 the address may be taken in other unit, so this flag has no practical
	 use for middle-end.

	 It would make more sense if frontends set TREE_ADDRESSABLE to 0 only
	 for public objects that indeed cannot be adressed, but it is not
	 the case.  Set the flag to true so we do not get merge failures for
	 i.e. virtual tables between units that take address of it and
	 units that don't.  */
      if (TREE_PUBLIC (decl))
	TREE_ADDRESSABLE (decl) = true;
      TREE_TYPE (decl) = fld_simplified_type (TREE_TYPE (decl), fld);
      /* Functions without a (needed) body can have that body, their
	 arguments and result released; callers only need the decl.  */
      if (!(node = cgraph_node::get (decl))
	  || (!node->definition && !node->clones))
	{
	  if (node)
	    node->release_body ();
	  else
	    {
	      release_function_body (decl);
	      DECL_ARGUMENTS (decl) = NULL;
	      DECL_RESULT (decl) = NULL;
	      DECL_INITIAL (decl) = error_mark_node;
	    }
	}
      if (gimple_has_body_p (decl) || (node && node->thunk.thunk_p))
	{
	  tree t;

	  /* If DECL has a gimple body, then the context for its
	     arguments must be DECL.  Otherwise, it doesn't really
	     matter, as we will not be emitting any code for DECL.  In
	     general, there may be other instances of DECL created by
	     the front end and since PARM_DECLs are generally shared,
	     their DECL_CONTEXT changes as the replicas of DECL are
	     created.  The only time where DECL_CONTEXT is important
	     is for the FUNCTION_DECLs that have a gimple body (since
	     the PARM_DECL will be used in the function's body).  */
	  for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
	    DECL_CONTEXT (t) = decl;
	  if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
	    DECL_FUNCTION_SPECIFIC_TARGET (decl)
	      = target_option_default_node;
	  if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
	    DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
	      = optimization_default_node;
	}

      /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
	 At this point, it is not needed anymore.  */
      DECL_SAVED_TREE (decl) = NULL_TREE;

      /* Clear the abstract origin if it refers to a method.
	 Otherwise dwarf2out.c will ICE as we splice functions out of
	 TYPE_FIELDS and thus the origin will not be output
	 correctly.  */
      if (DECL_ABSTRACT_ORIGIN (decl)
	  && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
	  && RECORD_OR_UNION_TYPE_P
	       (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
	DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;

      DECL_VINDEX (decl) = NULL_TREE;
    }
  else if (VAR_P (decl))
    {
      /* See comment above why we set the flag for functions.  */
      if (TREE_PUBLIC (decl))
	TREE_ADDRESSABLE (decl) = true;
      /* Drop initializers that cannot matter to the middle end:
	 non-constant externals and function-local non-statics.  */
      if ((DECL_EXTERNAL (decl)
	   && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
	  || (decl_function_context (decl) && !TREE_STATIC (decl)))
	DECL_INITIAL (decl) = NULL_TREE;
    }
  else if (TREE_CODE (decl) == TYPE_DECL)
    {
      DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (decl) = 0;
      TREE_PUBLIC (decl) = 0;
      TREE_PRIVATE (decl) = 0;
      DECL_ARTIFICIAL (decl) = 0;
      TYPE_DECL_SUPPRESS_DEBUG (decl) = 0;
      DECL_INITIAL (decl) = NULL_TREE;
      DECL_ORIGINAL_TYPE (decl) = NULL_TREE;
      DECL_MODE (decl) = VOIDmode;
      SET_DECL_ALIGN (decl, 0);
      /* TREE_TYPE is cleared at WPA time in free_odr_warning_data.  */
    }
  else if (TREE_CODE (decl) == FIELD_DECL)
    {
      TREE_TYPE (decl) = fld_simplified_type (TREE_TYPE (decl), fld);
      DECL_INITIAL (decl) = NULL_TREE;
    }
  else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
	   && DECL_INITIAL (decl)
	   && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
    {
      /* Strip builtins from the translation-unit BLOCK.  We still have targets
	 without builtin_decl_explicit support and also builtins are shared
	 nodes and thus we can't use TREE_CHAIN in multiple lists.  */
      tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
      while (*nextp)
	{
	  tree var = *nextp;
	  if (TREE_CODE (var) == FUNCTION_DECL
	      && fndecl_built_in_p (var))
	    *nextp = TREE_CHAIN (var);
	  else
	    nextp = &TREE_CHAIN (var);
	}
    }
  /* We need to keep field decls associated with their trees.  Otherwise tree
     merging may merge some fileds and keep others disjoint wich in turn will
     not do well with TREE_CHAIN pointers linking them.

     Also do not drop containing types for virtual methods and tables because
     these are needed by devirtualization.
     C++ destructors are special because C++ frontends sometimes produces
     virtual destructor as an alias of non-virtual destructor.  In
     devirutalization code we always walk through aliases and we need
     context to be preserved too.  See PR89335  */
  if (TREE_CODE (decl) != FIELD_DECL
      && ((TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL)
	  || (!DECL_VIRTUAL_P (decl)
	      && (TREE_CODE (decl) != FUNCTION_DECL
		  || !DECL_CXX_DESTRUCTOR_P (decl)))))
    DECL_CONTEXT (decl) = fld_decl_context (DECL_CONTEXT (decl));
}
5840
5841
/* Operand callback helper for free_lang_data_in_node.  *TP is the
   subtree operand being considered.  WS is walk_tree's walk-subtrees
   flag (set to 0 to prune the walk); DATA is the free_lang_data_d
   state.  Decls and types found are recorded via add_tree_to_fld_list
   and further work is queued with fld_worklist_push.  */

static tree
find_decls_types_r (tree *tp, int *ws, void *data)
{
  tree t = *tp;
  class free_lang_data_d *fld = (class free_lang_data_d *) data;

  if (TREE_CODE (t) == TREE_LIST)
    return NULL_TREE;

  /* Language specific nodes will be removed, so there is no need
     to gather anything under them.  */
  if (is_lang_specific (t))
    {
      *ws = 0;
      return NULL_TREE;
    }

  if (DECL_P (t))
    {
      /* Note that walk_tree does not traverse every possible field in
	 decls, so we have to do our own traversals here.  */
      add_tree_to_fld_list (t, fld);

      fld_worklist_push (DECL_NAME (t), fld);
      fld_worklist_push (DECL_CONTEXT (t), fld);
      fld_worklist_push (DECL_SIZE (t), fld);
      fld_worklist_push (DECL_SIZE_UNIT (t), fld);

      /* We are going to remove everything under DECL_INITIAL for
	 TYPE_DECLs.  No point walking them.  */
      if (TREE_CODE (t) != TYPE_DECL)
	fld_worklist_push (DECL_INITIAL (t), fld);

      fld_worklist_push (DECL_ATTRIBUTES (t), fld);
      fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);

      if (TREE_CODE (t) == FUNCTION_DECL)
	{
	  fld_worklist_push (DECL_ARGUMENTS (t), fld);
	  fld_worklist_push (DECL_RESULT (t), fld);
	}
      else if (TREE_CODE (t) == FIELD_DECL)
	{
	  fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
	  fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
	  fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
	  fld_worklist_push (DECL_FCONTEXT (t), fld);
	}

      if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL)
	  && DECL_HAS_VALUE_EXPR_P (t))
	fld_worklist_push (DECL_VALUE_EXPR (t), fld);

      /* FIELD_DECL and TYPE_DECL chains are walked via their
	 containing type, not here.  */
      if (TREE_CODE (t) != FIELD_DECL
	  && TREE_CODE (t) != TYPE_DECL)
	fld_worklist_push (TREE_CHAIN (t), fld);
      *ws = 0;
    }
  else if (TYPE_P (t))
    {
      /* Note that walk_tree does not traverse every possible field in
	 types, so we have to do our own traversals here.  */
      add_tree_to_fld_list (t, fld);

      if (!RECORD_OR_UNION_TYPE_P (t))
	fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
      fld_worklist_push (TYPE_SIZE (t), fld);
      fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
      fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
      fld_worklist_push (TYPE_POINTER_TO (t), fld);
      fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
      fld_worklist_push (TYPE_NAME (t), fld);
      /* While we do not stream TYPE_POINTER_TO and TYPE_REFERENCE_TO
	 lists, we may look types up in these lists and use them while
	 optimizing the function body.  Thus we need to free lang data
	 in them.  */
      if (TREE_CODE (t) == POINTER_TYPE)
	fld_worklist_push (TYPE_NEXT_PTR_TO (t), fld);
      if (TREE_CODE (t) == REFERENCE_TYPE)
	fld_worklist_push (TYPE_NEXT_REF_TO (t), fld);
      if (!POINTER_TYPE_P (t))
	fld_worklist_push (TYPE_MIN_VALUE_RAW (t), fld);
      /* TYPE_MAX_VALUE_RAW is TYPE_BINFO for record types.  */
      if (!RECORD_OR_UNION_TYPE_P (t))
	fld_worklist_push (TYPE_MAX_VALUE_RAW (t), fld);
      fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
      /* Do not walk TYPE_NEXT_VARIANT.  We do not stream it and thus
	 do not and want not to reach unused variants this way.  */
      if (TYPE_CONTEXT (t))
	{
	  tree ctx = TYPE_CONTEXT (t);
	  /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
	     So push that instead.  */
	  while (ctx && TREE_CODE (ctx) == BLOCK)
	    ctx = BLOCK_SUPERCONTEXT (ctx);
	  fld_worklist_push (ctx, fld);
	}
      fld_worklist_push (TYPE_CANONICAL (t), fld);

      if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
	{
	  unsigned i;
	  tree tem;
	  FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
	    fld_worklist_push (TREE_TYPE (tem), fld);
	  fld_worklist_push (BINFO_TYPE (TYPE_BINFO (t)), fld);
	  fld_worklist_push (BINFO_VTABLE (TYPE_BINFO (t)), fld);
	}
      if (RECORD_OR_UNION_TYPE_P (t))
	{
	  tree tem;
	  /* Push all TYPE_FIELDS - there can be interleaving interesting
	     and non-interesting things.  */
	  tem = TYPE_FIELDS (t);
	  while (tem)
	    {
	      if (TREE_CODE (tem) == FIELD_DECL)
		fld_worklist_push (tem, fld);
	      tem = TREE_CHAIN (tem);
	    }
	}
      if (FUNC_OR_METHOD_TYPE_P (t))
	fld_worklist_push (TYPE_METHOD_BASETYPE (t), fld);

      fld_worklist_push (TYPE_STUB_DECL (t), fld);
      *ws = 0;
    }
  else if (TREE_CODE (t) == BLOCK)
    {
      /* Keep only labels and automatic variables of the owning
	 function in BLOCK_VARS; splice everything else out.  */
      for (tree *tem = &BLOCK_VARS (t); *tem; )
	{
	  if (TREE_CODE (*tem) != LABEL_DECL
	      && (TREE_CODE (*tem) != VAR_DECL
		  || !auto_var_in_fn_p (*tem, DECL_CONTEXT (*tem))))
	    {
	      gcc_assert (TREE_CODE (*tem) != RESULT_DECL
			  && TREE_CODE (*tem) != PARM_DECL);
	      *tem = TREE_CHAIN (*tem);
	    }
	  else
	    {
	      fld_worklist_push (*tem, fld);
	      tem = &TREE_CHAIN (*tem);
	    }
	}
      for (tree tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
	fld_worklist_push (tem, fld);
      fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
    }

  if (TREE_CODE (t) != IDENTIFIER_NODE
      && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
    fld_worklist_push (TREE_TYPE (t), fld);

  return NULL_TREE;
}
6001
6002
6003 /* Find decls and types in T. */
6004
6005 static void
6006 find_decls_types (tree t, class free_lang_data_d *fld)
6007 {
6008 while (1)
6009 {
6010 if (!fld->pset.contains (t))
6011 walk_tree (&t, find_decls_types_r, fld, &fld->pset);
6012 if (fld->worklist.is_empty ())
6013 break;
6014 t = fld->worklist.pop ();
6015 }
6016 }
6017
6018 /* Translate all the types in LIST with the corresponding runtime
6019 types. */
6020
6021 static tree
6022 get_eh_types_for_runtime (tree list)
6023 {
6024 tree head, prev;
6025
6026 if (list == NULL_TREE)
6027 return NULL_TREE;
6028
6029 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
6030 prev = head;
6031 list = TREE_CHAIN (list);
6032 while (list)
6033 {
6034 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
6035 TREE_CHAIN (prev) = n;
6036 prev = TREE_CHAIN (prev);
6037 list = TREE_CHAIN (list);
6038 }
6039
6040 return head;
6041 }
6042
6043
6044 /* Find decls and types referenced in EH region R and store them in
6045 FLD->DECLS and FLD->TYPES. */
6046
6047 static void
6048 find_decls_types_in_eh_region (eh_region r, class free_lang_data_d *fld)
6049 {
6050 switch (r->type)
6051 {
6052 case ERT_CLEANUP:
6053 break;
6054
6055 case ERT_TRY:
6056 {
6057 eh_catch c;
6058
6059 /* The types referenced in each catch must first be changed to the
6060 EH types used at runtime. This removes references to FE types
6061 in the region. */
6062 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
6063 {
6064 c->type_list = get_eh_types_for_runtime (c->type_list);
6065 walk_tree (&c->type_list, find_decls_types_r, fld, &fld->pset);
6066 }
6067 }
6068 break;
6069
6070 case ERT_ALLOWED_EXCEPTIONS:
6071 r->u.allowed.type_list
6072 = get_eh_types_for_runtime (r->u.allowed.type_list);
6073 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, &fld->pset);
6074 break;
6075
6076 case ERT_MUST_NOT_THROW:
6077 walk_tree (&r->u.must_not_throw.failure_decl,
6078 find_decls_types_r, fld, &fld->pset);
6079 break;
6080 }
6081 }
6082
6083
/* Find decls and types referenced in cgraph node N and store them in
   FLD->DECLS and FLD->TYPES.  Unlike pass_referenced_vars, this will
   look for *every* kind of DECL and TYPE node reachable from N,
   including those embedded inside types and decls (i.e,, TYPE_DECLs,
   NAMESPACE_DECLs, etc).  */

static void
find_decls_types_in_node (struct cgraph_node *n, class free_lang_data_d *fld)
{
  basic_block bb;
  struct function *fn;
  unsigned ix;
  tree t;

  find_decls_types (n->decl, fld);

  /* Without a gimple body there are no locals, EH regions or
     statements to scan.  */
  if (!gimple_has_body_p (n->decl))
    return;

  gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);

  fn = DECL_STRUCT_FUNCTION (n->decl);

  /* Traverse locals.  */
  FOR_EACH_LOCAL_DECL (fn, ix, t)
    find_decls_types (t, fld);

  /* Traverse EH regions in FN.  */
  {
    eh_region r;
    FOR_ALL_EH_REGION_FN (r, fn)
      find_decls_types_in_eh_region (r, fld);
  }

  /* Traverse every statement in FN.  */
  FOR_EACH_BB_FN (bb, fn)
    {
      gphi_iterator psi;
      gimple_stmt_iterator si;
      unsigned i;

      /* PHI nodes first: each argument may reference decls/types.  */
      for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
	{
	  gphi *phi = psi.phi ();

	  for (i = 0; i < gimple_phi_num_args (phi); i++)
	    {
	      tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
	      find_decls_types (*arg_p, fld);
	    }
	}

      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
	{
	  gimple *stmt = gsi_stmt (si);

	  /* The call fntype is not among the statement operands.  */
	  if (is_gimple_call (stmt))
	    find_decls_types (gimple_call_fntype (stmt), fld);

	  for (i = 0; i < gimple_num_ops (stmt); i++)
	    {
	      tree arg = gimple_op (stmt, i);
	      find_decls_types (arg, fld);
	      /* find_decls_types doesn't walk TREE_PURPOSE of TREE_LISTs,
		 which we need for asm stmts.  */
	      if (arg
		  && TREE_CODE (arg) == TREE_LIST
		  && TREE_PURPOSE (arg)
		  && gimple_code (stmt) == GIMPLE_ASM)
		find_decls_types (TREE_PURPOSE (arg), fld);
	    }
	}
    }
}
6158
6159
/* Find decls and types referenced in varpool node N and store them in
   FLD->DECLS and FLD->TYPES.  Unlike pass_referenced_vars, this will
   look for *every* kind of DECL and TYPE node reachable from N,
   including those embedded inside types and decls (i.e,, TYPE_DECLs,
   NAMESPACE_DECLs, etc).  */

static void
find_decls_types_in_var (varpool_node *v, class free_lang_data_d *fld)
{
  /* Thin wrapper: everything reachable from a variable hangs off its
     decl, so the generic walk suffices.  */
  find_decls_types (v->decl, fld);
}
6171
6172 /* If T needs an assembler name, have one created for it. */
6173
6174 void
6175 assign_assembler_name_if_needed (tree t)
6176 {
6177 if (need_assembler_name_p (t))
6178 {
6179 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
6180 diagnostics that use input_location to show locus
6181 information. The problem here is that, at this point,
6182 input_location is generally anchored to the end of the file
6183 (since the parser is long gone), so we don't have a good
6184 position to pin it to.
6185
6186 To alleviate this problem, this uses the location of T's
6187 declaration. Examples of this are
6188 testsuite/g++.dg/template/cond2.C and
6189 testsuite/g++.dg/template/pr35240.C. */
6190 location_t saved_location = input_location;
6191 input_location = DECL_SOURCE_LOCATION (t);
6192
6193 decl_assembler_name (t);
6194
6195 input_location = saved_location;
6196 }
6197 }
6198
6199
6200 /* Free language specific information for every operand and expression
6201 in every node of the call graph. This process operates in three stages:
6202
6203 1- Every callgraph node and varpool node is traversed looking for
6204 decls and types embedded in them. This is a more exhaustive
6205 search than that done by find_referenced_vars, because it will
6206 also collect individual fields, decls embedded in types, etc.
6207
6208 2- All the decls found are sent to free_lang_data_in_decl.
6209
6210 3- All the types found are sent to free_lang_data_in_type.
6211
6212 The ordering between decls and types is important because
6213 free_lang_data_in_decl sets assembler names, which includes
6214 mangling. So types cannot be freed up until assembler names have
6215 been set up. */
6216
6217 static void
6218 free_lang_data_in_cgraph (class free_lang_data_d *fld)
6219 {
6220 struct cgraph_node *n;
6221 varpool_node *v;
6222 tree t;
6223 unsigned i;
6224 alias_pair *p;
6225
6226 /* Find decls and types in the body of every function in the callgraph. */
6227 FOR_EACH_FUNCTION (n)
6228 find_decls_types_in_node (n, fld);
6229
6230 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
6231 find_decls_types (p->decl, fld);
6232
6233 /* Find decls and types in every varpool symbol. */
6234 FOR_EACH_VARIABLE (v)
6235 find_decls_types_in_var (v, fld);
6236
6237 /* Set the assembler name on every decl found. We need to do this
6238 now because free_lang_data_in_decl will invalidate data needed
6239 for mangling. This breaks mangling on interdependent decls. */
6240 FOR_EACH_VEC_ELT (fld->decls, i, t)
6241 assign_assembler_name_if_needed (t);
6242
6243 /* Traverse every decl found freeing its language data. */
6244 FOR_EACH_VEC_ELT (fld->decls, i, t)
6245 free_lang_data_in_decl (t, fld);
6246
6247 /* Traverse every type found freeing its language data. */
6248 FOR_EACH_VEC_ELT (fld->types, i, t)
6249 free_lang_data_in_type (t, fld);
6250 }
6251
6252
/* Free resources that are used by FE but are not needed once they are done.
   Returns 0 (the pass's todo flags).  Only runs when generating LTO or
   offload bytecode; otherwise just rebuilds the type inheritance
   graph.  */

static unsigned
free_lang_data (void)
{
  unsigned i;
  class free_lang_data_d fld;

  /* If we are the LTO frontend we have freed lang-specific data already.  */
  if (in_lto_p
      || (!flag_generate_lto && !flag_generate_offload))
    {
      /* Rebuild type inheritance graph even when not doing LTO to get
	 consistent profile data.  */
      rebuild_type_inheritance_graph ();
      return 0;
    }

  /* Caches used by fld_incomplete_type_of/fld_simplified_type for the
     duration of this pass; freed at the end.  */
  fld_incomplete_types = new hash_map<tree, tree>;
  fld_simplified_types = new hash_map<tree, tree>;

  /* Provide a dummy TRANSLATION_UNIT_DECL if the FE failed to provide one.  */
  if (vec_safe_is_empty (all_translation_units))
    build_translation_unit_decl (NULL_TREE);

  /* Allocate and assign alias sets to the standard integer types
     while the slots are still in the way the frontends generated them.  */
  for (i = 0; i < itk_none; ++i)
    if (integer_types[i])
      TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);

  /* Traverse the IL resetting language specific information for
     operands, expressions, etc.  */
  free_lang_data_in_cgraph (&fld);

  /* Create gimple variants for common types.  */
  for (unsigned i = 0;
       i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
       ++i)
    builtin_structptr_types[i].node = builtin_structptr_types[i].base;

  /* Reset some langhooks.  Do not reset types_compatible_p, it may
     still be used indirectly via the get_alias_set langhook.  */
  lang_hooks.dwarf_name = lhd_dwarf_name;
  lang_hooks.decl_printable_name = gimple_decl_printable_name;
  lang_hooks.gimplify_expr = lhd_gimplify_expr;
  lang_hooks.overwrite_decl_assembler_name = lhd_overwrite_decl_assembler_name;
  lang_hooks.print_xnode = lhd_print_tree_nothing;
  lang_hooks.print_decl = lhd_print_tree_nothing;
  lang_hooks.print_type = lhd_print_tree_nothing;
  lang_hooks.print_identifier = lhd_print_tree_nothing;

  lang_hooks.tree_inlining.var_mod_type_p = hook_bool_tree_tree_false;

  /* With checking enabled, verify every type that was collected.  */
  if (flag_checking)
    {
      int i;
      tree t;

      FOR_EACH_VEC_ELT (fld.types, i, t)
	verify_type (t);
    }

  /* We do not want the default decl_assembler_name implementation,
     rather if we have fixed everything we want a wrapper around it
     asserting that all non-local symbols already got their assembler
     name and only produce assembler names for local symbols.  Or rather
     make sure we never call decl_assembler_name on local symbols and
     devise a separate, middle-end private scheme for it.  */

  /* Reset diagnostic machinery.  */
  tree_diagnostics_defaults (global_dc);

  rebuild_type_inheritance_graph ();

  delete fld_incomplete_types;
  delete fld_simplified_types;

  return 0;
}
6333
6334
namespace {

/* Pass descriptor for the *free_lang_data simple IPA pass.  */

const pass_data pass_data_ipa_free_lang_data =
{
  SIMPLE_IPA_PASS, /* type */
  "*free_lang_data", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_FREE_LANG_DATA, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Simple IPA pass wrapper that invokes free_lang_data once the
   frontend is finished with the IL.  */

class pass_ipa_free_lang_data : public simple_ipa_opt_pass
{
public:
  pass_ipa_free_lang_data (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return free_lang_data (); }

}; // class pass_ipa_free_lang_data

} // anon namespace
6363
/* Factory for the *free_lang_data pass, registered in passes.def.  */

simple_ipa_opt_pass *
make_pass_ipa_free_lang_data (gcc::context *ctxt)
{
  return new pass_ipa_free_lang_data (ctxt);
}
6369 \f
6370 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6371 of the various TYPE_QUAL values. */
6372
6373 static void
6374 set_type_quals (tree type, int type_quals)
6375 {
6376 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6377 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6378 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6379 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6380 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6381 }
6382
6383 /* Returns true iff CAND and BASE have equivalent language-specific
6384 qualifiers. */
6385
6386 bool
6387 check_lang_type (const_tree cand, const_tree base)
6388 {
6389 if (lang_hooks.types.type_hash_eq == NULL)
6390 return true;
6391 /* type_hash_eq currently only applies to these types. */
6392 if (TREE_CODE (cand) != FUNCTION_TYPE
6393 && TREE_CODE (cand) != METHOD_TYPE)
6394 return true;
6395 return lang_hooks.types.type_hash_eq (cand, base);
6396 }
6397
6398 /* This function checks to see if TYPE matches the size one of the built-in
6399 atomic types, and returns that core atomic type. */
6400
6401 static tree
6402 find_atomic_core_type (const_tree type)
6403 {
6404 tree base_atomic_type;
6405
6406 /* Only handle complete types. */
6407 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
6408 return NULL_TREE;
6409
6410 switch (tree_to_uhwi (TYPE_SIZE (type)))
6411 {
6412 case 8:
6413 base_atomic_type = atomicQI_type_node;
6414 break;
6415
6416 case 16:
6417 base_atomic_type = atomicHI_type_node;
6418 break;
6419
6420 case 32:
6421 base_atomic_type = atomicSI_type_node;
6422 break;
6423
6424 case 64:
6425 base_atomic_type = atomicDI_type_node;
6426 break;
6427
6428 case 128:
6429 base_atomic_type = atomicTI_type_node;
6430 break;
6431
6432 default:
6433 base_atomic_type = NULL_TREE;
6434 }
6435
6436 return base_atomic_type;
6437 }
6438
6439 /* Returns true iff unqualified CAND and BASE are equivalent. */
6440
6441 bool
6442 check_base_type (const_tree cand, const_tree base)
6443 {
6444 if (TYPE_NAME (cand) != TYPE_NAME (base)
6445 /* Apparently this is needed for Objective-C. */
6446 || TYPE_CONTEXT (cand) != TYPE_CONTEXT (base)
6447 || !attribute_list_equal (TYPE_ATTRIBUTES (cand),
6448 TYPE_ATTRIBUTES (base)))
6449 return false;
6450 /* Check alignment. */
6451 if (TYPE_ALIGN (cand) == TYPE_ALIGN (base))
6452 return true;
6453 /* Atomic types increase minimal alignment. We must to do so as well
6454 or we get duplicated canonical types. See PR88686. */
6455 if ((TYPE_QUALS (cand) & TYPE_QUAL_ATOMIC))
6456 {
6457 /* See if this object can map to a basic atomic type. */
6458 tree atomic_type = find_atomic_core_type (cand);
6459 if (atomic_type && TYPE_ALIGN (atomic_type) == TYPE_ALIGN (cand))
6460 return true;
6461 }
6462 return false;
6463 }
6464
6465 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6466
6467 bool
6468 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6469 {
6470 return (TYPE_QUALS (cand) == type_quals
6471 && check_base_type (cand, base)
6472 && check_lang_type (cand, base));
6473 }
6474
6475 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6476
6477 static bool
6478 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6479 {
6480 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6481 && TYPE_NAME (cand) == TYPE_NAME (base)
6482 /* Apparently this is needed for Objective-C. */
6483 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6484 /* Check alignment. */
6485 && TYPE_ALIGN (cand) == align
6486 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6487 TYPE_ATTRIBUTES (base))
6488 && check_lang_type (cand, base));
6489 }
6490
6491 /* Return a version of the TYPE, qualified as indicated by the
6492 TYPE_QUALS, if one exists. If no qualified version exists yet,
6493 return NULL_TREE. */
6494
6495 tree
6496 get_qualified_type (tree type, int type_quals)
6497 {
6498 if (TYPE_QUALS (type) == type_quals)
6499 return type;
6500
6501 tree mv = TYPE_MAIN_VARIANT (type);
6502 if (check_qualified_type (mv, type, type_quals))
6503 return mv;
6504
6505 /* Search the chain of variants to see if there is already one there just
6506 like the one we need to have. If so, use that existing one. We must
6507 preserve the TYPE_NAME, since there is code that depends on this. */
6508 for (tree *tp = &TYPE_NEXT_VARIANT (mv); *tp; tp = &TYPE_NEXT_VARIANT (*tp))
6509 if (check_qualified_type (*tp, type, type_quals))
6510 {
6511 /* Put the found variant at the head of the variant list so
6512 frequently searched variants get found faster. The C++ FE
6513 benefits greatly from this. */
6514 tree t = *tp;
6515 *tp = TYPE_NEXT_VARIANT (t);
6516 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
6517 TYPE_NEXT_VARIANT (mv) = t;
6518 return t;
6519 }
6520
6521 return NULL_TREE;
6522 }
6523
6524 /* Like get_qualified_type, but creates the type if it does not
6525 exist. This function never returns NULL_TREE. */
6526
6527 tree
6528 build_qualified_type (tree type, int type_quals MEM_STAT_DECL)
6529 {
6530 tree t;
6531
6532 /* See if we already have the appropriate qualified variant. */
6533 t = get_qualified_type (type, type_quals);
6534
6535 /* If not, build it. */
6536 if (!t)
6537 {
6538 t = build_variant_type_copy (type PASS_MEM_STAT);
6539 set_type_quals (t, type_quals);
6540
6541 if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
6542 {
6543 /* See if this object can map to a basic atomic type. */
6544 tree atomic_type = find_atomic_core_type (type);
6545 if (atomic_type)
6546 {
6547 /* Ensure the alignment of this type is compatible with
6548 the required alignment of the atomic type. */
6549 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6550 SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
6551 }
6552 }
6553
6554 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6555 /* Propagate structural equality. */
6556 SET_TYPE_STRUCTURAL_EQUALITY (t);
6557 else if (TYPE_CANONICAL (type) != type)
6558 /* Build the underlying canonical type, since it is different
6559 from TYPE. */
6560 {
6561 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6562 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6563 }
6564 else
6565 /* T is its own canonical type. */
6566 TYPE_CANONICAL (t) = t;
6567
6568 }
6569
6570 return t;
6571 }
6572
6573 /* Create a variant of type T with alignment ALIGN. */
6574
6575 tree
6576 build_aligned_type (tree type, unsigned int align)
6577 {
6578 tree t;
6579
6580 if (TYPE_PACKED (type)
6581 || TYPE_ALIGN (type) == align)
6582 return type;
6583
6584 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6585 if (check_aligned_type (t, type, align))
6586 return t;
6587
6588 t = build_variant_type_copy (type);
6589 SET_TYPE_ALIGN (t, align);
6590 TYPE_USER_ALIGN (t) = 1;
6591
6592 return t;
6593 }
6594
6595 /* Create a new distinct copy of TYPE. The new type is made its own
6596 MAIN_VARIANT. If TYPE requires structural equality checks, the
6597 resulting type requires structural equality checks; otherwise, its
6598 TYPE_CANONICAL points to itself. */
6599
6600 tree
6601 build_distinct_type_copy (tree type MEM_STAT_DECL)
6602 {
6603 tree t = copy_node (type PASS_MEM_STAT);
6604
6605 TYPE_POINTER_TO (t) = 0;
6606 TYPE_REFERENCE_TO (t) = 0;
6607
6608 /* Set the canonical type either to a new equivalence class, or
6609 propagate the need for structural equality checks. */
6610 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6611 SET_TYPE_STRUCTURAL_EQUALITY (t);
6612 else
6613 TYPE_CANONICAL (t) = t;
6614
6615 /* Make it its own variant. */
6616 TYPE_MAIN_VARIANT (t) = t;
6617 TYPE_NEXT_VARIANT (t) = 0;
6618
6619 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6620 whose TREE_TYPE is not t. This can also happen in the Ada
6621 frontend when using subtypes. */
6622
6623 return t;
6624 }
6625
6626 /* Create a new variant of TYPE, equivalent but distinct. This is so
6627 the caller can modify it. TYPE_CANONICAL for the return type will
6628 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6629 are considered equal by the language itself (or that both types
6630 require structural equality checks). */
6631
6632 tree
6633 build_variant_type_copy (tree type MEM_STAT_DECL)
6634 {
6635 tree t, m = TYPE_MAIN_VARIANT (type);
6636
6637 t = build_distinct_type_copy (type PASS_MEM_STAT);
6638
6639 /* Since we're building a variant, assume that it is a non-semantic
6640 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6641 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6642 /* Type variants have no alias set defined. */
6643 TYPE_ALIAS_SET (t) = -1;
6644
6645 /* Add the new type to the chain of variants of TYPE. */
6646 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6647 TYPE_NEXT_VARIANT (m) = t;
6648 TYPE_MAIN_VARIANT (t) = m;
6649
6650 return t;
6651 }
6652 \f
6653 /* Return true if the from tree in both tree maps are equal. */
6654
6655 int
6656 tree_map_base_eq (const void *va, const void *vb)
6657 {
6658 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6659 *const b = (const struct tree_map_base *) vb;
6660 return (a->from == b->from);
6661 }
6662
6663 /* Hash a from tree in a tree_base_map. */
6664
6665 unsigned int
6666 tree_map_base_hash (const void *item)
6667 {
6668 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6669 }
6670
6671 /* Return true if this tree map structure is marked for garbage collection
6672 purposes. We simply return true if the from tree is marked, so that this
6673 structure goes away when the from tree goes away. */
6674
6675 int
6676 tree_map_base_marked_p (const void *p)
6677 {
6678 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6679 }
6680
6681 /* Hash a from tree in a tree_map. */
6682
6683 unsigned int
6684 tree_map_hash (const void *item)
6685 {
6686 return (((const struct tree_map *) item)->hash);
6687 }
6688
6689 /* Hash a from tree in a tree_decl_map. */
6690
6691 unsigned int
6692 tree_decl_map_hash (const void *item)
6693 {
6694 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6695 }
6696
6697 /* Return the initialization priority for DECL. */
6698
6699 priority_type
6700 decl_init_priority_lookup (tree decl)
6701 {
6702 symtab_node *snode = symtab_node::get (decl);
6703
6704 if (!snode)
6705 return DEFAULT_INIT_PRIORITY;
6706 return
6707 snode->get_init_priority ();
6708 }
6709
6710 /* Return the finalization priority for DECL. */
6711
6712 priority_type
6713 decl_fini_priority_lookup (tree decl)
6714 {
6715 cgraph_node *node = cgraph_node::get (decl);
6716
6717 if (!node)
6718 return DEFAULT_INIT_PRIORITY;
6719 return
6720 node->get_fini_priority ();
6721 }
6722
6723 /* Set the initialization priority for DECL to PRIORITY. */
6724
6725 void
6726 decl_init_priority_insert (tree decl, priority_type priority)
6727 {
6728 struct symtab_node *snode;
6729
6730 if (priority == DEFAULT_INIT_PRIORITY)
6731 {
6732 snode = symtab_node::get (decl);
6733 if (!snode)
6734 return;
6735 }
6736 else if (VAR_P (decl))
6737 snode = varpool_node::get_create (decl);
6738 else
6739 snode = cgraph_node::get_create (decl);
6740 snode->set_init_priority (priority);
6741 }
6742
6743 /* Set the finalization priority for DECL to PRIORITY. */
6744
6745 void
6746 decl_fini_priority_insert (tree decl, priority_type priority)
6747 {
6748 struct cgraph_node *node;
6749
6750 if (priority == DEFAULT_INIT_PRIORITY)
6751 {
6752 node = cgraph_node::get (decl);
6753 if (!node)
6754 return;
6755 }
6756 else
6757 node = cgraph_node::get_create (decl);
6758 node->set_fini_priority (priority);
6759 }
6760
6761 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6762
6763 static void
6764 print_debug_expr_statistics (void)
6765 {
6766 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6767 (long) debug_expr_for_decl->size (),
6768 (long) debug_expr_for_decl->elements (),
6769 debug_expr_for_decl->collisions ());
6770 }
6771
6772 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6773
6774 static void
6775 print_value_expr_statistics (void)
6776 {
6777 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6778 (long) value_expr_for_decl->size (),
6779 (long) value_expr_for_decl->elements (),
6780 value_expr_for_decl->collisions ());
6781 }
6782
6783 /* Lookup a debug expression for FROM, and return it if we find one. */
6784
6785 tree
6786 decl_debug_expr_lookup (tree from)
6787 {
6788 struct tree_decl_map *h, in;
6789 in.base.from = from;
6790
6791 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6792 if (h)
6793 return h->to;
6794 return NULL_TREE;
6795 }
6796
6797 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6798
6799 void
6800 decl_debug_expr_insert (tree from, tree to)
6801 {
6802 struct tree_decl_map *h;
6803
6804 h = ggc_alloc<tree_decl_map> ();
6805 h->base.from = from;
6806 h->to = to;
6807 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6808 }
6809
6810 /* Lookup a value expression for FROM, and return it if we find one. */
6811
6812 tree
6813 decl_value_expr_lookup (tree from)
6814 {
6815 struct tree_decl_map *h, in;
6816 in.base.from = from;
6817
6818 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6819 if (h)
6820 return h->to;
6821 return NULL_TREE;
6822 }
6823
6824 /* Insert a mapping FROM->TO in the value expression hashtable. */
6825
6826 void
6827 decl_value_expr_insert (tree from, tree to)
6828 {
6829 struct tree_decl_map *h;
6830
6831 h = ggc_alloc<tree_decl_map> ();
6832 h->base.from = from;
6833 h->to = to;
6834 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6835 }
6836
6837 /* Lookup a vector of debug arguments for FROM, and return it if we
6838 find one. */
6839
6840 vec<tree, va_gc> **
6841 decl_debug_args_lookup (tree from)
6842 {
6843 struct tree_vec_map *h, in;
6844
6845 if (!DECL_HAS_DEBUG_ARGS_P (from))
6846 return NULL;
6847 gcc_checking_assert (debug_args_for_decl != NULL);
6848 in.base.from = from;
6849 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
6850 if (h)
6851 return &h->to;
6852 return NULL;
6853 }
6854
6855 /* Insert a mapping FROM->empty vector of debug arguments in the value
6856 expression hashtable. */
6857
6858 vec<tree, va_gc> **
6859 decl_debug_args_insert (tree from)
6860 {
6861 struct tree_vec_map *h;
6862 tree_vec_map **loc;
6863
6864 if (DECL_HAS_DEBUG_ARGS_P (from))
6865 return decl_debug_args_lookup (from);
6866 if (debug_args_for_decl == NULL)
6867 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
6868 h = ggc_alloc<tree_vec_map> ();
6869 h->base.from = from;
6870 h->to = NULL;
6871 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
6872 *loc = h;
6873 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6874 return &h->to;
6875 }
6876
/* Hashing of types so that we don't make duplicates.
   The entry point is `type_hash_canon'.  */

/* Generate the default hash code for TYPE.  This is designed for
   speed, rather than maximum entropy.  Only the fields that
   distinguish "identical" types are mixed in; everything else is left
   to type_cache_hasher::equal.  */

hashval_t
type_hash_canon_hash (tree type)
{
  inchash::hash hstate;

  hstate.add_int (TREE_CODE (type));

  /* Mix in the hash of the underlying type, when there is one
     (e.g. the element type of arrays/vectors, return type of
     functions).  */
  if (TREE_TYPE (type))
    hstate.add_object (TYPE_HASH (TREE_TYPE (type)));

  for (tree t = TYPE_ATTRIBUTES (type); t; t = TREE_CHAIN (t))
    /* Just the identifier is adequate to distinguish.  */
    hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t)));

  switch (TREE_CODE (type))
    {
    case METHOD_TYPE:
      /* Methods additionally hash their basetype, then share the
	 argument handling with plain function types.  */
      hstate.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type)));
      /* FALLTHROUGH. */
    case FUNCTION_TYPE:
      for (tree t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
	if (TREE_VALUE (t) != error_mark_node)
	  hstate.add_object (TYPE_HASH (TREE_VALUE (t)));
      break;

    case OFFSET_TYPE:
      hstate.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type)));
      break;

    case ARRAY_TYPE:
      {
	if (TYPE_DOMAIN (type))
	  hstate.add_object (TYPE_HASH (TYPE_DOMAIN (type)));
	/* TYPE_TYPELESS_STORAGE only participates for non-aggregate
	   element types; see the matching logic and rationale in
	   type_cache_hasher::equal.  */
	if (!AGGREGATE_TYPE_P (TREE_TYPE (type)))
	  {
	    unsigned typeless = TYPE_TYPELESS_STORAGE (type);
	    hstate.add_object (typeless);
	  }
      }
      break;

    case INTEGER_TYPE:
      {
	/* Hash the bound that is present; MAX is preferred, MIN is
	   the fallback for types that only have a lower bound.  */
	tree t = TYPE_MAX_VALUE (type);
	if (!t)
	  t = TYPE_MIN_VALUE (type);
	for (int i = 0; i < TREE_INT_CST_NUNITS (t); i++)
	  hstate.add_object (TREE_INT_CST_ELT (t, i));
	break;
      }

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      {
	unsigned prec = TYPE_PRECISION (type);
	hstate.add_object (prec);
	break;
      }

    case VECTOR_TYPE:
      hstate.add_poly_int (TYPE_VECTOR_SUBPARTS (type));
      break;

    default:
      break;
    }

  return hstate.end ();
}
6952
/* These are the Hashtable callback functions.  */

/* Returns true iff the types are equivalent.  */

bool
type_cache_hasher::equal (type_hash *a, type_hash *b)
{
  /* First test the things that are the same for all types.  */
  if (a->hash != b->hash
      || TREE_CODE (a->type) != TREE_CODE (b->type)
      || TREE_TYPE (a->type) != TREE_TYPE (b->type)
      || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
				TYPE_ATTRIBUTES (b->type))
      || (TREE_CODE (a->type) != COMPLEX_TYPE
	  && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
    return 0;

  /* Be careful about comparing arrays before and after the element type
     has been completed; don't compare TYPE_ALIGN unless both types are
     complete.  */
  if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
      && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
	  || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
    return 0;

  /* Per-code comparison of the remaining distinguishing fields.  Cases
     that "break" out of the switch match so far and are further vetted
     by the language hook below; cases that return decide directly.  */
  switch (TREE_CODE (a->type))
    {
    case VOID_TYPE:
    case COMPLEX_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case NULLPTR_TYPE:
      /* Nothing beyond the common checks distinguishes these.  */
      return 1;

    case VECTOR_TYPE:
      return known_eq (TYPE_VECTOR_SUBPARTS (a->type),
		       TYPE_VECTOR_SUBPARTS (b->type));

    case ENUMERAL_TYPE:
      /* The enumerator lists must be the same node or element-wise
	 equal TREE_LISTs; then fall through to the integral checks.  */
      if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
	  && !(TYPE_VALUES (a->type)
	       && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
	       && TYPE_VALUES (b->type)
	       && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
	       && type_list_equal (TYPE_VALUES (a->type),
				   TYPE_VALUES (b->type))))
	return 0;

      /* fall through */

    case INTEGER_TYPE:
    case REAL_TYPE:
    case BOOLEAN_TYPE:
      if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
	return false;
      /* Bounds compare equal when they are the same node or have equal
	 integer constant values (covers NULL bounds via pointer
	 equality).  */
      return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
	       || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
				      TYPE_MAX_VALUE (b->type)))
	      && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
		  || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
					 TYPE_MIN_VALUE (b->type))));

    case FIXED_POINT_TYPE:
      return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);

    case OFFSET_TYPE:
      return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);

    case METHOD_TYPE:
      if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
	  && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
	      || (TYPE_ARG_TYPES (a->type)
		  && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
		  && TYPE_ARG_TYPES (b->type)
		  && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
		  && type_list_equal (TYPE_ARG_TYPES (a->type),
				      TYPE_ARG_TYPES (b->type)))))
	break;
      return 0;
    case ARRAY_TYPE:
      /* Don't compare TYPE_TYPELESS_STORAGE flag on aggregates,
	 where the flag should be inherited from the element type
	 and can change after ARRAY_TYPEs are created; on non-aggregates
	 compare it and hash it, scalars will never have that flag set
	 and we need to differentiate between arrays created by different
	 front-ends or middle-end created arrays.  */
      return (TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type)
	      && (AGGREGATE_TYPE_P (TREE_TYPE (a->type))
		  || (TYPE_TYPELESS_STORAGE (a->type)
		      == TYPE_TYPELESS_STORAGE (b->type))));

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
	      || (TYPE_FIELDS (a->type)
		  && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
		  && TYPE_FIELDS (b->type)
		  && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
		  && type_list_equal (TYPE_FIELDS (a->type),
				      TYPE_FIELDS (b->type))));

    case FUNCTION_TYPE:
      if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
	  || (TYPE_ARG_TYPES (a->type)
	      && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
	      && TYPE_ARG_TYPES (b->type)
	      && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
	      && type_list_equal (TYPE_ARG_TYPES (a->type),
				  TYPE_ARG_TYPES (b->type))))
	break;
      return 0;

    default:
      return 0;
    }

  /* Function/method types that matched structurally still get a veto
     from the front end's langhook, when one is installed.  */
  if (lang_hooks.types.type_hash_eq != NULL)
    return lang_hooks.types.type_hash_eq (a->type, b->type);

  return 1;
}
7075
/* Given TYPE, and HASHCODE its hash code, return the canonical
   object for an identical type if one already exists.
   Otherwise, return TYPE, and record it as the canonical object.

   To use this function, first create a type of the sort you want.
   Then compute its hash code from the fields of the type that
   make it different from other similar types.
   Then call this function and use the value.

   Note: when the table already contains an equivalent type, the
   just-created TYPE is freed (including, for INTEGER_TYPEs, its
   min/max value nodes and cached-values vector) and must not be used
   by the caller afterwards.  */

tree
type_hash_canon (unsigned int hashcode, tree type)
{
  type_hash in;
  type_hash **loc;

  /* The hash table only contains main variants, so ensure that's what we're
     being passed.  */
  gcc_assert (TYPE_MAIN_VARIANT (type) == type);

  /* The TYPE_ALIGN field of a type is set by layout_type(), so we
     must call that routine before comparing TYPE_ALIGNs.  */
  layout_type (type);

  in.hash = hashcode;
  in.type = type;

  loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
  if (*loc)
    {
      /* An equivalent type already exists; discard TYPE and return the
	 recorded one.  */
      tree t1 = ((type_hash *) *loc)->type;
      gcc_assert (TYPE_MAIN_VARIANT (t1) == t1
		  && t1 != type);
      /* If TYPE was the most recently created type, reclaim its UID so
	 the UID space stays dense.  */
      if (TYPE_UID (type) + 1 == next_type_uid)
	--next_type_uid;
      /* Free also min/max values and the cache for integer
	 types.  This can't be done in free_node, as LTO frees
	 those on its own.  */
      if (TREE_CODE (type) == INTEGER_TYPE)
	{
	  if (TYPE_MIN_VALUE (type)
	      && TREE_TYPE (TYPE_MIN_VALUE (type)) == type)
	    {
	      /* Zero is always in TYPE_CACHED_VALUES.  */
	      if (! TYPE_UNSIGNED (type))
		int_cst_hash_table->remove_elt (TYPE_MIN_VALUE (type));
	      ggc_free (TYPE_MIN_VALUE (type));
	    }
	  if (TYPE_MAX_VALUE (type)
	      && TREE_TYPE (TYPE_MAX_VALUE (type)) == type)
	    {
	      int_cst_hash_table->remove_elt (TYPE_MAX_VALUE (type));
	      ggc_free (TYPE_MAX_VALUE (type));
	    }
	  if (TYPE_CACHED_VALUES_P (type))
	    ggc_free (TYPE_CACHED_VALUES (type));
	}
      free_node (type);
      return t1;
    }
  else
    {
      /* No equivalent type yet: record TYPE as the canonical object.  */
      struct type_hash *h;

      h = ggc_alloc<type_hash> ();
      h->hash = hashcode;
      h->type = type;
      *loc = h;

      return type;
    }
}
7147
7148 static void
7149 print_type_hash_statistics (void)
7150 {
7151 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
7152 (long) type_hash_table->size (),
7153 (long) type_hash_table->elements (),
7154 type_hash_table->collisions ());
7155 }
7156
7157 /* Given two lists of types
7158 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
7159 return 1 if the lists contain the same types in the same order.
7160 Also, the TREE_PURPOSEs must match. */
7161
7162 bool
7163 type_list_equal (const_tree l1, const_tree l2)
7164 {
7165 const_tree t1, t2;
7166
7167 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7168 if (TREE_VALUE (t1) != TREE_VALUE (t2)
7169 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
7170 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
7171 && (TREE_TYPE (TREE_PURPOSE (t1))
7172 == TREE_TYPE (TREE_PURPOSE (t2))))))
7173 return false;
7174
7175 return t1 == t2;
7176 }
7177
7178 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
7179 given by TYPE. If the argument list accepts variable arguments,
7180 then this function counts only the ordinary arguments. */
7181
7182 int
7183 type_num_arguments (const_tree fntype)
7184 {
7185 int i = 0;
7186
7187 for (tree t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
7188 /* If the function does not take a variable number of arguments,
7189 the last element in the list will have type `void'. */
7190 if (VOID_TYPE_P (TREE_VALUE (t)))
7191 break;
7192 else
7193 ++i;
7194
7195 return i;
7196 }
7197
7198 /* Return the type of the function TYPE's argument ARGNO if known.
7199 For vararg function's where ARGNO refers to one of the variadic
7200 arguments return null. Otherwise, return a void_type_node for
7201 out-of-bounds ARGNO. */
7202
7203 tree
7204 type_argument_type (const_tree fntype, unsigned argno)
7205 {
7206 /* Treat zero the same as an out-of-bounds argument number. */
7207 if (!argno)
7208 return void_type_node;
7209
7210 function_args_iterator iter;
7211
7212 tree argtype;
7213 unsigned i = 1;
7214 FOREACH_FUNCTION_ARGS (fntype, argtype, iter)
7215 {
7216 /* A vararg function's argument list ends in a null. Otherwise,
7217 an ordinary function's argument list ends with void. Return
7218 null if ARGNO refers to a vararg argument, void_type_node if
7219 it's out of bounds, and the formal argument type otherwise. */
7220 if (!argtype)
7221 break;
7222
7223 if (i == argno || VOID_TYPE_P (argtype))
7224 return argtype;
7225
7226 ++i;
7227 }
7228
7229 return NULL_TREE;
7230 }
7231
7232 /* Nonzero if integer constants T1 and T2
7233 represent the same constant value. */
7234
7235 int
7236 tree_int_cst_equal (const_tree t1, const_tree t2)
7237 {
7238 if (t1 == t2)
7239 return 1;
7240
7241 if (t1 == 0 || t2 == 0)
7242 return 0;
7243
7244 STRIP_ANY_LOCATION_WRAPPER (t1);
7245 STRIP_ANY_LOCATION_WRAPPER (t2);
7246
7247 if (TREE_CODE (t1) == INTEGER_CST
7248 && TREE_CODE (t2) == INTEGER_CST
7249 && wi::to_widest (t1) == wi::to_widest (t2))
7250 return 1;
7251
7252 return 0;
7253 }
7254
7255 /* Return true if T is an INTEGER_CST whose numerical value (extended
7256 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7257
7258 bool
7259 tree_fits_shwi_p (const_tree t)
7260 {
7261 return (t != NULL_TREE
7262 && TREE_CODE (t) == INTEGER_CST
7263 && wi::fits_shwi_p (wi::to_widest (t)));
7264 }
7265
7266 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
7267 value (extended according to TYPE_UNSIGNED) fits in a poly_int64. */
7268
7269 bool
7270 tree_fits_poly_int64_p (const_tree t)
7271 {
7272 if (t == NULL_TREE)
7273 return false;
7274 if (POLY_INT_CST_P (t))
7275 {
7276 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
7277 if (!wi::fits_shwi_p (wi::to_wide (POLY_INT_CST_COEFF (t, i))))
7278 return false;
7279 return true;
7280 }
7281 return (TREE_CODE (t) == INTEGER_CST
7282 && wi::fits_shwi_p (wi::to_widest (t)));
7283 }
7284
7285 /* Return true if T is an INTEGER_CST whose numerical value (extended
7286 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7287
7288 bool
7289 tree_fits_uhwi_p (const_tree t)
7290 {
7291 return (t != NULL_TREE
7292 && TREE_CODE (t) == INTEGER_CST
7293 && wi::fits_uhwi_p (wi::to_widest (t)));
7294 }
7295
7296 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
7297 value (extended according to TYPE_UNSIGNED) fits in a poly_uint64. */
7298
7299 bool
7300 tree_fits_poly_uint64_p (const_tree t)
7301 {
7302 if (t == NULL_TREE)
7303 return false;
7304 if (POLY_INT_CST_P (t))
7305 {
7306 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
7307 if (!wi::fits_uhwi_p (wi::to_widest (POLY_INT_CST_COEFF (t, i))))
7308 return false;
7309 return true;
7310 }
7311 return (TREE_CODE (t) == INTEGER_CST
7312 && wi::fits_uhwi_p (wi::to_widest (t)));
7313 }
7314
7315 /* T is an INTEGER_CST whose numerical value (extended according to
7316 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7317 HOST_WIDE_INT. */
7318
7319 HOST_WIDE_INT
7320 tree_to_shwi (const_tree t)
7321 {
7322 gcc_assert (tree_fits_shwi_p (t));
7323 return TREE_INT_CST_LOW (t);
7324 }
7325
7326 /* T is an INTEGER_CST whose numerical value (extended according to
7327 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7328 HOST_WIDE_INT. */
7329
7330 unsigned HOST_WIDE_INT
7331 tree_to_uhwi (const_tree t)
7332 {
7333 gcc_assert (tree_fits_uhwi_p (t));
7334 return TREE_INT_CST_LOW (t);
7335 }
7336
7337 /* Return the most significant (sign) bit of T. */
7338
7339 int
7340 tree_int_cst_sign_bit (const_tree t)
7341 {
7342 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7343
7344 return wi::extract_uhwi (wi::to_wide (t), bitno, 1);
7345 }
7346
7347 /* Return an indication of the sign of the integer constant T.
7348 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7349 Note that -1 will never be returned if T's type is unsigned. */
7350
7351 int
7352 tree_int_cst_sgn (const_tree t)
7353 {
7354 if (wi::to_wide (t) == 0)
7355 return 0;
7356 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7357 return 1;
7358 else if (wi::neg_p (wi::to_wide (t)))
7359 return -1;
7360 else
7361 return 1;
7362 }
7363
7364 /* Return the minimum number of bits needed to represent VALUE in a
7365 signed or unsigned type, UNSIGNEDP says which. */
7366
7367 unsigned int
7368 tree_int_cst_min_precision (tree value, signop sgn)
7369 {
7370 /* If the value is negative, compute its negative minus 1. The latter
7371 adjustment is because the absolute value of the largest negative value
7372 is one larger than the largest positive value. This is equivalent to
7373 a bit-wise negation, so use that operation instead. */
7374
7375 if (tree_int_cst_sgn (value) < 0)
7376 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7377
7378 /* Return the number of bits needed, taking into account the fact
7379 that we need one more bit for a signed than unsigned type.
7380 If value is 0 or -1, the minimum precision is 1 no matter
7381 whether unsignedp is true or false. */
7382
7383 if (integer_zerop (value))
7384 return 1;
7385 else
7386 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
7387 }
7388
7389 /* Return truthvalue of whether T1 is the same tree structure as T2.
7390 Return 1 if they are the same.
7391 Return 0 if they are understandably different.
7392 Return -1 if either contains tree structure not understood by
7393 this function. */
7394
7395 int
7396 simple_cst_equal (const_tree t1, const_tree t2)
7397 {
7398 enum tree_code code1, code2;
7399 int cmp;
7400 int i;
7401
7402 if (t1 == t2)
7403 return 1;
7404 if (t1 == 0 || t2 == 0)
7405 return 0;
7406
7407 /* For location wrappers to be the same, they must be at the same
7408 source location (and wrap the same thing). */
7409 if (location_wrapper_p (t1) && location_wrapper_p (t2))
7410 {
7411 if (EXPR_LOCATION (t1) != EXPR_LOCATION (t2))
7412 return 0;
7413 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7414 }
7415
7416 code1 = TREE_CODE (t1);
7417 code2 = TREE_CODE (t2);
7418
7419 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7420 {
7421 if (CONVERT_EXPR_CODE_P (code2)
7422 || code2 == NON_LVALUE_EXPR)
7423 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7424 else
7425 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7426 }
7427
7428 else if (CONVERT_EXPR_CODE_P (code2)
7429 || code2 == NON_LVALUE_EXPR)
7430 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7431
7432 if (code1 != code2)
7433 return 0;
7434
7435 switch (code1)
7436 {
7437 case INTEGER_CST:
7438 return wi::to_widest (t1) == wi::to_widest (t2);
7439
7440 case REAL_CST:
7441 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
7442
7443 case FIXED_CST:
7444 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7445
7446 case STRING_CST:
7447 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7448 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7449 TREE_STRING_LENGTH (t1)));
7450
7451 case CONSTRUCTOR:
7452 {
7453 unsigned HOST_WIDE_INT idx;
7454 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7455 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7456
7457 if (vec_safe_length (v1) != vec_safe_length (v2))
7458 return false;
7459
7460 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7461 /* ??? Should we handle also fields here? */
7462 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7463 return false;
7464 return true;
7465 }
7466
7467 case SAVE_EXPR:
7468 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7469
7470 case CALL_EXPR:
7471 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7472 if (cmp <= 0)
7473 return cmp;
7474 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7475 return 0;
7476 {
7477 const_tree arg1, arg2;
7478 const_call_expr_arg_iterator iter1, iter2;
7479 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7480 arg2 = first_const_call_expr_arg (t2, &iter2);
7481 arg1 && arg2;
7482 arg1 = next_const_call_expr_arg (&iter1),
7483 arg2 = next_const_call_expr_arg (&iter2))
7484 {
7485 cmp = simple_cst_equal (arg1, arg2);
7486 if (cmp <= 0)
7487 return cmp;
7488 }
7489 return arg1 == arg2;
7490 }
7491
7492 case TARGET_EXPR:
7493 /* Special case: if either target is an unallocated VAR_DECL,
7494 it means that it's going to be unified with whatever the
7495 TARGET_EXPR is really supposed to initialize, so treat it
7496 as being equivalent to anything. */
7497 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7498 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7499 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7500 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7501 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7502 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7503 cmp = 1;
7504 else
7505 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7506
7507 if (cmp <= 0)
7508 return cmp;
7509
7510 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7511
7512 case WITH_CLEANUP_EXPR:
7513 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7514 if (cmp <= 0)
7515 return cmp;
7516
7517 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t1, 1));
7518
7519 case COMPONENT_REF:
7520 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7521 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7522
7523 return 0;
7524
7525 case VAR_DECL:
7526 case PARM_DECL:
7527 case CONST_DECL:
7528 case FUNCTION_DECL:
7529 return 0;
7530
7531 default:
7532 if (POLY_INT_CST_P (t1))
7533 /* A false return means maybe_ne rather than known_ne. */
7534 return known_eq (poly_widest_int::from (poly_int_cst_value (t1),
7535 TYPE_SIGN (TREE_TYPE (t1))),
7536 poly_widest_int::from (poly_int_cst_value (t2),
7537 TYPE_SIGN (TREE_TYPE (t2))));
7538 break;
7539 }
7540
7541 /* This general rule works for most tree codes. All exceptions should be
7542 handled above. If this is a language-specific tree code, we can't
7543 trust what might be in the operand, so say we don't know
7544 the situation. */
7545 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7546 return -1;
7547
7548 switch (TREE_CODE_CLASS (code1))
7549 {
7550 case tcc_unary:
7551 case tcc_binary:
7552 case tcc_comparison:
7553 case tcc_expression:
7554 case tcc_reference:
7555 case tcc_statement:
7556 cmp = 1;
7557 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7558 {
7559 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7560 if (cmp <= 0)
7561 return cmp;
7562 }
7563
7564 return cmp;
7565
7566 default:
7567 return -1;
7568 }
7569 }
7570
7571 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7572 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7573 than U, respectively. */
7574
7575 int
7576 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7577 {
7578 if (tree_int_cst_sgn (t) < 0)
7579 return -1;
7580 else if (!tree_fits_uhwi_p (t))
7581 return 1;
7582 else if (TREE_INT_CST_LOW (t) == u)
7583 return 0;
7584 else if (TREE_INT_CST_LOW (t) < u)
7585 return -1;
7586 else
7587 return 1;
7588 }
7589
7590 /* Return true if SIZE represents a constant size that is in bounds of
7591 what the middle-end and the backend accepts (covering not more than
7592 half of the address-space).
7593 When PERR is non-null, set *PERR on failure to the description of
7594 why SIZE is not valid. */
7595
7596 bool
7597 valid_constant_size_p (const_tree size, cst_size_error *perr /* = NULL */)
7598 {
7599 if (POLY_INT_CST_P (size))
7600 {
7601 if (TREE_OVERFLOW (size))
7602 return false;
7603 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
7604 if (!valid_constant_size_p (POLY_INT_CST_COEFF (size, i)))
7605 return false;
7606 return true;
7607 }
7608
7609 cst_size_error error;
7610 if (!perr)
7611 perr = &error;
7612
7613 if (TREE_CODE (size) != INTEGER_CST)
7614 {
7615 *perr = cst_size_not_constant;
7616 return false;
7617 }
7618
7619 if (TREE_OVERFLOW_P (size))
7620 {
7621 *perr = cst_size_overflow;
7622 return false;
7623 }
7624
7625 if (tree_int_cst_sgn (size) < 0)
7626 {
7627 *perr = cst_size_negative;
7628 return false;
7629 }
7630 if (!tree_fits_uhwi_p (size)
7631 || (wi::to_widest (TYPE_MAX_VALUE (sizetype))
7632 < wi::to_widest (size) * 2))
7633 {
7634 *perr = cst_size_too_big;
7635 return false;
7636 }
7637
7638 return true;
7639 }
7640
7641 /* Return the precision of the type, or for a complex or vector type the
7642 precision of the type of its elements. */
7643
7644 unsigned int
7645 element_precision (const_tree type)
7646 {
7647 if (!TYPE_P (type))
7648 type = TREE_TYPE (type);
7649 enum tree_code code = TREE_CODE (type);
7650 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7651 type = TREE_TYPE (type);
7652
7653 return TYPE_PRECISION (type);
7654 }
7655
7656 /* Return true if CODE represents an associative tree code. Otherwise
7657 return false. */
7658 bool
7659 associative_tree_code (enum tree_code code)
7660 {
7661 switch (code)
7662 {
7663 case BIT_IOR_EXPR:
7664 case BIT_AND_EXPR:
7665 case BIT_XOR_EXPR:
7666 case PLUS_EXPR:
7667 case MULT_EXPR:
7668 case MIN_EXPR:
7669 case MAX_EXPR:
7670 return true;
7671
7672 default:
7673 break;
7674 }
7675 return false;
7676 }
7677
7678 /* Return true if CODE represents a commutative tree code. Otherwise
7679 return false. */
7680 bool
7681 commutative_tree_code (enum tree_code code)
7682 {
7683 switch (code)
7684 {
7685 case PLUS_EXPR:
7686 case MULT_EXPR:
7687 case MULT_HIGHPART_EXPR:
7688 case MIN_EXPR:
7689 case MAX_EXPR:
7690 case BIT_IOR_EXPR:
7691 case BIT_XOR_EXPR:
7692 case BIT_AND_EXPR:
7693 case NE_EXPR:
7694 case EQ_EXPR:
7695 case UNORDERED_EXPR:
7696 case ORDERED_EXPR:
7697 case UNEQ_EXPR:
7698 case LTGT_EXPR:
7699 case TRUTH_AND_EXPR:
7700 case TRUTH_XOR_EXPR:
7701 case TRUTH_OR_EXPR:
7702 case WIDEN_MULT_EXPR:
7703 case VEC_WIDEN_MULT_HI_EXPR:
7704 case VEC_WIDEN_MULT_LO_EXPR:
7705 case VEC_WIDEN_MULT_EVEN_EXPR:
7706 case VEC_WIDEN_MULT_ODD_EXPR:
7707 return true;
7708
7709 default:
7710 break;
7711 }
7712 return false;
7713 }
7714
7715 /* Return true if CODE represents a ternary tree code for which the
7716 first two operands are commutative. Otherwise return false. */
7717 bool
7718 commutative_ternary_tree_code (enum tree_code code)
7719 {
7720 switch (code)
7721 {
7722 case WIDEN_MULT_PLUS_EXPR:
7723 case WIDEN_MULT_MINUS_EXPR:
7724 case DOT_PROD_EXPR:
7725 return true;
7726
7727 default:
7728 break;
7729 }
7730 return false;
7731 }
7732
7733 /* Returns true if CODE can overflow. */
7734
7735 bool
7736 operation_can_overflow (enum tree_code code)
7737 {
7738 switch (code)
7739 {
7740 case PLUS_EXPR:
7741 case MINUS_EXPR:
7742 case MULT_EXPR:
7743 case LSHIFT_EXPR:
7744 /* Can overflow in various ways. */
7745 return true;
7746 case TRUNC_DIV_EXPR:
7747 case EXACT_DIV_EXPR:
7748 case FLOOR_DIV_EXPR:
7749 case CEIL_DIV_EXPR:
7750 /* For INT_MIN / -1. */
7751 return true;
7752 case NEGATE_EXPR:
7753 case ABS_EXPR:
7754 /* For -INT_MIN. */
7755 return true;
7756 default:
7757 /* These operators cannot overflow. */
7758 return false;
7759 }
7760 }
7761
7762 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
7763 ftrapv doesn't generate trapping insns for CODE. */
7764
7765 bool
7766 operation_no_trapping_overflow (tree type, enum tree_code code)
7767 {
7768 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
7769
7770 /* We don't generate instructions that trap on overflow for complex or vector
7771 types. */
7772 if (!INTEGRAL_TYPE_P (type))
7773 return true;
7774
7775 if (!TYPE_OVERFLOW_TRAPS (type))
7776 return true;
7777
7778 switch (code)
7779 {
7780 case PLUS_EXPR:
7781 case MINUS_EXPR:
7782 case MULT_EXPR:
7783 case NEGATE_EXPR:
7784 case ABS_EXPR:
7785 /* These operators can overflow, and -ftrapv generates trapping code for
7786 these. */
7787 return false;
7788 case TRUNC_DIV_EXPR:
7789 case EXACT_DIV_EXPR:
7790 case FLOOR_DIV_EXPR:
7791 case CEIL_DIV_EXPR:
7792 case LSHIFT_EXPR:
7793 /* These operators can overflow, but -ftrapv does not generate trapping
7794 code for these. */
7795 return true;
7796 default:
7797 /* These operators cannot overflow. */
7798 return true;
7799 }
7800 }
7801
/* Constructors for pointer, array and function types.
   (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
   constructed by language-dependent code, not here.)  */

/* Construct, lay out and return the type of pointers to TO_TYPE with
   mode MODE.  If CAN_ALIAS_ALL is TRUE, indicate this type can
   reference all of memory.  If such a type has already been
   constructed, reuse it.  */

tree
build_pointer_type_for_mode (tree to_type, machine_mode mode,
			     bool can_alias_all)
{
  tree t;
  /* Remember whether the caller asked for can-alias-all explicitly;
     CAN_ALIAS_ALL may also be forced on below by a "may_alias"
     attribute, and the TYPE_CANONICAL computation needs to
     distinguish the two cases.  */
  bool could_alias = can_alias_all;

  if (to_type == error_mark_node)
    return error_mark_node;

  /* If the pointed-to type has the may_alias attribute set, force
     a TYPE_REF_CAN_ALIAS_ALL pointer to be generated.  */
  if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
    can_alias_all = true;

  /* In some cases, languages will have things that aren't a POINTER_TYPE
     (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
     In that case, return that type without regard to the rest of our
     operands.

     ??? This is a kludge, but consistent with the way this function has
     always operated and there doesn't seem to be a good way to avoid this
     at the moment.  */
  if (TYPE_POINTER_TO (to_type) != 0
      && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
    return TYPE_POINTER_TO (to_type);

  /* First, if we already have a type for pointers to TO_TYPE and it's
     the proper mode, use it.  Pointer types to TO_TYPE are chained
     through TYPE_NEXT_PTR_TO.  */
  for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
    if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
      return t;

  t = make_node (POINTER_TYPE);

  TREE_TYPE (t) = to_type;
  SET_TYPE_MODE (t, mode);
  TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
  /* Link the new type into TO_TYPE's chain of pointer types so the
     lookup loop above finds it next time.  */
  TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
  TYPE_POINTER_TO (to_type) = t;

  /* During LTO we do not set TYPE_CANONICAL of pointers and references.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
    TYPE_CANONICAL (t)
      = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
				     mode, false);

  /* Lay out the type.  This function has many callers that are concerned
     with expression-construction, and this simplifies them all.  */
  layout_type (t);

  return t;
}
7866
7867 /* By default build pointers in ptr_mode. */
7868
7869 tree
7870 build_pointer_type (tree to_type)
7871 {
7872 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7873 : TYPE_ADDR_SPACE (to_type);
7874 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7875 return build_pointer_type_for_mode (to_type, pointer_mode, false);
7876 }
7877
/* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE.  */

tree
build_reference_type_for_mode (tree to_type, machine_mode mode,
			       bool can_alias_all)
{
  tree t;
  /* Remember whether the caller asked for can-alias-all explicitly;
     CAN_ALIAS_ALL may also be forced on below by a "may_alias"
     attribute, and the TYPE_CANONICAL computation needs to
     distinguish the two cases.  */
  bool could_alias = can_alias_all;

  if (to_type == error_mark_node)
    return error_mark_node;

  /* If the pointed-to type has the may_alias attribute set, force
     a TYPE_REF_CAN_ALIAS_ALL pointer to be generated.  */
  if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
    can_alias_all = true;

  /* In some cases, languages will have things that aren't a REFERENCE_TYPE
     (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
     In that case, return that type without regard to the rest of our
     operands.

     ??? This is a kludge, but consistent with the way this function has
     always operated and there doesn't seem to be a good way to avoid this
     at the moment.  */
  if (TYPE_REFERENCE_TO (to_type) != 0
      && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
    return TYPE_REFERENCE_TO (to_type);

  /* First, if we already have a type for pointers to TO_TYPE and it's
     the proper mode, use it.  Reference types to TO_TYPE are chained
     through TYPE_NEXT_REF_TO.  */
  for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
    if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
      return t;

  t = make_node (REFERENCE_TYPE);

  TREE_TYPE (t) = to_type;
  SET_TYPE_MODE (t, mode);
  TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
  /* Link the new type into TO_TYPE's chain of reference types so the
     lookup loop above finds it next time.  */
  TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
  TYPE_REFERENCE_TO (to_type) = t;

  /* During LTO we do not set TYPE_CANONICAL of pointers and references.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
    TYPE_CANONICAL (t)
      = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
				       mode, false);

  layout_type (t);

  return t;
}
7933
7934
7935 /* Build the node for the type of references-to-TO_TYPE by default
7936 in ptr_mode. */
7937
7938 tree
7939 build_reference_type (tree to_type)
7940 {
7941 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7942 : TYPE_ADDR_SPACE (to_type);
7943 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7944 return build_reference_type_for_mode (to_type, pointer_mode, false);
7945 }
7946
/* Upper bound on the precision of cached nonstandard integer types;
   at least 64 bits so that all common bit-field widths hit the cache.  */
#define MAX_INT_CACHED_PREC \
  (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
/* Cache of nonstandard integer types, indexed by precision: signed
   types occupy slots [0, MAX_INT_CACHED_PREC], unsigned types the
   slots above that (see build_nonstandard_integer_type).  */
static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7950
/* Builds a signed or unsigned integer type of precision PRECISION.
   Used for C bitfields whose precision does not match that of
   built-in target types.  */
tree
build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
				int unsignedp)
{
  tree itype, ret;

  /* The cache stores signed types at indices [0, MAX_INT_CACHED_PREC]
     and unsigned types at [MAX_INT_CACHED_PREC + 1, ...]; turn the
     UNSIGNEDP flag into the index offset that selects the second half.  */
  if (unsignedp)
    unsignedp = MAX_INT_CACHED_PREC + 1;

  if (precision <= MAX_INT_CACHED_PREC)
    {
      itype = nonstandard_integer_type_cache[precision + unsignedp];
      if (itype)
	return itype;
    }

  itype = make_node (INTEGER_TYPE);
  TYPE_PRECISION (itype) = precision;

  /* Derive min/max values, size and signedness from the precision.  */
  if (unsignedp)
    fixup_unsigned_type (itype);
  else
    fixup_signed_type (itype);

  /* Share the node with any previously-created identical type.  */
  inchash::hash hstate;
  inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
  ret = type_hash_canon (hstate.end (), itype);
  if (precision <= MAX_INT_CACHED_PREC)
    nonstandard_integer_type_cache[precision + unsignedp] = ret;

  return ret;
}
7986
/* Upper bound on the precision of cached nonstandard boolean types;
   defined identically to MAX_INT_CACHED_PREC.  */
#define MAX_BOOL_CACHED_PREC \
  (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
/* Cache of nonstandard boolean types, indexed by precision.  */
static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
7990
7991 /* Builds a boolean type of precision PRECISION.
7992 Used for boolean vectors to choose proper vector element size. */
7993 tree
7994 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
7995 {
7996 tree type;
7997
7998 if (precision <= MAX_BOOL_CACHED_PREC)
7999 {
8000 type = nonstandard_boolean_type_cache[precision];
8001 if (type)
8002 return type;
8003 }
8004
8005 type = make_node (BOOLEAN_TYPE);
8006 TYPE_PRECISION (type) = precision;
8007 fixup_signed_type (type);
8008
8009 if (precision <= MAX_INT_CACHED_PREC)
8010 nonstandard_boolean_type_cache[precision] = type;
8011
8012 return type;
8013 }
8014
/* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
   or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL.  If SHARED
   is true, reuse such a type that has already been constructed.  */

static tree
build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
{
  tree itype = make_node (INTEGER_TYPE);

  TREE_TYPE (itype) = type;

  /* Convert the bounds to the base type; HIGHVAL may be null for an
     open-ended range.  */
  TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
  TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;

  /* The range type inherits its representation (precision, mode, size
     and alignment) from the base type.  */
  TYPE_PRECISION (itype) = TYPE_PRECISION (type);
  SET_TYPE_MODE (itype, TYPE_MODE (type));
  TYPE_SIZE (itype) = TYPE_SIZE (type);
  TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
  SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
  TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
  SET_TYPE_WARN_IF_NOT_ALIGN (itype, TYPE_WARN_IF_NOT_ALIGN (type));

  if (!shared)
    return itype;

  if ((TYPE_MIN_VALUE (itype)
       && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
      || (TYPE_MAX_VALUE (itype)
	  && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
    {
      /* Since we cannot reliably merge this type, we need to compare it using
	 structural equality checks.  */
      SET_TYPE_STRUCTURAL_EQUALITY (itype);
      return itype;
    }

  /* Share the node with any previously-built identical range type.  */
  hashval_t hash = type_hash_canon_hash (itype);
  itype = type_hash_canon (hash, itype);

  return itype;
}
8056
8057 /* Wrapper around build_range_type_1 with SHARED set to true. */
8058
8059 tree
8060 build_range_type (tree type, tree lowval, tree highval)
8061 {
8062 return build_range_type_1 (type, lowval, highval, true);
8063 }
8064
8065 /* Wrapper around build_range_type_1 with SHARED set to false. */
8066
8067 tree
8068 build_nonshared_range_type (tree type, tree lowval, tree highval)
8069 {
8070 return build_range_type_1 (type, lowval, highval, false);
8071 }
8072
8073 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
8074 MAXVAL should be the maximum value in the domain
8075 (one less than the length of the array).
8076
8077 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
8078 We don't enforce this limit, that is up to caller (e.g. language front end).
8079 The limit exists because the result is a signed type and we don't handle
8080 sizes that use more than one HOST_WIDE_INT. */
8081
8082 tree
8083 build_index_type (tree maxval)
8084 {
8085 return build_range_type (sizetype, size_zero_node, maxval);
8086 }
8087
/* Return true if the debug information for TYPE, a subtype, should be emitted
   as a subrange type.  If so, set LOWVAL to the low bound and HIGHVAL to the
   high bound, respectively.  Sometimes doing so unnecessarily obfuscates the
   debug info and doesn't reflect the source code.  */

bool
subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
{
  tree base_type = TREE_TYPE (type), low, high;

  /* Subrange types have a base type which is an integral type.  */
  if (!INTEGRAL_TYPE_P (base_type))
    return false;

  /* Get the real bounds of the subtype, preferring the front end's
     notion of the bounds when it provides one.  */
  if (lang_hooks.types.get_subrange_bounds)
    lang_hooks.types.get_subrange_bounds (type, &low, &high);
  else
    {
      low = TYPE_MIN_VALUE (type);
      high = TYPE_MAX_VALUE (type);
    }

  /* If the type and its base type have the same representation and the same
     name, then the type is not a subrange but a copy of the base type.  */
  if ((TREE_CODE (base_type) == INTEGER_TYPE
       || TREE_CODE (base_type) == BOOLEAN_TYPE)
      && int_size_in_bytes (type) == int_size_in_bytes (base_type)
      && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
      && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
      && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
    return false;

  /* Report the bounds only through the pointers the caller supplied.  */
  if (lowval)
    *lowval = low;
  if (highval)
    *highval = high;
  return true;
}
8127
/* Construct, lay out and return the type of arrays of elements with ELT_TYPE
   and number of elements specified by the range of values of INDEX_TYPE.
   If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
   If SHARED is true, reuse such a type that has already been constructed.
   If SET_CANONICAL is true, compute TYPE_CANONICAL from the element type.  */

static tree
build_array_type_1 (tree elt_type, tree index_type, bool typeless_storage,
		    bool shared, bool set_canonical)
{
  tree t;

  /* Arrays of functions are invalid; recover by building an int array.  */
  if (TREE_CODE (elt_type) == FUNCTION_TYPE)
    {
      error ("arrays of functions are not meaningful");
      elt_type = integer_type_node;
    }

  t = make_node (ARRAY_TYPE);
  TREE_TYPE (t) = elt_type;
  TYPE_DOMAIN (t) = index_type;
  TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
  TYPE_TYPELESS_STORAGE (t) = typeless_storage;
  layout_type (t);

  /* Share the node with any previously-built identical array type.  */
  if (shared)
    {
      hashval_t hash = type_hash_canon_hash (t);
      t = type_hash_canon (hash, t);
    }

  /* type_hash_canon may have returned an existing node whose canonical
     type is already settled; only compute one for a fresh node.  */
  if (TYPE_CANONICAL (t) == t && set_canonical)
    {
      if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
	  || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
	  || in_lto_p)
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (elt_type) != elt_type
	       || (index_type && TYPE_CANONICAL (index_type) != index_type))
	/* Build the canonical array type from the canonical element and
	   index types.  */
	TYPE_CANONICAL (t)
	  = build_array_type_1 (TYPE_CANONICAL (elt_type),
				index_type
				? TYPE_CANONICAL (index_type) : NULL_TREE,
				typeless_storage, shared, set_canonical);
    }

  return t;
}
8176
8177 /* Wrapper around build_array_type_1 with SHARED set to true. */
8178
8179 tree
8180 build_array_type (tree elt_type, tree index_type, bool typeless_storage)
8181 {
8182 return
8183 build_array_type_1 (elt_type, index_type, typeless_storage, true, true);
8184 }
8185
8186 /* Wrapper around build_array_type_1 with SHARED set to false. */
8187
8188 tree
8189 build_nonshared_array_type (tree elt_type, tree index_type)
8190 {
8191 return build_array_type_1 (elt_type, index_type, false, false, true);
8192 }
8193
8194 /* Return a representation of ELT_TYPE[NELTS], using indices of type
8195 sizetype. */
8196
8197 tree
8198 build_array_type_nelts (tree elt_type, poly_uint64 nelts)
8199 {
8200 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
8201 }
8202
8203 /* Recursively examines the array elements of TYPE, until a non-array
8204 element type is found. */
8205
8206 tree
8207 strip_array_types (tree type)
8208 {
8209 while (TREE_CODE (type) == ARRAY_TYPE)
8210 type = TREE_TYPE (type);
8211
8212 return type;
8213 }
8214
/* Computes the canonical argument types from the argument type list
   ARGTYPES.

   Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
   on entry to this function, or if any of the ARGTYPES are
   structural.

   Upon return, *ANY_NONCANONICAL_P will be true iff either it was
   true on entry to this function, or if any of the ARGTYPES are
   non-canonical.

   Returns a canonical argument list, which may be ARGTYPES when the
   canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
   true) or would not differ from ARGTYPES.  */

static tree
maybe_canonicalize_argtypes (tree argtypes,
			     bool *any_structural_p,
			     bool *any_noncanonical_p)
{
  tree arg;
  bool any_noncanonical_argtypes_p = false;

  /* First pass: classify the arguments.  Stop early once a structural
     argument is found, since that makes the whole list structural.  */
  for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
    {
      if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
	/* Fail gracefully by stating that the type is structural.  */
	*any_structural_p = true;
      else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
	*any_structural_p = true;
      else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
	       || TREE_PURPOSE (arg))
	/* If the argument has a default argument, we consider it
	   non-canonical even though the type itself is canonical.
	   That way, different variants of function and method types
	   with default arguments will all point to the variant with
	   no defaults as their canonical type.  */
	any_noncanonical_argtypes_p = true;
    }

  /* A structural list needs no canonical counterpart.  */
  if (*any_structural_p)
    return argtypes;

  if (any_noncanonical_argtypes_p)
    {
      /* Build the canonical list of argument types.  */
      tree canon_argtypes = NULL_TREE;
      bool is_void = false;

      for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
	{
	  if (arg == void_list_node)
	    is_void = true;
	  else
	    canon_argtypes = tree_cons (NULL_TREE,
					TYPE_CANONICAL (TREE_VALUE (arg)),
					canon_argtypes);
	}

      /* The loop above built the list in reverse; restore source order
	 and re-append the shared void terminator if there was one.  */
      canon_argtypes = nreverse (canon_argtypes);
      if (is_void)
	canon_argtypes = chainon (canon_argtypes, void_list_node);

      /* There is a non-canonical type.  */
      *any_noncanonical_p = true;
      return canon_argtypes;
    }

  /* The canonical argument types are the same as ARGTYPES.  */
  return argtypes;
}
8286
/* Construct, lay out and return
   the type of functions returning type VALUE_TYPE
   given arguments of types ARG_TYPES.
   ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
   are data type nodes for the arguments of the function.
   If such a type has already been constructed, reuse it.  */

tree
build_function_type (tree value_type, tree arg_types)
{
  tree t;
  inchash::hash hstate;
  bool any_structural_p, any_noncanonical_p;
  tree canon_argtypes;

  gcc_assert (arg_types != error_mark_node);

  /* A function cannot return a function; recover by returning int.  */
  if (TREE_CODE (value_type) == FUNCTION_TYPE)
    {
      error ("function return type cannot be function");
      value_type = integer_type_node;
    }

  /* Make a node of the sort we want.  */
  t = make_node (FUNCTION_TYPE);
  TREE_TYPE (t) = value_type;
  TYPE_ARG_TYPES (t) = arg_types;

  /* If we already have such a type, use the old one.  */
  hashval_t hash = type_hash_canon_hash (t);
  t = type_hash_canon (hash, t);

  /* Set up the canonical type.  It is structural or non-canonical if
     the return type or any argument type is.  */
  any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
  any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
  canon_argtypes = maybe_canonicalize_argtypes (arg_types,
						&any_structural_p,
						&any_noncanonical_p);
  if (any_structural_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (any_noncanonical_p)
    TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
					      canon_argtypes);

  if (!COMPLETE_TYPE_P (t))
    layout_type (t);
  return t;
}
8335
/* Build a function type.  The RETURN_TYPE is the type returned by the
   function.  If VAARGS is set, no void_type_node is appended to the
   list.  ARGP must always be terminated by a NULL_TREE.  */

static tree
build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
{
  tree t, args, last;

  /* Collect the argument types into a TREE_LIST; consing prepends, so
     the list is built in reverse order.  */
  t = va_arg (argp, tree);
  for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
    args = tree_cons (NULL_TREE, t, args);

  if (vaargs)
    {
      /* Before nreverse, the list head is the last argument supplied;
	 a varargs function must not end its named arguments with void.  */
      last = args;
      if (args != NULL_TREE)
	args = nreverse (args);
      gcc_assert (last != void_list_node);
    }
  else if (args == NULL_TREE)
    /* No named arguments: the prototype is (void).  */
    args = void_list_node;
  else
    {
      /* Restore source order and terminate with void to mark the list
	 as a complete prototype.  LAST is the old head, which becomes
	 the tail after nreverse.  */
      last = args;
      args = nreverse (args);
      TREE_CHAIN (last) = void_list_node;
    }
  args = build_function_type (return_type, args);

  return args;
}
8368
8369 /* Build a function type. The RETURN_TYPE is the type returned by the
8370 function. If additional arguments are provided, they are
8371 additional argument types. The list of argument types must always
8372 be terminated by NULL_TREE. */
8373
8374 tree
8375 build_function_type_list (tree return_type, ...)
8376 {
8377 tree args;
8378 va_list p;
8379
8380 va_start (p, return_type);
8381 args = build_function_type_list_1 (false, return_type, p);
8382 va_end (p);
8383 return args;
8384 }
8385
8386 /* Build a variable argument function type. The RETURN_TYPE is the
8387 type returned by the function. If additional arguments are provided,
8388 they are additional argument types. The list of argument types must
8389 always be terminated by NULL_TREE. */
8390
8391 tree
8392 build_varargs_function_type_list (tree return_type, ...)
8393 {
8394 tree args;
8395 va_list p;
8396
8397 va_start (p, return_type);
8398 args = build_function_type_list_1 (true, return_type, p);
8399 va_end (p);
8400
8401 return args;
8402 }
8403
8404 /* Build a function type. RETURN_TYPE is the type returned by the
8405 function; VAARGS indicates whether the function takes varargs. The
8406 function takes N named arguments, the types of which are provided in
8407 ARG_TYPES. */
8408
8409 static tree
8410 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8411 tree *arg_types)
8412 {
8413 int i;
8414 tree t = vaargs ? NULL_TREE : void_list_node;
8415
8416 for (i = n - 1; i >= 0; i--)
8417 t = tree_cons (NULL_TREE, arg_types[i], t);
8418
8419 return build_function_type (return_type, t);
8420 }
8421
8422 /* Build a function type. RETURN_TYPE is the type returned by the
8423 function. The function takes N named arguments, the types of which
8424 are provided in ARG_TYPES. */
8425
8426 tree
8427 build_function_type_array (tree return_type, int n, tree *arg_types)
8428 {
8429 return build_function_type_array_1 (false, return_type, n, arg_types);
8430 }
8431
8432 /* Build a variable argument function type. RETURN_TYPE is the type
8433 returned by the function. The function takes N named arguments, the
8434 types of which are provided in ARG_TYPES. */
8435
8436 tree
8437 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8438 {
8439 return build_function_type_array_1 (true, return_type, n, arg_types);
8440 }
8441
/* Build a METHOD_TYPE for a member of BASETYPE.  The RETTYPE (a TYPE)
   and ARGTYPES (a TREE_LIST) are the return type and arguments types
   for the method.  An implicit additional parameter (of type
   pointer-to-BASETYPE) is added to the ARGTYPES.  */

tree
build_method_type_directly (tree basetype,
			    tree rettype,
			    tree argtypes)
{
  tree t;
  tree ptype;
  bool any_structural_p, any_noncanonical_p;
  tree canon_argtypes;

  /* Make a node of the sort we want.  */
  t = make_node (METHOD_TYPE);

  TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
  TREE_TYPE (t) = rettype;
  ptype = build_pointer_type (basetype);

  /* The actual arglist for this function includes a "hidden" argument
     which is "this".  Put it into the list of argument types.  */
  argtypes = tree_cons (NULL_TREE, ptype, argtypes);
  TYPE_ARG_TYPES (t) = argtypes;

  /* If we already have such a type, use the old one.  */
  hashval_t hash = type_hash_canon_hash (t);
  t = type_hash_canon (hash, t);

  /* Set up the canonical type.  It is structural or non-canonical if
     the base type, return type or any argument type is.  */
  any_structural_p
    = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
       || TYPE_STRUCTURAL_EQUALITY_P (rettype));
  any_noncanonical_p
    = (TYPE_CANONICAL (basetype) != basetype
       || TYPE_CANONICAL (rettype) != rettype);
  /* Skip the "this" argument (TREE_CHAIN) when canonicalizing; the
     recursive call below re-adds a canonical "this" itself.  */
  canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
						&any_structural_p,
						&any_noncanonical_p);
  if (any_structural_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (any_noncanonical_p)
    TYPE_CANONICAL (t)
      = build_method_type_directly (TYPE_CANONICAL (basetype),
				    TYPE_CANONICAL (rettype),
				    canon_argtypes);
  if (!COMPLETE_TYPE_P (t))
    layout_type (t);

  return t;
}
8495
8496 /* Construct, lay out and return the type of methods belonging to class
8497 BASETYPE and whose arguments and values are described by TYPE.
8498 If that type exists already, reuse it.
8499 TYPE must be a FUNCTION_TYPE node. */
8500
8501 tree
8502 build_method_type (tree basetype, tree type)
8503 {
8504 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8505
8506 return build_method_type_directly (basetype,
8507 TREE_TYPE (type),
8508 TYPE_ARG_TYPES (type));
8509 }
8510
/* Construct, lay out and return the type of offsets to a value
   of type TYPE, within an object of type BASETYPE.
   If a suitable offset type exists already, reuse it.  */

tree
build_offset_type (tree basetype, tree type)
{
  tree t;

  /* Make a node of the sort we want.  */
  t = make_node (OFFSET_TYPE);

  TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
  TREE_TYPE (t) = type;

  /* If we already have such a type, use the old one.  */
  hashval_t hash = type_hash_canon_hash (t);
  t = type_hash_canon (hash, t);

  if (!COMPLETE_TYPE_P (t))
    layout_type (t);

  /* type_hash_canon may have returned an existing node whose canonical
     type is already settled; only compute one for a fresh node.  */
  if (TYPE_CANONICAL (t) == t)
    {
      if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
	  || TYPE_STRUCTURAL_EQUALITY_P (type))
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
	       || TYPE_CANONICAL (type) != type)
	TYPE_CANONICAL (t)
	  = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
			       TYPE_CANONICAL (type));
    }

  return t;
}
8547
/* Create a complex type whose components are COMPONENT_TYPE.

   If NAMED is true, the type is given a TYPE_NAME.  We do not always
   do so because this creates a DECL node and thus make the DECL_UIDs
   dependent on the type canonicalization hashtable, which is GC-ed,
   so the DECL_UIDs would not be stable wrt garbage collection.  */

tree
build_complex_type (tree component_type, bool named)
{
  /* Complex types are only defined over integral, real float and
     fixed-point components.  */
  gcc_assert (INTEGRAL_TYPE_P (component_type)
	      || SCALAR_FLOAT_TYPE_P (component_type)
	      || FIXED_POINT_TYPE_P (component_type));

  /* Make a node of the sort we want.  */
  tree probe = make_node (COMPLEX_TYPE);

  TREE_TYPE (probe) = TYPE_MAIN_VARIANT (component_type);

  /* If we already have such a type, use the old one.  */
  hashval_t hash = type_hash_canon_hash (probe);
  tree t = type_hash_canon (hash, probe);

  if (t == probe)
    {
      /* We created a new type.  The hash insertion will have laid
	 out the type.  We need to check the canonicalization and
	 maybe set the name.  */
      gcc_checking_assert (COMPLETE_TYPE_P (t)
			   && !TYPE_NAME (t)
			   && TYPE_CANONICAL (t) == t);

      if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (t)))
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (TREE_TYPE (t)) != TREE_TYPE (t))
	TYPE_CANONICAL (t)
	  = build_complex_type (TYPE_CANONICAL (TREE_TYPE (t)), named);

      /* We need to create a name, since complex is a fundamental type.
	 Only the standard integer component types get a name here.  */
      if (named)
	{
	  const char *name = NULL;

	  if (TREE_TYPE (t) == char_type_node)
	    name = "complex char";
	  else if (TREE_TYPE (t) == signed_char_type_node)
	    name = "complex signed char";
	  else if (TREE_TYPE (t) == unsigned_char_type_node)
	    name = "complex unsigned char";
	  else if (TREE_TYPE (t) == short_integer_type_node)
	    name = "complex short int";
	  else if (TREE_TYPE (t) == short_unsigned_type_node)
	    name = "complex short unsigned int";
	  else if (TREE_TYPE (t) == integer_type_node)
	    name = "complex int";
	  else if (TREE_TYPE (t) == unsigned_type_node)
	    name = "complex unsigned int";
	  else if (TREE_TYPE (t) == long_integer_type_node)
	    name = "complex long int";
	  else if (TREE_TYPE (t) == long_unsigned_type_node)
	    name = "complex long unsigned int";
	  else if (TREE_TYPE (t) == long_long_integer_type_node)
	    name = "complex long long int";
	  else if (TREE_TYPE (t) == long_long_unsigned_type_node)
	    name = "complex long long unsigned int";

	  if (name != NULL)
	    TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
					get_identifier (name), t);
	}
    }

  /* Return the variant carrying the component type's qualifiers.  */
  return build_qualified_type (t, TYPE_QUALS (component_type));
}
8622
/* If TYPE is a real or complex floating-point type and the target
   does not directly support arithmetic on TYPE then return the wider
   type to be used for arithmetic on TYPE.  Otherwise, return
   NULL_TREE.  */

tree
excess_precision_type (tree type)
{
  /* The target can give two different responses to the question of
     which excess precision mode it would like depending on whether we
     are in -fexcess-precision=standard or -fexcess-precision=fast.  */

  enum excess_precision_type requested_type
    = (flag_excess_precision == EXCESS_PRECISION_FAST
       ? EXCESS_PRECISION_TYPE_FAST
       : EXCESS_PRECISION_TYPE_STANDARD);

  enum flt_eval_method target_flt_eval_method
    = targetm.c.excess_precision (requested_type);

  /* The target should not ask for unpredictable float evaluation (though
     it might advertise that implicitly the evaluation is unpredictable,
     but we don't care about that here, it will have been reported
     elsewhere).  If it does ask for unpredictable evaluation, we have
     nothing to do here.  */
  gcc_assert (target_flt_eval_method != FLT_EVAL_METHOD_UNPREDICTABLE);

  /* Nothing to do.  The target has asked for all types we know about
     to be computed with their native precision and range.  */
  if (target_flt_eval_method == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16)
    return NULL_TREE;

  /* The target will promote this type in a target-dependent way, so excess
     precision ought to leave it alone.  */
  if (targetm.promoted_type (type) != NULL_TREE)
    return NULL_TREE;

  /* float16_type_node may not exist on all targets; use VOIDmode as a
     sentinel so the mode comparisons below can never match.  */
  machine_mode float16_type_mode = (float16_type_node
				    ? TYPE_MODE (float16_type_node)
				    : VOIDmode);
  machine_mode float_type_mode = TYPE_MODE (float_type_node);
  machine_mode double_type_mode = TYPE_MODE (double_type_node);

  switch (TREE_CODE (type))
    {
    case REAL_TYPE:
      {
	machine_mode type_mode = TYPE_MODE (type);
	switch (target_flt_eval_method)
	  {
	  case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
	    if (type_mode == float16_type_mode)
	      return float_type_node;
	    break;
	  case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
	    if (type_mode == float16_type_mode
		|| type_mode == float_type_mode)
	      return double_type_node;
	    break;
	  case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
	    if (type_mode == float16_type_mode
		|| type_mode == float_type_mode
		|| type_mode == double_type_mode)
	      return long_double_type_node;
	    break;
	  default:
	    gcc_unreachable ();
	  }
	break;
      }
    case COMPLEX_TYPE:
      {
	/* Only complex types with a floating-point component are
	   subject to excess precision.  */
	if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
	  return NULL_TREE;
	machine_mode type_mode = TYPE_MODE (TREE_TYPE (type));
	switch (target_flt_eval_method)
	  {
	  case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
	    if (type_mode == float16_type_mode)
	      return complex_float_type_node;
	    break;
	  case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
	    if (type_mode == float16_type_mode
		|| type_mode == float_type_mode)
	      return complex_double_type_node;
	    break;
	  case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
	    if (type_mode == float16_type_mode
		|| type_mode == float_type_mode
		|| type_mode == double_type_mode)
	      return complex_long_double_type_node;
	    break;
	  default:
	    gcc_unreachable ();
	  }
	break;
      }
    default:
      break;
    }

  return NULL_TREE;
}
8726 \f
/* Return OP, stripped of any conversions to wider types as much as is safe.
   Converting the value back to OP's type makes a value equivalent to OP.

   If FOR_TYPE is nonzero, we return a value which, if converted to
   type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.

   OP must have integer, real or enumeral type.  Pointers are not allowed!

   There are some cases where the obvious value we could return
   would regenerate to OP if converted to OP's type,
   but would not extend like OP to wider types.
   If FOR_TYPE indicates such extension is contemplated, we eschew such values.
   For example, if OP is (unsigned short)(signed char)-1,
   we avoid returning (signed char)-1 if FOR_TYPE is int,
   even though extending that to an unsigned short would regenerate OP,
   since the result of extending (signed char)-1 to (int)
   is different from (int) OP.  */

tree
get_unwidened (tree op, tree for_type)
{
  /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension.  */
  tree type = TREE_TYPE (op);
  unsigned final_prec
    = TYPE_PRECISION (for_type != 0 ? for_type : type);
  int uns
    = (for_type != 0 && for_type != type
       && final_prec > TYPE_PRECISION (type)
       && TYPE_UNSIGNED (type));
  /* WIN tracks the innermost expression we have proved safe to use.  */
  tree win = op;

  while (CONVERT_EXPR_P (op))
    {
      int bitschange;

      /* TYPE_PRECISION on vector types has different meaning
	 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
	 so avoid them here.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
	break;

      /* Positive BITSCHANGE means this conversion widened its operand,
	 negative means it truncated it.  */
      bitschange = TYPE_PRECISION (TREE_TYPE (op))
		   - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));

      /* Truncations are many-one so cannot be removed.
	 Unless we are later going to truncate down even farther.  */
      if (bitschange < 0
	  && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
	break;

      /* See what's inside this conversion.  If we decide to strip it,
	 we will set WIN.  */
      op = TREE_OPERAND (op, 0);

      /* If we have not stripped any zero-extensions (uns is 0),
	 we can strip any kind of extension.
	 If we have previously stripped a zero-extension,
	 only zero-extensions can safely be stripped.
	 Any extension can be stripped if the bits it would produce
	 are all going to be discarded later by truncating to FOR_TYPE.  */

      if (bitschange > 0)
	{
	  if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
	    win = op;
	  /* TYPE_UNSIGNED says whether this is a zero-extension.
	     Let's avoid computing it if it does not affect WIN
	     and if UNS will not be needed again.  */
	  if ((uns
	       || CONVERT_EXPR_P (op))
	      && TYPE_UNSIGNED (TREE_TYPE (op)))
	    {
	      uns = 1;
	      win = op;
	    }
	}
    }

  /* If we finally reach a constant see if it fits in sth smaller and
     in that case convert it.  */
  if (TREE_CODE (win) == INTEGER_CST)
    {
      tree wtype = TREE_TYPE (win);
      unsigned prec = wi::min_precision (wi::to_wide (win), TYPE_SIGN (wtype));
      if (for_type)
	prec = MAX (prec, final_prec);
      if (prec < TYPE_PRECISION (wtype))
	{
	  /* The frontend may not supply a type of the exact requested
	     precision; only convert when it is genuinely narrower.  */
	  tree t = lang_hooks.types.type_for_size (prec, TYPE_UNSIGNED (wtype));
	  if (t && TYPE_PRECISION (t) < TYPE_PRECISION (wtype))
	    win = fold_convert (t, win);
	}
    }

  return win;
}
8823 \f
/* Return OP or a simpler expression for a narrower value
   which can be sign-extended or zero-extended to give back OP.
   Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
   or 0 if the value should be sign-extended.  */

tree
get_narrower (tree op, int *unsignedp_ptr)
{
  int uns = 0;
  int first = 1;
  tree win = op;
  bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));

  if (TREE_CODE (op) == COMPOUND_EXPR)
    {
      /* Narrow only the value operand (the rightmost one), then rebuild
	 the chain of COMPOUND_EXPRs around the narrowed value so the
	 side effects of the left operands are preserved in order.  */
      do
	op = TREE_OPERAND (op, 1);
      while (TREE_CODE (op) == COMPOUND_EXPR);
      tree ret = get_narrower (op, unsignedp_ptr);
      if (ret == op)
	return win;
      auto_vec <tree, 16> v;
      unsigned int i;
      for (op = win; TREE_CODE (op) == COMPOUND_EXPR;
	   op = TREE_OPERAND (op, 1))
	v.safe_push (op);
      FOR_EACH_VEC_ELT_REVERSE (v, i, op)
	ret = build2_loc (EXPR_LOCATION (op), COMPOUND_EXPR,
			  TREE_TYPE (win), TREE_OPERAND (op, 0),
			  ret);
      return ret;
    }
  while (TREE_CODE (op) == NOP_EXPR)
    {
      /* Positive BITSCHANGE means this NOP_EXPR widened its operand.  */
      int bitschange
	= (TYPE_PRECISION (TREE_TYPE (op))
	   - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));

      /* Truncations are many-one so cannot be removed.  */
      if (bitschange < 0)
	break;

      /* See what's inside this conversion.  If we decide to strip it,
	 we will set WIN.  */

      if (bitschange > 0)
	{
	  op = TREE_OPERAND (op, 0);
	  /* An extension: the outermost one can be stripped,
	     but remember whether it is zero or sign extension.  */
	  if (first)
	    uns = TYPE_UNSIGNED (TREE_TYPE (op));
	  /* Otherwise, if a sign extension has been stripped,
	     only sign extensions can now be stripped;
	     if a zero extension has been stripped, only zero-extensions.  */
	  else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
	    break;
	  first = 0;
	}
      else /* bitschange == 0 */
	{
	  /* A change in nominal type can always be stripped, but we must
	     preserve the unsignedness.  */
	  if (first)
	    uns = TYPE_UNSIGNED (TREE_TYPE (op));
	  first = 0;
	  op = TREE_OPERAND (op, 0);
	  /* Keep trying to narrow, but don't assign op to win if it
	     would turn an integral type into something else.  */
	  if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
	    continue;
	}

      win = op;
    }

  if (TREE_CODE (op) == COMPONENT_REF
      /* Since type_for_size always gives an integer type.  */
      && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
      && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
      /* Ensure field is laid out already.  */
      && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
      && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
    {
      unsigned HOST_WIDE_INT innerprec
	= tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
      int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
		       || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
      tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);

      /* We can get this structure field in a narrower type that fits it,
	 but the resulting extension to its nominal type (a fullword type)
	 must satisfy the same conditions as for other extensions.

	 Do this only for fields that are aligned (not bit-fields),
	 because when bit-field insns will be used there is no
	 advantage in doing this.  */

      if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
	  && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
	  && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
	  && type != 0)
	{
	  if (first)
	    uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
	  win = fold_convert (type, op);
	}
    }

  *unsignedp_ptr = uns;
  return win;
}
8936 \f
/* Return true if integer constant C has a value that is permissible
   for TYPE, an integral type.  */

bool
int_fits_type_p (const_tree c, const_tree type)
{
  tree type_low_bound, type_high_bound;
  bool ok_for_low_bound, ok_for_high_bound;
  signop sgn_c = TYPE_SIGN (TREE_TYPE (c));

  /* Non-standard boolean types can have arbitrary precision but various
     transformations assume that they can only take values 0 and +/-1.  */
  if (TREE_CODE (type) == BOOLEAN_TYPE)
    return wi::fits_to_boolean_p (wi::to_wide (c), type);

retry:
  type_low_bound = TYPE_MIN_VALUE (type);
  type_high_bound = TYPE_MAX_VALUE (type);

  /* If at least one bound of the type is a constant integer, we can check
     ourselves and maybe make a decision.  If no such decision is possible, but
     this type is a subtype, try checking against that.  Otherwise, use
     fits_to_tree_p, which checks against the precision.

     Compute the status for each possibly constant bound, and return if we see
     one does not match.  Use ok_for_xxx_bound for this purpose, assigning -1
     for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
     for "constant known to fit".  */

  /* Check if c >= type_low_bound.  */
  if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
    {
      if (tree_int_cst_lt (c, type_low_bound))
	return false;
      ok_for_low_bound = true;
    }
  else
    ok_for_low_bound = false;

  /* Check if c <= type_high_bound.  */
  if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
    {
      if (tree_int_cst_lt (type_high_bound, c))
	return false;
      ok_for_high_bound = true;
    }
  else
    ok_for_high_bound = false;

  /* If the constant fits both bounds, the result is known.  */
  if (ok_for_low_bound && ok_for_high_bound)
    return true;

  /* Perform some generic filtering which may allow making a decision
     even if the bounds are not constant.  First, negative integers
     never fit in unsigned types, */
  if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (wi::to_wide (c)))
    return false;

  /* Second, narrower types always fit in wider ones.  */
  if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
    return true;

  /* Third, unsigned integers with top bit set never fit signed types.  */
  if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
    {
      int prec = GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (TREE_TYPE (c))) - 1;
      if (prec < TYPE_PRECISION (TREE_TYPE (c)))
	{
	  /* When a tree_cst is converted to a wide-int, the precision
	     is taken from the type.  However, if the precision of the
	     mode underneath the type is smaller than that, it is
	     possible that the value will not fit.  The test below
	     fails if any bit is set between the sign bit of the
	     underlying mode and the top bit of the type.  */
	  if (wi::zext (wi::to_wide (c), prec - 1) != wi::to_wide (c))
	    return false;
	}
      else if (wi::neg_p (wi::to_wide (c)))
	return false;
    }

  /* If we haven't been able to decide at this point, there nothing more we
     can check ourselves here.  Look at the base type if we have one and it
     has the same precision.  */
  if (TREE_CODE (type) == INTEGER_TYPE
      && TREE_TYPE (type) != 0
      && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
    {
      type = TREE_TYPE (type);
      goto retry;
    }

  /* Or to fits_to_tree_p, if nothing else.  */
  return wi::fits_to_tree_p (wi::to_wide (c), type);
}
9033
/* Stores bounds of an integer TYPE in MIN and MAX.  If TYPE has non-constant
   bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
   represented (assuming two's-complement arithmetic) within the bit
   precision of the type are returned instead.  */

void
get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
{
  /* Lower bound: prefer the type's own constant TYPE_MIN_VALUE, else
     fall back to the smallest value the precision can represent.  */
  if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
      && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
    wi::to_mpz (wi::to_wide (TYPE_MIN_VALUE (type)), min, TYPE_SIGN (type));
  else
    {
      if (TYPE_UNSIGNED (type))
	mpz_set_ui (min, 0);
      else
	{
	  wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
	  wi::to_mpz (mn, min, SIGNED);
	}
    }

  /* Upper bound: same scheme with TYPE_MAX_VALUE.  */
  if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
      && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
    wi::to_mpz (wi::to_wide (TYPE_MAX_VALUE (type)), max, TYPE_SIGN (type));
  else
    {
      /* NOTE: MN here holds the maximum value despite its name.  */
      wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
      wi::to_mpz (mn, max, TYPE_SIGN (type));
    }
}
9065
9066 /* Return true if VAR is an automatic variable. */
9067
9068 bool
9069 auto_var_p (const_tree var)
9070 {
9071 return ((((VAR_P (var) && ! DECL_EXTERNAL (var))
9072 || TREE_CODE (var) == PARM_DECL)
9073 && ! TREE_STATIC (var))
9074 || TREE_CODE (var) == RESULT_DECL);
9075 }
9076
9077 /* Return true if VAR is an automatic variable defined in function FN. */
9078
9079 bool
9080 auto_var_in_fn_p (const_tree var, const_tree fn)
9081 {
9082 return (DECL_P (var) && DECL_CONTEXT (var) == fn
9083 && (auto_var_p (var)
9084 || TREE_CODE (var) == LABEL_DECL));
9085 }
9086
9087 /* Subprogram of following function. Called by walk_tree.
9088
9089 Return *TP if it is an automatic variable or parameter of the
9090 function passed in as DATA. */
9091
9092 static tree
9093 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
9094 {
9095 tree fn = (tree) data;
9096
9097 if (TYPE_P (*tp))
9098 *walk_subtrees = 0;
9099
9100 else if (DECL_P (*tp)
9101 && auto_var_in_fn_p (*tp, fn))
9102 return *tp;
9103
9104 return NULL_TREE;
9105 }
9106
/* Returns true if T is, contains, or refers to a type with variable
   size.  For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
   arguments, but not the return type.  If FN is nonzero, only return
   true if a modifier of the type or position of FN is a variable or
   parameter inside FN.

   This concept is more general than that of C99 'variably modified types':
   in C99, a struct type is never variably modified because a VLA may not
   appear as a structure member.  However, in GNU C code like:

     struct S { int i[f()]; };

   is valid, and other languages may define similar constructs.  */

bool
variably_modified_type_p (tree type, tree fn)
{
  tree t;

  /* Test if T is either variable (if FN is zero) or an expression containing
     a variable in FN.  If TYPE isn't gimplified, return true also if
     gimplify_one_sizepos would gimplify the expression into a local
     variable.  */
#define RETURN_TRUE_IF_VAR(T)						\
  do { tree _t = (T);							\
    if (_t != NULL_TREE							\
	&& _t != error_mark_node					\
	&& !CONSTANT_CLASS_P (_t)					\
	&& TREE_CODE (_t) != PLACEHOLDER_EXPR				\
	&& (!fn								\
	    || (!TYPE_SIZES_GIMPLIFIED (type)				\
		&& (TREE_CODE (_t) != VAR_DECL				\
		    && !CONTAINS_PLACEHOLDER_P (_t)))			\
	    || walk_tree (&_t, find_var_from_fn, fn, NULL)))		\
      return true;  } while (0)

  if (type == error_mark_node)
    return false;

  /* If TYPE itself has variable size, it is variably modified.  */
  RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
  RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case VECTOR_TYPE:
      /* Ada can have pointer types refering to themselves indirectly.
	 TREE_VISITED serves as the cycle guard during the recursion and
	 is reset on every exit path.  */
      if (TREE_VISITED (type))
	return false;
      TREE_VISITED (type) = true;
      if (variably_modified_type_p (TREE_TYPE (type), fn))
	{
	  TREE_VISITED (type) = false;
	  return true;
	}
      TREE_VISITED (type) = false;
      break;

    case FUNCTION_TYPE:
    case METHOD_TYPE:
      /* If TYPE is a function type, it is variably modified if the
	 return type is variably modified.  */
      if (variably_modified_type_p (TREE_TYPE (type), fn))
	return true;
      break;

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      /* Scalar types are variably modified if their end points
	 aren't constant.  */
      RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
      RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* We can't see if any of the fields are variably-modified by the
	 definition we normally use, since that would produce infinite
	 recursion via pointers.  */
      /* This is variably modified if some field's type is.  */
      for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
	if (TREE_CODE (t) == FIELD_DECL)
	  {
	    RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
	    RETURN_TRUE_IF_VAR (DECL_SIZE (t));
	    RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));

	    /* If the type is a qualified union, then the DECL_QUALIFIER
	       of fields can also be an expression containing a variable.  */
	    if (TREE_CODE (type) == QUAL_UNION_TYPE)
	      RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));

	    /* If the field is a qualified union, then it's only a container
	       for what's inside so we look into it.  That's necessary in LTO
	       mode because the sizes of the field tested above have been set
	       to PLACEHOLDER_EXPRs by free_lang_data.  */
	    if (TREE_CODE (TREE_TYPE (t)) == QUAL_UNION_TYPE
		&& variably_modified_type_p (TREE_TYPE (t), fn))
	      return true;
	  }
      break;

    case ARRAY_TYPE:
      /* Do not call ourselves to avoid infinite recursion.  This is
	 variably modified if the element type is.  */
      RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
      RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
      break;

    default:
      break;
    }

  /* The current language may have other cases to check, but in general,
     all other types are not variably modified.  */
  return lang_hooks.tree_inlining.var_mod_type_p (type, fn);

#undef RETURN_TRUE_IF_VAR
}
9232
9233 /* Given a DECL or TYPE, return the scope in which it was declared, or
9234 NULL_TREE if there is no containing scope. */
9235
9236 tree
9237 get_containing_scope (const_tree t)
9238 {
9239 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
9240 }
9241
9242 /* Returns the ultimate TRANSLATION_UNIT_DECL context of DECL or NULL. */
9243
9244 const_tree
9245 get_ultimate_context (const_tree decl)
9246 {
9247 while (decl && TREE_CODE (decl) != TRANSLATION_UNIT_DECL)
9248 {
9249 if (TREE_CODE (decl) == BLOCK)
9250 decl = BLOCK_SUPERCONTEXT (decl);
9251 else
9252 decl = get_containing_scope (decl);
9253 }
9254 return decl;
9255 }
9256
9257 /* Return the innermost context enclosing DECL that is
9258 a FUNCTION_DECL, or zero if none. */
9259
9260 tree
9261 decl_function_context (const_tree decl)
9262 {
9263 tree context;
9264
9265 if (TREE_CODE (decl) == ERROR_MARK)
9266 return 0;
9267
9268 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
9269 where we look up the function at runtime. Such functions always take
9270 a first argument of type 'pointer to real context'.
9271
9272 C++ should really be fixed to use DECL_CONTEXT for the real context,
9273 and use something else for the "virtual context". */
9274 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VIRTUAL_P (decl))
9275 context
9276 = TYPE_MAIN_VARIANT
9277 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
9278 else
9279 context = DECL_CONTEXT (decl);
9280
9281 while (context && TREE_CODE (context) != FUNCTION_DECL)
9282 {
9283 if (TREE_CODE (context) == BLOCK)
9284 context = BLOCK_SUPERCONTEXT (context);
9285 else
9286 context = get_containing_scope (context);
9287 }
9288
9289 return context;
9290 }
9291
9292 /* Return the innermost context enclosing DECL that is
9293 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
9294 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
9295
9296 tree
9297 decl_type_context (const_tree decl)
9298 {
9299 tree context = DECL_CONTEXT (decl);
9300
9301 while (context)
9302 switch (TREE_CODE (context))
9303 {
9304 case NAMESPACE_DECL:
9305 case TRANSLATION_UNIT_DECL:
9306 return NULL_TREE;
9307
9308 case RECORD_TYPE:
9309 case UNION_TYPE:
9310 case QUAL_UNION_TYPE:
9311 return context;
9312
9313 case TYPE_DECL:
9314 case FUNCTION_DECL:
9315 context = DECL_CONTEXT (context);
9316 break;
9317
9318 case BLOCK:
9319 context = BLOCK_SUPERCONTEXT (context);
9320 break;
9321
9322 default:
9323 gcc_unreachable ();
9324 }
9325
9326 return NULL_TREE;
9327 }
9328
/* CALL is a CALL_EXPR.  Return the declaration for the function
   called, or NULL_TREE if the called function cannot be
   determined.  */

tree
get_callee_fndecl (const_tree call)
{
  tree addr;

  if (call == error_mark_node)
    return error_mark_node;

  /* It's invalid to call this function with anything but a
     CALL_EXPR.  */
  gcc_assert (TREE_CODE (call) == CALL_EXPR);

  /* The first operand to the CALL is the address of the function
     called.  */
  addr = CALL_EXPR_FN (call);

  /* If there is no function, return early.  */
  if (addr == NULL_TREE)
    return NULL_TREE;

  /* STRIP_NOPS modifies ADDR in place, removing value-preserving
     conversions around the address expression.  */
  STRIP_NOPS (addr);

  /* If this is a readonly function pointer, extract its initial value.  */
  if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
      && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
      && DECL_INITIAL (addr))
    addr = DECL_INITIAL (addr);

  /* If the address is just `&f' for some function `f', then we know
     that `f' is being called.  */
  if (TREE_CODE (addr) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
    return TREE_OPERAND (addr, 0);

  /* We couldn't figure out what was being called.  */
  return NULL_TREE;
}
9370
9371 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
9372 return the associated function code, otherwise return CFN_LAST. */
9373
9374 combined_fn
9375 get_call_combined_fn (const_tree call)
9376 {
9377 /* It's invalid to call this function with anything but a CALL_EXPR. */
9378 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9379
9380 if (!CALL_EXPR_FN (call))
9381 return as_combined_fn (CALL_EXPR_IFN (call));
9382
9383 tree fndecl = get_callee_fndecl (call);
9384 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
9385 return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
9386
9387 return CFN_LAST;
9388 }
9389
9390 /* Comparator of indices based on tree_node_counts. */
9391
9392 static int
9393 tree_nodes_cmp (const void *p1, const void *p2)
9394 {
9395 const unsigned *n1 = (const unsigned *)p1;
9396 const unsigned *n2 = (const unsigned *)p2;
9397
9398 return tree_node_counts[*n1] - tree_node_counts[*n2];
9399 }
9400
9401 /* Comparator of indices based on tree_code_counts. */
9402
9403 static int
9404 tree_codes_cmp (const void *p1, const void *p2)
9405 {
9406 const unsigned *n1 = (const unsigned *)p1;
9407 const unsigned *n2 = (const unsigned *)p2;
9408
9409 return tree_code_counts[*n1] - tree_code_counts[*n2];
9410 }
9411
/* Width of the dashed separator lines in the statistics dump.  */
#define TREE_MEM_USAGE_SPACES 40

/* Print debugging information about tree nodes generated during the compile,
   and any language-specific information.  Output goes to stderr.  Detailed
   per-node tables are only available when GCC was configured with
   GATHER_STATISTICS.  */

void
dump_tree_statistics (void)
{
  if (GATHER_STATISTICS)
    {
      uint64_t total_nodes, total_bytes;
      fprintf (stderr, "\nKind                   Nodes      Bytes\n");
      mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
      total_nodes = total_bytes = 0;

      {
	/* Table of node kinds, sorted by ascending node count.  */
	auto_vec<unsigned> indices (all_kinds);
	for (unsigned i = 0; i < all_kinds; i++)
	  indices.quick_push (i);
	indices.qsort (tree_nodes_cmp);

	for (unsigned i = 0; i < (int) all_kinds; i++)
	  {
	    unsigned j = indices[i];
	    fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n",
		     tree_node_kind_names[j], SIZE_AMOUNT (tree_node_counts[j]),
		     SIZE_AMOUNT (tree_node_sizes[j]));
	    total_nodes += tree_node_counts[j];
	    total_bytes += tree_node_sizes[j];
	  }
	mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
	fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n", "Total",
		 SIZE_AMOUNT (total_nodes), SIZE_AMOUNT (total_bytes));
	mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
      }

      {
	/* Table of tree codes, sorted by ascending use count.  */
	fprintf (stderr, "Code                              Nodes\n");
	mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);

	auto_vec<unsigned> indices (MAX_TREE_CODES);
	for (unsigned i = 0; i < MAX_TREE_CODES; i++)
	  indices.quick_push (i);
	indices.qsort (tree_codes_cmp);

	for (unsigned i = 0; i < MAX_TREE_CODES; i++)
	  {
	    unsigned j = indices[i];
	    fprintf (stderr, "%-32s %6" PRIu64 "%c\n",
		     get_tree_code_name ((enum tree_code) j),
		     SIZE_AMOUNT (tree_code_counts[j]));
	  }
	mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
	fprintf (stderr, "\n");
	ssanames_print_statistics ();
	fprintf (stderr, "\n");
	phinodes_print_statistics ();
	fprintf (stderr, "\n");
      }
    }
  else
    fprintf (stderr, "(No per-node statistics)\n");

  print_type_hash_statistics ();
  print_debug_expr_statistics ();
  print_value_expr_statistics ();
  lang_hooks.print_statistics ();
}
9480 \f
#define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"

/* Generate a crc32 of the low BYTES bytes of VALUE.  */

unsigned
crc32_unsigned_n (unsigned chksum, unsigned value, unsigned bytes)
{
  /* CRC-32 generator polynomial (as used by IEEE 802.3), applied one
     nibble at a time, most-significant nibble first.  */
  const unsigned poly = 0x04c11db7;

  value <<= (32 - bytes * 8);
  for (unsigned ix = bytes * 2; ix--; value <<= 4)
    {
      unsigned nibble = ((value ^ chksum) >> 28) & 0xf;

      /* Fold in the feedback for each set bit of the nibble.  This
	 relies on the raw feedback's top 4 bits being zero.  */
      unsigned feedback = 0;
      for (unsigned bit = 0; bit < 4; bit++)
	if (nibble & (1u << bit))
	  feedback ^= (1u << bit) * poly;

      chksum = (chksum << 4) ^ feedback;
    }

  return chksum;
}
9512
9513 /* Generate a crc32 of a string. */
9514
9515 unsigned
9516 crc32_string (unsigned chksum, const char *string)
9517 {
9518 do
9519 chksum = crc32_byte (chksum, *string);
9520 while (*string++);
9521 return chksum;
9522 }
9523
9524 /* P is a string that will be used in a symbol. Mask out any characters
9525 that are not valid in that context. */
9526
9527 void
9528 clean_symbol_name (char *p)
9529 {
9530 for (; *p; p++)
9531 if (! (ISALNUM (*p)
9532 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9533 || *p == '$'
9534 #endif
9535 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9536 || *p == '.'
9537 #endif
9538 ))
9539 *p = '_';
9540 }
9541
9542 static GTY(()) unsigned anon_cnt = 0; /* Saved for PCH. */
9543
9544 /* Create a unique anonymous identifier. The identifier is still a
9545 valid assembly label. */
9546
9547 tree
9548 make_anon_name ()
9549 {
9550 const char *fmt =
9551 #if !defined (NO_DOT_IN_LABEL)
9552 "."
9553 #elif !defined (NO_DOLLAR_IN_LABEL)
9554 "$"
9555 #else
9556 "_"
9557 #endif
9558 "_anon_%d";
9559
9560 char buf[24];
9561 int len = snprintf (buf, sizeof (buf), fmt, anon_cnt++);
9562 gcc_checking_assert (len < int (sizeof (buf)));
9563
9564 tree id = get_identifier_with_length (buf, len);
9565 IDENTIFIER_ANON_P (id) = true;
9566
9567 return id;
9568 }
9569
/* Generate a name for a special-purpose function.
   The generated name may need to be unique across the whole link.
   Changes to this function may also require corresponding changes to
   xstrdup_mask_random.
   TYPE is some string to identify the purpose of this function to the
   linker or collect2; it must start with an uppercase letter,
   one of:
   I - for constructors
   D - for destructors
   N - for C++ anonymous namespaces
   F - for DWARF unwind frame information.  */

tree
get_file_function_name (const char *type)
{
  char *buf;
  const char *p;
  char *q;

  /* If we already have a name we know to be unique, just use that.
     Note Q aliases P so that clean_symbol_name below can modify the
     string through Q.  */
  if (first_global_object_name)
    p = q = ASTRDUP (first_global_object_name);
  /* If the target is handling the constructors/destructors, they
     will be local to this file and the name is only necessary for
     debugging purposes.
     We also assign sub_I and sub_D suffixes to constructors called from
     the global static constructors.  These are always local.  */
  else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
	   || (strncmp (type, "sub_", 4) == 0
	       && (type[4] == 'I' || type[4] == 'D')))
    {
      const char *file = main_input_filename;
      if (! file)
	file = LOCATION_FILE (input_location);
      /* Just use the file's basename, because the full pathname
	 might be quite long.  */
      p = q = ASTRDUP (lbasename (file));
    }
  else
    {
      /* Otherwise, the name must be unique across the entire link.
	 We don't have anything that we know to be unique to this translation
	 unit, so use what we do have and throw in some randomness.  */
      unsigned len;
      const char *name = weak_global_object_name;
      const char *file = main_input_filename;

      if (! name)
	name = "";
      if (! file)
	file = LOCATION_FILE (input_location);

      len = strlen (file);
      /* Reserve room for the filename plus "_%08X_", the hex random
	 seed, and a trailing NUL; the snprintf bound below caps the
	 suffix write.  */
      q = (char *) alloca (9 + 19 + len + 1);
      memcpy (q, file, len + 1);

      snprintf (q + len, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
		crc32_string (0, name), get_random_seed (false));

      p = q;
    }

  /* Replace any characters that are not valid in a symbol.  */
  clean_symbol_name (q);

  buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
			 + strlen (type));

  /* Set up the name of the file-level functions we may need.
     Use a global object (which is already required to be unique over
     the program) rather than the file name (which imposes extra
     constraints).  */
  sprintf (buf, FILE_FUNCTION_FORMAT, type, p);

  return get_identifier (buf);
}
9644 \f
9645 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9646
/* Complain that the tree code of NODE does not match the expected 0
   terminated list of trailing codes.  The trailing code list can be
   empty, for a more vague error message.  FILE, LINE, and FUNCTION
   are of the caller.  */

void
tree_check_failed (const_tree node, const char *file,
		   int line, const char *function, ...)
{
  va_list args;
  const char *buffer;
  unsigned length = 0;
  enum tree_code code;

  /* First pass over the varargs: compute an upper bound on the length
     of the "expected ... or ..." message.  */
  va_start (args, function);
  while ((code = (enum tree_code) va_arg (args, int)))
    length += 4 + strlen (get_tree_code_name (code));
  va_end (args);
  if (length)
    {
      char *tmp;
      /* Second pass: build the message into a stack buffer.  */
      va_start (args, function);
      length += strlen ("expected ");
      buffer = tmp = (char *) alloca (length);
      length = 0;
      while ((code = (enum tree_code) va_arg (args, int)))
	{
	  /* The first name gets "expected ", later ones " or ".  */
	  const char *prefix = length ? " or " : "expected ";

	  strcpy (tmp + length, prefix);
	  length += strlen (prefix);
	  strcpy (tmp + length, get_tree_code_name (code));
	  length += strlen (get_tree_code_name (code));
	}
      va_end (args);
    }
  else
    /* No codes were supplied; fall back to a generic complaint.  */
    buffer = "unexpected node";

  internal_error ("tree check: %s, have %s in %s, at %s:%d",
		  buffer, get_tree_code_name (TREE_CODE (node)),
		  function, trim_filename (file), line);
}
9690
9691 /* Complain that the tree code of NODE does match the expected 0
9692 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
9693 the caller. */
9694
9695 void
9696 tree_not_check_failed (const_tree node, const char *file,
9697 int line, const char *function, ...)
9698 {
9699 va_list args;
9700 char *buffer;
9701 unsigned length = 0;
9702 enum tree_code code;
9703
9704 va_start (args, function);
9705 while ((code = (enum tree_code) va_arg (args, int)))
9706 length += 4 + strlen (get_tree_code_name (code));
9707 va_end (args);
9708 va_start (args, function);
9709 buffer = (char *) alloca (length);
9710 length = 0;
9711 while ((code = (enum tree_code) va_arg (args, int)))
9712 {
9713 if (length)
9714 {
9715 strcpy (buffer + length, " or ");
9716 length += 4;
9717 }
9718 strcpy (buffer + length, get_tree_code_name (code));
9719 length += strlen (get_tree_code_name (code));
9720 }
9721 va_end (args);
9722
9723 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9724 buffer, get_tree_code_name (TREE_CODE (node)),
9725 function, trim_filename (file), line);
9726 }
9727
9728 /* Similar to tree_check_failed, except that we check for a class of tree
9729 code, given in CL. */
9730
9731 void
9732 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9733 const char *file, int line, const char *function)
9734 {
9735 internal_error
9736 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9737 TREE_CODE_CLASS_STRING (cl),
9738 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9739 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9740 }
9741
9742 /* Similar to tree_check_failed, except that instead of specifying a
9743 dozen codes, use the knowledge that they're all sequential. */
9744
9745 void
9746 tree_range_check_failed (const_tree node, const char *file, int line,
9747 const char *function, enum tree_code c1,
9748 enum tree_code c2)
9749 {
9750 char *buffer;
9751 unsigned length = 0;
9752 unsigned int c;
9753
9754 for (c = c1; c <= c2; ++c)
9755 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9756
9757 length += strlen ("expected ");
9758 buffer = (char *) alloca (length);
9759 length = 0;
9760
9761 for (c = c1; c <= c2; ++c)
9762 {
9763 const char *prefix = length ? " or " : "expected ";
9764
9765 strcpy (buffer + length, prefix);
9766 length += strlen (prefix);
9767 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9768 length += strlen (get_tree_code_name ((enum tree_code) c));
9769 }
9770
9771 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9772 buffer, get_tree_code_name (TREE_CODE (node)),
9773 function, trim_filename (file), line);
9774 }
9775
9776
9777 /* Similar to tree_check_failed, except that we check that a tree does
9778 not have the specified code, given in CL. */
9779
9780 void
9781 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9782 const char *file, int line, const char *function)
9783 {
9784 internal_error
9785 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9786 TREE_CODE_CLASS_STRING (cl),
9787 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9788 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9789 }
9790
9791
9792 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9793
9794 void
9795 omp_clause_check_failed (const_tree node, const char *file, int line,
9796 const char *function, enum omp_clause_code code)
9797 {
9798 internal_error ("tree check: expected %<omp_clause %s%>, have %qs "
9799 "in %s, at %s:%d",
9800 omp_clause_code_name[code],
9801 get_tree_code_name (TREE_CODE (node)),
9802 function, trim_filename (file), line);
9803 }
9804
9805
9806 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9807
9808 void
9809 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9810 const char *function, enum omp_clause_code c1,
9811 enum omp_clause_code c2)
9812 {
9813 char *buffer;
9814 unsigned length = 0;
9815 unsigned int c;
9816
9817 for (c = c1; c <= c2; ++c)
9818 length += 4 + strlen (omp_clause_code_name[c]);
9819
9820 length += strlen ("expected ");
9821 buffer = (char *) alloca (length);
9822 length = 0;
9823
9824 for (c = c1; c <= c2; ++c)
9825 {
9826 const char *prefix = length ? " or " : "expected ";
9827
9828 strcpy (buffer + length, prefix);
9829 length += strlen (prefix);
9830 strcpy (buffer + length, omp_clause_code_name[c]);
9831 length += strlen (omp_clause_code_name[c]);
9832 }
9833
9834 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9835 buffer, omp_clause_code_name[TREE_CODE (node)],
9836 function, trim_filename (file), line);
9837 }
9838
9839
/* Table mapping each tree_node_structure_enum value to its printable
   name, generated from treestruct.def.  */
#undef DEFTREESTRUCT
#define DEFTREESTRUCT(VAL, NAME) NAME,

static const char *ts_enum_names[] = {
#include "treestruct.def"
};
#undef DEFTREESTRUCT

/* Look up the printable name for structure enum EN.  */
#define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9849
/* Similar to tree_class_check_failed, except that we check for
   whether CODE contains the tree structure identified by EN.
   FILE, LINE, and FUNCTION are of the caller.  */

void
tree_contains_struct_check_failed (const_tree node,
				   const enum tree_node_structure_enum en,
				   const char *file, int line,
				   const char *function)
{
  internal_error
    ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
     TS_ENUM_NAME (en),
     get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
}
9864
9865
/* Similar to above, except that the check is for the bounds of a
   TREE_INT_CST's (dynamically sized) array of elements.  (The old
   comment wrongly said TREE_VEC.)  */

void
tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
			       const char *function)
{
  internal_error
    ("tree check: accessed elt %d of %<tree_int_cst%> with %d elts in %s, "
     "at %s:%d",
     idx + 1, len, function, trim_filename (file), line);
}
9878
9879 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9880 (dynamically sized) vector. */
9881
9882 void
9883 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9884 const char *function)
9885 {
9886 internal_error
9887 ("tree check: accessed elt %d of %<tree_vec%> with %d elts in %s, at %s:%d",
9888 idx + 1, len, function, trim_filename (file), line);
9889 }
9890
9891 /* Similar to above, except that the check is for the bounds of the operand
9892 vector of an expression node EXP. */
9893
9894 void
9895 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9896 int line, const char *function)
9897 {
9898 enum tree_code code = TREE_CODE (exp);
9899 internal_error
9900 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9901 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9902 function, trim_filename (file), line);
9903 }
9904
9905 /* Similar to above, except that the check is for the number of
9906 operands of an OMP_CLAUSE node. */
9907
9908 void
9909 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9910 int line, const char *function)
9911 {
9912 internal_error
9913 ("tree check: accessed operand %d of %<omp_clause %s%> with %d operands "
9914 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9915 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9916 trim_filename (file), line);
9917 }
9918 #endif /* ENABLE_TREE_CHECKING */
9919 \f
/* Create a new vector type node holding NUNITS units of type INNERTYPE,
   and mapped to the machine mode MODE.  Initialize its fields and build
   the information necessary for debugging output.  */

static tree
make_vector_type (tree innertype, poly_int64 nunits, machine_mode mode)
{
  tree t;
  /* Base the vector on the main variant of the element type, so that
     equivalent vectors share a main variant.  */
  tree mv_innertype = TYPE_MAIN_VARIANT (innertype);

  t = make_node (VECTOR_TYPE);
  TREE_TYPE (t) = mv_innertype;
  SET_TYPE_VECTOR_SUBPARTS (t, nunits);
  SET_TYPE_MODE (t, mode);

  if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if ((TYPE_CANONICAL (mv_innertype) != innertype
	    || mode != VOIDmode)
	   && !VECTOR_BOOLEAN_TYPE_P (t))
    /* Build the canonical type recursively from the canonical element
       type, with VOIDmode as the requested mode.  */
    TYPE_CANONICAL (t)
      = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);

  layout_type (t);

  /* Enter the type in the hash table; this may return a pre-existing
     identical type in place of T.  */
  hashval_t hash = type_hash_canon_hash (t);
  t = type_hash_canon (hash, t);

  /* We have built a main variant, based on the main variant of the
     inner type.  Use it to build the variant we return.  */
  if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
      && TREE_TYPE (t) != innertype)
    return build_type_attribute_qual_variant (t,
					      TYPE_ATTRIBUTES (innertype),
					      TYPE_QUALS (innertype));

  return t;
}
9958
/* Return an integer type with SIZE bits of precision, unsigned if
   UNSIGNEDP is nonzero.  Reuse one of the standard C type nodes (or an
   enabled __intN node) whose size matches; otherwise create a new
   type.  */

static tree
make_or_reuse_type (unsigned size, int unsignedp)
{
  int i;

  if (size == INT_TYPE_SIZE)
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (size == CHAR_TYPE_SIZE)
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (size == SHORT_TYPE_SIZE)
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (size == LONG_TYPE_SIZE)
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (size == LONG_LONG_TYPE_SIZE)
    return (unsignedp ? long_long_unsigned_type_node
            : long_long_integer_type_node);

  /* Check the target's enabled __intN types as well.  */
  for (i = 0; i < NUM_INT_N_ENTS; i ++)
    if (size == int_n_data[i].bitsize
	&& int_n_enabled_p[i])
      return (unsignedp ? int_n_trees[i].unsigned_type
	      : int_n_trees[i].signed_type);

  /* No standard node matches; make a fresh type.  */
  if (unsignedp)
    return make_unsigned_type (size);
  else
    return make_signed_type (size);
}
9987
/* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP.  */

static tree
make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
{
  /* Try each of the standard fract widths in turn; fall through to
     creating a fresh type when none matches SIZE.  */
  if (satp)
    {
      if (size == SHORT_FRACT_TYPE_SIZE)
	return unsignedp ? sat_unsigned_short_fract_type_node
			 : sat_short_fract_type_node;
      if (size == FRACT_TYPE_SIZE)
	return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
      if (size == LONG_FRACT_TYPE_SIZE)
	return unsignedp ? sat_unsigned_long_fract_type_node
			 : sat_long_fract_type_node;
      if (size == LONG_LONG_FRACT_TYPE_SIZE)
	return unsignedp ? sat_unsigned_long_long_fract_type_node
			 : sat_long_long_fract_type_node;
    }
  else
    {
      if (size == SHORT_FRACT_TYPE_SIZE)
	return unsignedp ? unsigned_short_fract_type_node
			 : short_fract_type_node;
      if (size == FRACT_TYPE_SIZE)
	return unsignedp ? unsigned_fract_type_node : fract_type_node;
      if (size == LONG_FRACT_TYPE_SIZE)
	return unsignedp ? unsigned_long_fract_type_node
			 : long_fract_type_node;
      if (size == LONG_LONG_FRACT_TYPE_SIZE)
	return unsignedp ? unsigned_long_long_fract_type_node
			 : long_long_fract_type_node;
    }

  return make_fract_type (size, unsignedp, satp);
}
10024
/* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP.  */

static tree
make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
{
  /* Try each of the standard accum widths in turn; fall through to
     creating a fresh type when none matches SIZE.  */
  if (satp)
    {
      if (size == SHORT_ACCUM_TYPE_SIZE)
	return unsignedp ? sat_unsigned_short_accum_type_node
			 : sat_short_accum_type_node;
      if (size == ACCUM_TYPE_SIZE)
	return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
      if (size == LONG_ACCUM_TYPE_SIZE)
	return unsignedp ? sat_unsigned_long_accum_type_node
			 : sat_long_accum_type_node;
      if (size == LONG_LONG_ACCUM_TYPE_SIZE)
	return unsignedp ? sat_unsigned_long_long_accum_type_node
			 : sat_long_long_accum_type_node;
    }
  else
    {
      if (size == SHORT_ACCUM_TYPE_SIZE)
	return unsignedp ? unsigned_short_accum_type_node
			 : short_accum_type_node;
      if (size == ACCUM_TYPE_SIZE)
	return unsignedp ? unsigned_accum_type_node : accum_type_node;
      if (size == LONG_ACCUM_TYPE_SIZE)
	return unsignedp ? unsigned_long_accum_type_node
			 : long_accum_type_node;
      if (size == LONG_LONG_ACCUM_TYPE_SIZE)
	return unsignedp ? unsigned_long_long_accum_type_node
			 : long_long_accum_type_node;
    }

  return make_accum_type (size, unsignedp, satp);
}
10061
10062
10063 /* Create an atomic variant node for TYPE. This routine is called
10064 during initialization of data types to create the 5 basic atomic
10065 types. The generic build_variant_type function requires these to
10066 already be set up in order to function properly, so cannot be
10067 called from there. If ALIGN is non-zero, then ensure alignment is
10068 overridden to this value. */
10069
10070 static tree
10071 build_atomic_base (tree type, unsigned int align)
10072 {
10073 tree t;
10074
10075 /* Make sure its not already registered. */
10076 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
10077 return t;
10078
10079 t = build_variant_type_copy (type);
10080 set_type_quals (t, TYPE_QUAL_ATOMIC);
10081
10082 if (align)
10083 SET_TYPE_ALIGN (t, align);
10084
10085 return t;
10086 }
10087
/* Information about the _FloatN and _FloatNx types.  This must be in
   the same order as the corresponding TI_* enum values.  Each entry is
   { N, extended }, where EXTENDED distinguishes _FloatNx from
   _FloatN.  */
const floatn_type_info floatn_nx_types[NUM_FLOATN_NX_TYPES] =
  {
    { 16, false },	/* _Float16 */
    { 32, false },	/* _Float32 */
    { 64, false },	/* _Float64 */
    { 128, false },	/* _Float128 */
    { 32, true },	/* _Float32x */
    { 64, true },	/* _Float64x */
    { 128, true },	/* _Float128x */
  };
10100
10101
/* Create nodes for all integer types (and error_mark_node) using the sizes
   of C datatypes.  SIGNED_CHAR specifies whether char is signed.  */

void
build_common_tree_nodes (bool signed_char)
{
  int i;

  /* error_mark_node's TREE_TYPE points at itself.  */
  error_mark_node = make_node (ERROR_MARK);
  TREE_TYPE (error_mark_node) = error_mark_node;

  initialize_sizetypes ();

  /* Define both `signed char' and `unsigned char'.  */
  signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
  TYPE_STRING_FLAG (signed_char_type_node) = 1;
  unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
  TYPE_STRING_FLAG (unsigned_char_type_node) = 1;

  /* Define `char', which is like either `signed char' or `unsigned char'
     but not the same as either.  */
  char_type_node
    = (signed_char
       ? make_signed_type (CHAR_TYPE_SIZE)
       : make_unsigned_type (CHAR_TYPE_SIZE));
  TYPE_STRING_FLAG (char_type_node) = 1;

  short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
  short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
  integer_type_node = make_signed_type (INT_TYPE_SIZE);
  unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
  long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
  long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
  long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
  long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);

  /* Build signed/unsigned __intN pairs; only enabled entries are
     registered in the integer_types table.  */
  for (i = 0; i < NUM_INT_N_ENTS; i ++)
    {
      int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
      int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);

      if (int_n_enabled_p[i])
	{
	  integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
	  integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
	}
    }

  /* Define a boolean type.  This type only represents boolean values but
     may be larger than char depending on the value of BOOL_TYPE_SIZE.  */
  boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
  TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
  TYPE_PRECISION (boolean_type_node) = 1;
  TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);

  /* Define what type to use for size_t.  */
  if (strcmp (SIZE_TYPE, "unsigned int") == 0)
    size_type_node = unsigned_type_node;
  else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
    size_type_node = long_unsigned_type_node;
  else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
    size_type_node = long_long_unsigned_type_node;
  else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
    size_type_node = short_unsigned_type_node;
  else
    {
      int i;

      /* SIZE_TYPE must then name one of the enabled __intN types.  */
      size_type_node = NULL_TREE;
      for (i = 0; i < NUM_INT_N_ENTS; i++)
	if (int_n_enabled_p[i])
	  {
	    char name[50], altname[50];
	    sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
	    sprintf (altname, "__int%d__ unsigned", int_n_data[i].bitsize);

	    if (strcmp (name, SIZE_TYPE) == 0
		|| strcmp (altname, SIZE_TYPE) == 0)
	      {
		size_type_node = int_n_trees[i].unsigned_type;
	      }
	  }
      if (size_type_node == NULL_TREE)
	gcc_unreachable ();
    }

  /* Define what type to use for ptrdiff_t.  */
  if (strcmp (PTRDIFF_TYPE, "int") == 0)
    ptrdiff_type_node = integer_type_node;
  else if (strcmp (PTRDIFF_TYPE, "long int") == 0)
    ptrdiff_type_node = long_integer_type_node;
  else if (strcmp (PTRDIFF_TYPE, "long long int") == 0)
    ptrdiff_type_node = long_long_integer_type_node;
  else if (strcmp (PTRDIFF_TYPE, "short int") == 0)
    ptrdiff_type_node = short_integer_type_node;
  else
    {
      /* PTRDIFF_TYPE must then name one of the enabled __intN types.  */
      ptrdiff_type_node = NULL_TREE;
      for (int i = 0; i < NUM_INT_N_ENTS; i++)
	if (int_n_enabled_p[i])
	  {
	    char name[50], altname[50];
	    sprintf (name, "__int%d", int_n_data[i].bitsize);
	    sprintf (altname, "__int%d__", int_n_data[i].bitsize);

	    if (strcmp (name, PTRDIFF_TYPE) == 0
		|| strcmp (altname, PTRDIFF_TYPE) == 0)
	      ptrdiff_type_node = int_n_trees[i].signed_type;
	  }
      if (ptrdiff_type_node == NULL_TREE)
	gcc_unreachable ();
    }

  /* Fill in the rest of the sized types.  Reuse existing type nodes
     when possible.  */
  intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
  intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
  intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
  intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
  intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);

  unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
  unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
  unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
  unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
  unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);

  /* Don't call build_qualified type for atomics.  That routine does
     special processing for atomics, and until they are initialized
     it's better not to make that call.

     Check to see if there is a target override for atomic types.  */

  atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
					targetm.atomic_align_for_mode (QImode));
  atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
					targetm.atomic_align_for_mode (HImode));
  atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
					targetm.atomic_align_for_mode (SImode));
  atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
					targetm.atomic_align_for_mode (DImode));
  atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
					targetm.atomic_align_for_mode (TImode));

  access_public_node = get_identifier ("public");
  access_protected_node = get_identifier ("protected");
  access_private_node = get_identifier ("private");

  /* Define these next since types below may used them.  */
  integer_zero_node = build_int_cst (integer_type_node, 0);
  integer_one_node = build_int_cst (integer_type_node, 1);
  integer_three_node = build_int_cst (integer_type_node, 3);
  integer_minus_one_node = build_int_cst (integer_type_node, -1);

  size_zero_node = size_int (0);
  size_one_node = size_int (1);
  bitsize_zero_node = bitsize_int (0);
  bitsize_one_node = bitsize_int (1);
  bitsize_unit_node = bitsize_int (BITS_PER_UNIT);

  boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
  boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);

  void_type_node = make_node (VOID_TYPE);
  layout_type (void_type_node);

  /* We are not going to have real types in C with less than byte alignment,
     so we might as well not have any types that claim to have it.  */
  SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
  TYPE_USER_ALIGN (void_type_node) = 0;

  void_node = make_node (VOID_CST);
  TREE_TYPE (void_node) = void_type_node;

  null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
  layout_type (TREE_TYPE (null_pointer_node));

  ptr_type_node = build_pointer_type (void_type_node);
  const_ptr_type_node
    = build_pointer_type (build_type_variant (void_type_node, 1, 0));
  /* Start the builtin struct-pointer types off at their base types.  */
  for (unsigned i = 0;
       i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
       ++i)
    builtin_structptr_types[i].node = builtin_structptr_types[i].base;

  pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);

  float_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
  layout_type (float_type_node);

  double_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
  layout_type (double_type_node);

  long_double_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
  layout_type (long_double_type_node);

  /* Build the _FloatN/_FloatNx types for every mode the target
     supports.  */
  for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
    {
      int n = floatn_nx_types[i].n;
      bool extended = floatn_nx_types[i].extended;
      scalar_float_mode mode;
      if (!targetm.floatn_mode (n, extended).exists (&mode))
	continue;
      int precision = GET_MODE_PRECISION (mode);
      /* Work around the rs6000 KFmode having precision 113 not
	 128.  */
      const struct real_format *fmt = REAL_MODE_FORMAT (mode);
      gcc_assert (fmt->b == 2 && fmt->emin + fmt->emax == 3);
      int min_precision = fmt->p + ceil_log2 (fmt->emax - fmt->emin);
      if (!extended)
	gcc_assert (min_precision == n);
      if (precision < min_precision)
	precision = min_precision;
      FLOATN_NX_TYPE_NODE (i) = make_node (REAL_TYPE);
      TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i)) = precision;
      layout_type (FLOATN_NX_TYPE_NODE (i));
      SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i), mode);
    }

  float_ptr_type_node = build_pointer_type (float_type_node);
  double_ptr_type_node = build_pointer_type (double_type_node);
  long_double_ptr_type_node = build_pointer_type (long_double_type_node);
  integer_ptr_type_node = build_pointer_type (integer_type_node);

  /* Fixed size integer types.  */
  uint16_type_node = make_or_reuse_type (16, 1);
  uint32_type_node = make_or_reuse_type (32, 1);
  uint64_type_node = make_or_reuse_type (64, 1);

  /* Decimal float types. */
  if (targetm.decimal_float_supported_p ())
    {
      dfloat32_type_node = make_node (REAL_TYPE);
      TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
      SET_TYPE_MODE (dfloat32_type_node, SDmode);
      layout_type (dfloat32_type_node);

      dfloat64_type_node = make_node (REAL_TYPE);
      TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
      SET_TYPE_MODE (dfloat64_type_node, DDmode);
      layout_type (dfloat64_type_node);

      dfloat128_type_node = make_node (REAL_TYPE);
      TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
      SET_TYPE_MODE (dfloat128_type_node, TDmode);
      layout_type (dfloat128_type_node);
    }

  complex_integer_type_node = build_complex_type (integer_type_node, true);
  complex_float_type_node = build_complex_type (float_type_node, true);
  complex_double_type_node = build_complex_type (double_type_node, true);
  complex_long_double_type_node = build_complex_type (long_double_type_node,
						      true);

  /* Complex counterparts of the supported _FloatN/_FloatNx types.  */
  for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
    {
      if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE)
	COMPLEX_FLOATN_NX_TYPE_NODE (i)
	  = build_complex_type (FLOATN_NX_TYPE_NODE (i));
    }

  /* Make fixed-point nodes based on sat/non-sat and signed/unsigned.  */
#define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
  sat_ ## KIND ## _type_node = \
    make_sat_signed_ ## KIND ## _type (SIZE); \
  sat_unsigned_ ## KIND ## _type_node = \
    make_sat_unsigned_ ## KIND ## _type (SIZE); \
  KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
  unsigned_ ## KIND ## _type_node = \
    make_unsigned_ ## KIND ## _type (SIZE);

#define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
  sat_ ## WIDTH ## KIND ## _type_node = \
    make_sat_signed_ ## KIND ## _type (SIZE); \
  sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
    make_sat_unsigned_ ## KIND ## _type (SIZE); \
  WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
  unsigned_ ## WIDTH ## KIND ## _type_node = \
    make_unsigned_ ## KIND ## _type (SIZE);

  /* Make fixed-point type nodes based on four different widths.  */
#define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
  MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
  MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
  MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
  MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)

  /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned.  */
#define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
  NAME ## _type_node = \
    make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
  u ## NAME ## _type_node = \
    make_or_reuse_unsigned_ ## KIND ## _type \
		(GET_MODE_BITSIZE (U ## MODE ## mode)); \
  sat_ ## NAME ## _type_node = \
    make_or_reuse_sat_signed_ ## KIND ## _type \
		(GET_MODE_BITSIZE (MODE ## mode)); \
  sat_u ## NAME ## _type_node = \
    make_or_reuse_sat_unsigned_ ## KIND ## _type \
		(GET_MODE_BITSIZE (U ## MODE ## mode));

  /* Fixed-point type and mode nodes.  */
  MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
  MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
  MAKE_FIXED_MODE_NODE (fract, qq, QQ)
  MAKE_FIXED_MODE_NODE (fract, hq, HQ)
  MAKE_FIXED_MODE_NODE (fract, sq, SQ)
  MAKE_FIXED_MODE_NODE (fract, dq, DQ)
  MAKE_FIXED_MODE_NODE (fract, tq, TQ)
  MAKE_FIXED_MODE_NODE (accum, ha, HA)
  MAKE_FIXED_MODE_NODE (accum, sa, SA)
  MAKE_FIXED_MODE_NODE (accum, da, DA)
  MAKE_FIXED_MODE_NODE (accum, ta, TA)

  {
    tree t = targetm.build_builtin_va_list ();

    /* Many back-ends define record types without setting TYPE_NAME.
       If we copied the record type here, we'd keep the original
       record type without a name.  This breaks name mangling.  So,
       don't copy record types and let c_common_nodes_and_builtins()
       declare the type to be __builtin_va_list.  */
    if (TREE_CODE (t) != RECORD_TYPE)
      t = build_variant_type_copy (t);

    va_list_type_node = t;
  }

  /* SCEV analyzer global shared trees.  */
  chrec_dont_know = make_node (SCEV_NOT_KNOWN);
  TREE_TYPE (chrec_dont_know) = void_type_node;
  chrec_known = make_node (SCEV_KNOWN);
  TREE_TYPE (chrec_known) = void_type_node;
}
10439
/* Modify DECL for given flags.
   TM_PURE attribute is set only on types, so the function will modify
   DECL's type when ECF_TM_PURE is used.  */

void
set_call_expr_flags (tree decl, int flags)
{
  /* Properties with a dedicated bit on the decl map directly.  */
  if (flags & ECF_NOTHROW)
    TREE_NOTHROW (decl) = 1;
  if (flags & ECF_CONST)
    TREE_READONLY (decl) = 1;
  if (flags & ECF_PURE)
    DECL_PURE_P (decl) = 1;
  if (flags & ECF_LOOPING_CONST_OR_PURE)
    DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
  if (flags & ECF_NOVOPS)
    DECL_IS_NOVOPS (decl) = 1;
  /* Noreturn is represented by TREE_THIS_VOLATILE on a FUNCTION_DECL.  */
  if (flags & ECF_NORETURN)
    TREE_THIS_VOLATILE (decl) = 1;
  if (flags & ECF_MALLOC)
    DECL_IS_MALLOC (decl) = 1;
  if (flags & ECF_RETURNS_TWICE)
    DECL_IS_RETURNS_TWICE (decl) = 1;
  /* Properties without a dedicated bit are recorded as attributes,
     prepended to the decl's existing attribute list.  */
  if (flags & ECF_LEAF)
    DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
					NULL, DECL_ATTRIBUTES (decl));
  if (flags & ECF_COLD)
    DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("cold"),
					NULL, DECL_ATTRIBUTES (decl));
  /* ECF_RET1 (function returns its first argument) is encoded as a
     "fn spec" attribute whose spec string is "1".  */
  if (flags & ECF_RET1)
    DECL_ATTRIBUTES (decl)
      = tree_cons (get_identifier ("fn spec"),
		   build_tree_list (NULL_TREE, build_string (1, "1")),
		   DECL_ATTRIBUTES (decl));
  if ((flags & ECF_TM_PURE) && flag_tm)
    apply_tm_attr (decl, get_identifier ("transaction_pure"));
  /* Looping const or pure is implied by noreturn.
     There is currently no way to declare looping const or looping pure alone.  */
  gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
	      || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
}
10481
10482
10483 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
10484
10485 static void
10486 local_define_builtin (const char *name, tree type, enum built_in_function code,
10487 const char *library_name, int ecf_flags)
10488 {
10489 tree decl;
10490
10491 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
10492 library_name, NULL_TREE);
10493 set_call_expr_flags (decl, ecf_flags);
10494
10495 set_builtin_decl (code, decl, true);
10496 }
10497
/* Call this function after instantiating all builtins that the language
   front end cares about.  This will build the rest of the builtins
   and internal functions that are relied upon by the tree optimizers and
   the middle-end.  */

void
build_common_builtin_nodes (void)
{
  tree tmp, ftype;
  int ecf_flags;

  if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)
      || !builtin_decl_explicit_p (BUILT_IN_ABORT))
    {
      ftype = build_function_type (void_type_node, void_list_node);
      if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
	local_define_builtin ("__builtin_unreachable", ftype,
			      BUILT_IN_UNREACHABLE,
			      "__builtin_unreachable",
			      ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
			      | ECF_CONST | ECF_COLD);
      if (!builtin_decl_explicit_p (BUILT_IN_ABORT))
	local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT,
			      "abort",
			      ECF_LEAF | ECF_NORETURN | ECF_CONST | ECF_COLD);
    }

  /* memcpy and memmove share one signature; both return their first
     argument (ECF_RET1).  */
  if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
      || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
    {
      ftype = build_function_type_list (ptr_type_node,
					ptr_type_node, const_ptr_type_node,
					size_type_node, NULL_TREE);

      if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
	local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
			      "memcpy", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
      if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
	local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
			      "memmove", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
    }

  if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
    {
      ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
					const_ptr_type_node, size_type_node,
					NULL_TREE);
      local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
			    "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
    }

  if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
    {
      ftype = build_function_type_list (ptr_type_node,
					ptr_type_node, integer_type_node,
					size_type_node, NULL_TREE);
      local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
			    "memset", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
    }

  /* If we're checking the stack, `alloca' can throw.  */
  const int alloca_flags
    = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 0 : ECF_NOTHROW);

  if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
    {
      ftype = build_function_type_list (ptr_type_node,
					size_type_node, NULL_TREE);
      local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
			    "alloca", alloca_flags);
    }

  ftype = build_function_type_list (ptr_type_node, size_type_node,
				    size_type_node, NULL_TREE);
  local_define_builtin ("__builtin_alloca_with_align", ftype,
			BUILT_IN_ALLOCA_WITH_ALIGN,
			"__builtin_alloca_with_align",
			alloca_flags);

  ftype = build_function_type_list (ptr_type_node, size_type_node,
				    size_type_node, size_type_node, NULL_TREE);
  local_define_builtin ("__builtin_alloca_with_align_and_max", ftype,
			BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX,
			"__builtin_alloca_with_align_and_max",
			alloca_flags);

  /* Trampoline and descriptor initializers all take (void *, void *,
     void *) and share one FTYPE.  */
  ftype = build_function_type_list (void_type_node,
				    ptr_type_node, ptr_type_node,
				    ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_init_trampoline", ftype,
			BUILT_IN_INIT_TRAMPOLINE,
			"__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
  local_define_builtin ("__builtin_init_heap_trampoline", ftype,
			BUILT_IN_INIT_HEAP_TRAMPOLINE,
			"__builtin_init_heap_trampoline",
			ECF_NOTHROW | ECF_LEAF);
  local_define_builtin ("__builtin_init_descriptor", ftype,
			BUILT_IN_INIT_DESCRIPTOR,
			"__builtin_init_descriptor", ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_adjust_trampoline", ftype,
			BUILT_IN_ADJUST_TRAMPOLINE,
			"__builtin_adjust_trampoline",
			ECF_CONST | ECF_NOTHROW);
  local_define_builtin ("__builtin_adjust_descriptor", ftype,
			BUILT_IN_ADJUST_DESCRIPTOR,
			"__builtin_adjust_descriptor",
			ECF_CONST | ECF_NOTHROW);

  ftype = build_function_type_list (void_type_node,
				    ptr_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_nonlocal_goto", ftype,
			BUILT_IN_NONLOCAL_GOTO,
			"__builtin_nonlocal_goto",
			ECF_NORETURN | ECF_NOTHROW);

  ftype = build_function_type_list (void_type_node,
				    ptr_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_setjmp_setup", ftype,
			BUILT_IN_SETJMP_SETUP,
			"__builtin_setjmp_setup", ECF_NOTHROW);

  ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_setjmp_receiver", ftype,
			BUILT_IN_SETJMP_RECEIVER,
			"__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
			"__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_stack_restore", ftype,
			BUILT_IN_STACK_RESTORE,
			"__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);

  /* The equality-only comparison helpers reuse the memcmp signature.  */
  ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
				    const_ptr_type_node, size_type_node,
				    NULL_TREE);
  local_define_builtin ("__builtin_memcmp_eq", ftype, BUILT_IN_MEMCMP_EQ,
			"__builtin_memcmp_eq",
			ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  local_define_builtin ("__builtin_strncmp_eq", ftype, BUILT_IN_STRNCMP_EQ,
			"__builtin_strncmp_eq",
			ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  local_define_builtin ("__builtin_strcmp_eq", ftype, BUILT_IN_STRCMP_EQ,
			"__builtin_strcmp_eq",
			ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  /* If there's a possibility that we might use the ARM EABI, build the
     alternate __cxa_end_cleanup node used to resume from C++.  */
  if (targetm.arm_eabi_unwinder)
    {
      ftype = build_function_type_list (void_type_node, NULL_TREE);
      local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
			    BUILT_IN_CXA_END_CLEANUP,
			    "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
    }

  /* The library name depends on whether SJLJ or DWARF-style unwinding
     is in use for this configuration.  */
  ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_unwind_resume", ftype,
			BUILT_IN_UNWIND_RESUME,
			((targetm_common.except_unwind_info (&global_options)
			  == UI_SJLJ)
			 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
			ECF_NORETURN);

  if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
    {
      ftype = build_function_type_list (ptr_type_node, integer_type_node,
					NULL_TREE);
      local_define_builtin ("__builtin_return_address", ftype,
			    BUILT_IN_RETURN_ADDRESS,
			    "__builtin_return_address",
			    ECF_NOTHROW);
    }

  if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
      || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
    {
      ftype = build_function_type_list (void_type_node, ptr_type_node,
					ptr_type_node, NULL_TREE);
      if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
	local_define_builtin ("__cyg_profile_func_enter", ftype,
			      BUILT_IN_PROFILE_FUNC_ENTER,
			      "__cyg_profile_func_enter", 0);
      if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
	local_define_builtin ("__cyg_profile_func_exit", ftype,
			      BUILT_IN_PROFILE_FUNC_EXIT,
			      "__cyg_profile_func_exit", 0);
    }

  /* The exception object and filter values from the runtime.  The argument
     must be zero before exception lowering, i.e. from the front end.  After
     exception lowering, it will be the region number for the exception
     landing pad.  These functions are PURE instead of CONST to prevent
     them from being hoisted past the exception edge that will initialize
     its value in the landing pad.  */
  ftype = build_function_type_list (ptr_type_node,
				    integer_type_node, NULL_TREE);
  ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
  /* Only use TM_PURE if we have TM language support.  */
  if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
    ecf_flags |= ECF_TM_PURE;
  local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
			"__builtin_eh_pointer", ecf_flags);

  tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
  ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
  local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
			"__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (void_type_node,
				    integer_type_node, integer_type_node,
				    NULL_TREE);
  local_define_builtin ("__builtin_eh_copy_values", ftype,
			BUILT_IN_EH_COPY_VALUES,
			"__builtin_eh_copy_values", ECF_NOTHROW);

  /* Complex multiplication and division.  These are handled as builtins
     rather than optabs because emit_library_call_value doesn't support
     complex.  Further, we can do slightly better with folding these
     beasties if the real and complex parts of the arguments are separate.  */
  {
    int mode;

    for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
      {
	char mode_name_buf[4], *q;
	const char *p;
	enum built_in_function mcode, dcode;
	tree type, inner_type;
	const char *prefix = "__";

	if (targetm.libfunc_gnu_prefix)
	  prefix = "__gnu_";

	type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
	if (type == NULL)
	  continue;
	inner_type = TREE_TYPE (type);

	ftype = build_function_type_list (type, inner_type, inner_type,
					  inner_type, inner_type, NULL_TREE);

	mcode = ((enum built_in_function)
		 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
	dcode = ((enum built_in_function)
		 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));

	/* Build the lower-case mode name used in the libfunc names,
	   e.g. "sc" for SCmode giving __mulsc3/__divsc3.  */
	for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
	  *q = TOLOWER (*p);
	*q = '\0';

	/* For -ftrapping-math these should throw from a former
	   -fnon-call-exception stmt.  */
	built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
					NULL);
	local_define_builtin (built_in_names[mcode], ftype, mcode,
			      built_in_names[mcode],
			      ECF_CONST | ECF_LEAF);

	built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
					NULL);
	local_define_builtin (built_in_names[dcode], ftype, dcode,
			      built_in_names[dcode],
			      ECF_CONST | ECF_LEAF);
      }
  }

  init_internal_fns ();
}
10773
10774 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10775 better way.
10776
10777 If we requested a pointer to a vector, build up the pointers that
10778 we stripped off while looking for the inner type. Similarly for
10779 return values from functions.
10780
10781 The argument TYPE is the top of the chain, and BOTTOM is the
10782 new type which we will point to. */
10783
10784 tree
10785 reconstruct_complex_type (tree type, tree bottom)
10786 {
10787 tree inner, outer;
10788
10789 if (TREE_CODE (type) == POINTER_TYPE)
10790 {
10791 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10792 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10793 TYPE_REF_CAN_ALIAS_ALL (type));
10794 }
10795 else if (TREE_CODE (type) == REFERENCE_TYPE)
10796 {
10797 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10798 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10799 TYPE_REF_CAN_ALIAS_ALL (type));
10800 }
10801 else if (TREE_CODE (type) == ARRAY_TYPE)
10802 {
10803 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10804 outer = build_array_type (inner, TYPE_DOMAIN (type));
10805 }
10806 else if (TREE_CODE (type) == FUNCTION_TYPE)
10807 {
10808 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10809 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10810 }
10811 else if (TREE_CODE (type) == METHOD_TYPE)
10812 {
10813 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10814 /* The build_method_type_directly() routine prepends 'this' to argument list,
10815 so we must compensate by getting rid of it. */
10816 outer
10817 = build_method_type_directly
10818 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10819 inner,
10820 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10821 }
10822 else if (TREE_CODE (type) == OFFSET_TYPE)
10823 {
10824 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10825 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10826 }
10827 else
10828 return bottom;
10829
10830 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10831 TYPE_QUALS (type));
10832 }
10833
10834 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10835 the inner type. */
10836 tree
10837 build_vector_type_for_mode (tree innertype, machine_mode mode)
10838 {
10839 poly_int64 nunits;
10840 unsigned int bitsize;
10841
10842 switch (GET_MODE_CLASS (mode))
10843 {
10844 case MODE_VECTOR_BOOL:
10845 case MODE_VECTOR_INT:
10846 case MODE_VECTOR_FLOAT:
10847 case MODE_VECTOR_FRACT:
10848 case MODE_VECTOR_UFRACT:
10849 case MODE_VECTOR_ACCUM:
10850 case MODE_VECTOR_UACCUM:
10851 nunits = GET_MODE_NUNITS (mode);
10852 break;
10853
10854 case MODE_INT:
10855 /* Check that there are no leftover bits. */
10856 bitsize = GET_MODE_BITSIZE (as_a <scalar_int_mode> (mode));
10857 gcc_assert (bitsize % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10858 nunits = bitsize / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10859 break;
10860
10861 default:
10862 gcc_unreachable ();
10863 }
10864
10865 return make_vector_type (innertype, nunits, mode);
10866 }
10867
/* Similarly, but takes the inner type and number of units, which must be
   a power of two.  Passing VOIDmode leaves the mode choice to
   make_vector_type.  */

tree
build_vector_type (tree innertype, poly_int64 nunits)
{
  return make_vector_type (innertype, nunits, VOIDmode);
}
10876
10877 /* Build a truth vector with NUNITS units, giving it mode MASK_MODE. */
10878
10879 tree
10880 build_truth_vector_type_for_mode (poly_uint64 nunits, machine_mode mask_mode)
10881 {
10882 gcc_assert (mask_mode != BLKmode);
10883
10884 poly_uint64 vsize = GET_MODE_BITSIZE (mask_mode);
10885 unsigned HOST_WIDE_INT esize = vector_element_size (vsize, nunits);
10886 tree bool_type = build_nonstandard_boolean_type (esize);
10887
10888 return make_vector_type (bool_type, nunits, mask_mode);
10889 }
10890
10891 /* Build a vector type that holds one boolean result for each element of
10892 vector type VECTYPE. The public interface for this operation is
10893 truth_type_for. */
10894
10895 static tree
10896 build_truth_vector_type_for (tree vectype)
10897 {
10898 machine_mode vector_mode = TYPE_MODE (vectype);
10899 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
10900
10901 machine_mode mask_mode;
10902 if (VECTOR_MODE_P (vector_mode)
10903 && targetm.vectorize.get_mask_mode (vector_mode).exists (&mask_mode))
10904 return build_truth_vector_type_for_mode (nunits, mask_mode);
10905
10906 poly_uint64 vsize = tree_to_poly_uint64 (TYPE_SIZE (vectype));
10907 unsigned HOST_WIDE_INT esize = vector_element_size (vsize, nunits);
10908 tree bool_type = build_nonstandard_boolean_type (esize);
10909
10910 return make_vector_type (bool_type, nunits, BLKmode);
10911 }
10912
10913 /* Like build_vector_type, but builds a variant type with TYPE_VECTOR_OPAQUE
10914 set. */
10915
10916 tree
10917 build_opaque_vector_type (tree innertype, poly_int64 nunits)
10918 {
10919 tree t = make_vector_type (innertype, nunits, VOIDmode);
10920 tree cand;
10921 /* We always build the non-opaque variant before the opaque one,
10922 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10923 cand = TYPE_NEXT_VARIANT (t);
10924 if (cand
10925 && TYPE_VECTOR_OPAQUE (cand)
10926 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10927 return cand;
10928 /* Othewise build a variant type and make sure to queue it after
10929 the non-opaque type. */
10930 cand = build_distinct_type_copy (t);
10931 TYPE_VECTOR_OPAQUE (cand) = true;
10932 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10933 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10934 TYPE_NEXT_VARIANT (t) = cand;
10935 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10936 return cand;
10937 }
10938
/* Return the value of element I of VECTOR_CST T as a wide_int.  */

wide_int
vector_cst_int_elt (const_tree t, unsigned int i)
{
  /* First handle elements that are directly encoded.  */
  unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
  if (i < encoded_nelts)
    return wi::to_wide (VECTOR_CST_ENCODED_ELT (t, i));

  /* Identify the pattern that contains element I and work out the index of
     the last encoded element for that pattern.  */
  unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
  unsigned int pattern = i % npatterns;
  unsigned int count = i / npatterns;
  unsigned int final_i = encoded_nelts - npatterns + pattern;

  /* If there are no steps, the final encoded value is the right one.  */
  if (!VECTOR_CST_STEPPED_P (t))
    return wi::to_wide (VECTOR_CST_ENCODED_ELT (t, final_i));

  /* Otherwise work out the value from the last two encoded elements.
     The final encoded element is at position COUNT == 2 within its
     pattern (hence the COUNT - 2 below), and each later element adds
     one more step of DIFF.  */
  tree v1 = VECTOR_CST_ENCODED_ELT (t, final_i - npatterns);
  tree v2 = VECTOR_CST_ENCODED_ELT (t, final_i);
  wide_int diff = wi::to_wide (v2) - wi::to_wide (v1);
  return wi::to_wide (v2) + (count - 2) * diff;
}
10966
10967 /* Return the value of element I of VECTOR_CST T. */
10968
10969 tree
10970 vector_cst_elt (const_tree t, unsigned int i)
10971 {
10972 /* First handle elements that are directly encoded. */
10973 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
10974 if (i < encoded_nelts)
10975 return VECTOR_CST_ENCODED_ELT (t, i);
10976
10977 /* If there are no steps, the final encoded value is the right one. */
10978 if (!VECTOR_CST_STEPPED_P (t))
10979 {
10980 /* Identify the pattern that contains element I and work out the index of
10981 the last encoded element for that pattern. */
10982 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
10983 unsigned int pattern = i % npatterns;
10984 unsigned int final_i = encoded_nelts - npatterns + pattern;
10985 return VECTOR_CST_ENCODED_ELT (t, final_i);
10986 }
10987
10988 /* Otherwise work out the value from the last two encoded elements. */
10989 return wide_int_to_tree (TREE_TYPE (TREE_TYPE (t)),
10990 vector_cst_int_elt (t, i));
10991 }
10992
/* Given an initializer INIT, return TRUE if INIT is zero or some
   aggregate of zeros.  Otherwise return FALSE.  If NONZERO is not
   null, set *NONZERO if and only if INIT is known not to be all
   zeros.  The combination of return value of false and *NONZERO
   false implies that INIT may but need not be all zeros.  Other
   combinations indicate definitive answers.  */

bool
initializer_zerop (const_tree init, bool *nonzero /* = NULL */)
{
  bool dummy;
  if (!nonzero)
    nonzero = &dummy;

  /* Conservatively clear NONZERO and set it only if INIT is definitely
     not all zero.  */
  *nonzero = false;

  STRIP_NOPS (init);

  /* Byte offset into a string initializer, set by the MEM_REF case and
     consumed by the STRING_CST case it falls through to.  */
  unsigned HOST_WIDE_INT off = 0;

  switch (TREE_CODE (init))
    {
    case INTEGER_CST:
      if (integer_zerop (init))
	return true;

      *nonzero = true;
      return false;

    case REAL_CST:
      /* ??? Note that this is not correct for C4X float formats.  There,
	 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
	 negative exponent.  */
      /* -0.0 compares equal to zero but has a nonzero sign bit, so it is
	 excluded here.  */
      if (real_zerop (init)
	  && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init)))
	return true;

      *nonzero = true;
      return false;

    case FIXED_CST:
      if (fixed_zerop (init))
	return true;

      *nonzero = true;
      return false;

    case COMPLEX_CST:
      /* Both the real and imaginary parts must be zero (and, for reals,
	 neither may be -0.0).  */
      if (integer_zerop (init)
	  || (real_zerop (init)
	      && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
	      && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init)))))
	return true;

      *nonzero = true;
      return false;

    case VECTOR_CST:
      /* A vector is all zeros iff it is a duplicate of a single zero
	 element.  */
      if (VECTOR_CST_NPATTERNS (init) == 1
	  && VECTOR_CST_DUPLICATE_P (init)
	  && initializer_zerop (VECTOR_CST_ENCODED_ELT (init, 0)))
	return true;

      *nonzero = true;
      return false;

    case CONSTRUCTOR:
      {
	/* A clobber carries no initialization value at all.  */
	if (TREE_CLOBBER_P (init))
	  return false;

	unsigned HOST_WIDE_INT idx;
	tree elt;

	/* An empty constructor is all zeros; otherwise every element
	   must be.  NONZERO is threaded through so any definitely
	   nonzero element is reported.  */
	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
	  if (!initializer_zerop (elt, nonzero))
	    return false;

	return true;
      }

    case MEM_REF:
      {
	/* Handle loads from string literals, i.e. MEM_REF's of the form
	   &"..."[off]: record OFF and fall through to the STRING_CST
	   case with INIT replaced by the literal.  */
	tree arg = TREE_OPERAND (init, 0);
	if (TREE_CODE (arg) != ADDR_EXPR)
	  return false;
	tree offset = TREE_OPERAND (init, 1);
	if (TREE_CODE (offset) != INTEGER_CST
	    || !tree_fits_uhwi_p (offset))
	  return false;
	off = tree_to_uhwi (offset);
	if (INT_MAX < off)
	  return false;
	arg = TREE_OPERAND (arg, 0);
	if (TREE_CODE (arg) != STRING_CST)
	  return false;
	init = arg;
      }
      /* Fall through.  */

    case STRING_CST:
      {
	gcc_assert (off <= INT_MAX);

	int i = off;
	int n = TREE_STRING_LENGTH (init);
	if (n <= i)
	  return false;

	/* We need to loop through all elements to handle cases like
	   "\0" and "\0foobar".  */
	for (i = 0; i < n; ++i)
	  if (TREE_STRING_POINTER (init)[i] != '\0')
	    {
	      *nonzero = true;
	      return false;
	    }

	return true;
      }

    default:
      return false;
    }
}
11120
/* Return true if EXPR is an initializer expression in which every element
   is a constant that is numerically equal to 0 or 1.  The elements do not
   need to be equal to each other.  */

bool
initializer_each_zero_or_onep (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return integer_zerop (expr) || integer_onep (expr);

    case REAL_CST:
      return real_zerop (expr) || real_onep (expr);

    case VECTOR_CST:
      {
	/* By default only the encoded elements need checking, since the
	   remaining elements repeat them.  A stepped encoding generates
	   new values beyond the encoded ones, so then every element must
	   be checked; is_constant updates NELTS to the full element
	   count, which is only possible for fixed-length vectors.  */
	unsigned HOST_WIDE_INT nelts = vector_cst_encoded_nelts (expr);
	if (VECTOR_CST_STEPPED_P (expr)
	    && !TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr)).is_constant (&nelts))
	  return false;

	for (unsigned int i = 0; i < nelts; ++i)
	  {
	    tree elt = vector_cst_elt (expr, i);
	    if (!initializer_each_zero_or_onep (elt))
	      return false;
	  }

	return true;
      }

    default:
      return false;
    }
}
11159
/* Check if vector VEC consists of all the equal elements and
   that the number of elements corresponds to the type of VEC.
   The function returns first element of the vector
   or NULL_TREE if the vector is not uniform.  */
tree
uniform_vector_p (const_tree vec)
{
  tree first, t;
  unsigned HOST_WIDE_INT i, nelts;

  if (vec == NULL_TREE)
    return NULL_TREE;

  gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));

  /* A VEC_DUPLICATE_EXPR is uniform by construction.  */
  if (TREE_CODE (vec) == VEC_DUPLICATE_EXPR)
    return TREE_OPERAND (vec, 0);

  /* A VECTOR_CST is uniform iff its encoding is a single duplicated
     pattern.  */
  else if (TREE_CODE (vec) == VECTOR_CST)
    {
      if (VECTOR_CST_NPATTERNS (vec) == 1 && VECTOR_CST_DUPLICATE_P (vec))
	return VECTOR_CST_ENCODED_ELT (vec, 0);
      return NULL_TREE;
    }

  /* For a CONSTRUCTOR, compare every element against the first.  This
     is only done when the element count of the type is fixed.  */
  else if (TREE_CODE (vec) == CONSTRUCTOR
	   && TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)).is_constant (&nelts))
    {
      first = error_mark_node;

      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
	{
	  if (i == 0)
	    {
	      first = t;
	      continue;
	    }
	  if (!operand_equal_p (first, t, 0))
	    return NULL_TREE;
	}
      /* Require the constructor to supply every element; missing trailing
	 elements are implicitly zero-initialized and need not equal
	 FIRST.  (I is the number of elements iterated above.)  */
      if (i != nelts)
	return NULL_TREE;

      return first;
    }

  return NULL_TREE;
}
11208
11209 /* If the argument is INTEGER_CST, return it. If the argument is vector
11210 with all elements the same INTEGER_CST, return that INTEGER_CST. Otherwise
11211 return NULL_TREE.
11212 Look through location wrappers. */
11213
11214 tree
11215 uniform_integer_cst_p (tree t)
11216 {
11217 STRIP_ANY_LOCATION_WRAPPER (t);
11218
11219 if (TREE_CODE (t) == INTEGER_CST)
11220 return t;
11221
11222 if (VECTOR_TYPE_P (TREE_TYPE (t)))
11223 {
11224 t = uniform_vector_p (t);
11225 if (t && TREE_CODE (t) == INTEGER_CST)
11226 return t;
11227 }
11228
11229 return NULL_TREE;
11230 }
11231
11232 /* If VECTOR_CST T has a single nonzero element, return the index of that
11233 element, otherwise return -1. */
11234
11235 int
11236 single_nonzero_element (const_tree t)
11237 {
11238 unsigned HOST_WIDE_INT nelts;
11239 unsigned int repeat_nelts;
11240 if (VECTOR_CST_NELTS (t).is_constant (&nelts))
11241 repeat_nelts = nelts;
11242 else if (VECTOR_CST_NELTS_PER_PATTERN (t) == 2)
11243 {
11244 nelts = vector_cst_encoded_nelts (t);
11245 repeat_nelts = VECTOR_CST_NPATTERNS (t);
11246 }
11247 else
11248 return -1;
11249
11250 int res = -1;
11251 for (unsigned int i = 0; i < nelts; ++i)
11252 {
11253 tree elt = vector_cst_elt (t, i);
11254 if (!integer_zerop (elt) && !real_zerop (elt))
11255 {
11256 if (res >= 0 || i >= repeat_nelts)
11257 return -1;
11258 res = i;
11259 }
11260 }
11261 return res;
11262 }
11263
11264 /* Build an empty statement at location LOC. */
11265
11266 tree
11267 build_empty_stmt (location_t loc)
11268 {
11269 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
11270 SET_EXPR_LOCATION (t, loc);
11271 return t;
11272 }
11273
11274
/* Build an OpenMP clause with code CODE.  LOC is the location of the
   clause.  */

tree
build_omp_clause (location_t loc, enum omp_clause_code code)
{
  tree t;
  int size, length;

  /* struct tree_omp_clause has room for one operand built in, hence the
     LENGTH - 1 extra slots.  */
  length = omp_clause_num_ops[code];
  size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));

  record_node_allocation_statistics (OMP_CLAUSE, size);

  /* Allocate from the GC heap and zero-fill before setting the code,
     clause kind and location.  */
  t = (tree) ggc_internal_alloc (size);
  memset (t, 0, size);
  TREE_SET_CODE (t, OMP_CLAUSE);
  OMP_CLAUSE_SET_CODE (t, code);
  OMP_CLAUSE_LOCATION (t) = loc;

  return t;
}
11297
11298 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
11299 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
11300 Except for the CODE and operand count field, other storage for the
11301 object is initialized to zeros. */
11302
11303 tree
11304 build_vl_exp (enum tree_code code, int len MEM_STAT_DECL)
11305 {
11306 tree t;
11307 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
11308
11309 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
11310 gcc_assert (len >= 1);
11311
11312 record_node_allocation_statistics (code, length);
11313
11314 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
11315
11316 TREE_SET_CODE (t, code);
11317
11318 /* Can't use TREE_OPERAND to store the length because if checking is
11319 enabled, it will try to check the length before we store it. :-P */
11320 t->exp.operands[0] = build_int_cst (sizetype, len);
11321
11322 return t;
11323 }
11324
11325 /* Helper function for build_call_* functions; build a CALL_EXPR with
11326 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
11327 the argument slots. */
11328
11329 static tree
11330 build_call_1 (tree return_type, tree fn, int nargs)
11331 {
11332 tree t;
11333
11334 t = build_vl_exp (CALL_EXPR, nargs + 3);
11335 TREE_TYPE (t) = return_type;
11336 CALL_EXPR_FN (t) = fn;
11337 CALL_EXPR_STATIC_CHAIN (t) = NULL;
11338
11339 return t;
11340 }
11341
11342 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11343 FN and a null static chain slot. NARGS is the number of call arguments
11344 which are specified as "..." arguments. */
11345
11346 tree
11347 build_call_nary (tree return_type, tree fn, int nargs, ...)
11348 {
11349 tree ret;
11350 va_list args;
11351 va_start (args, nargs);
11352 ret = build_call_valist (return_type, fn, nargs, args);
11353 va_end (args);
11354 return ret;
11355 }
11356
11357 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11358 FN and a null static chain slot. NARGS is the number of call arguments
11359 which are specified as a va_list ARGS. */
11360
11361 tree
11362 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
11363 {
11364 tree t;
11365 int i;
11366
11367 t = build_call_1 (return_type, fn, nargs);
11368 for (i = 0; i < nargs; i++)
11369 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
11370 process_call_operands (t);
11371 return t;
11372 }
11373
11374 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11375 FN and a null static chain slot. NARGS is the number of call arguments
11376 which are specified as a tree array ARGS. */
11377
11378 tree
11379 build_call_array_loc (location_t loc, tree return_type, tree fn,
11380 int nargs, const tree *args)
11381 {
11382 tree t;
11383 int i;
11384
11385 t = build_call_1 (return_type, fn, nargs);
11386 for (i = 0; i < nargs; i++)
11387 CALL_EXPR_ARG (t, i) = args[i];
11388 process_call_operands (t);
11389 SET_EXPR_LOCATION (t, loc);
11390 return t;
11391 }
11392
11393 /* Like build_call_array, but takes a vec. */
11394
11395 tree
11396 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
11397 {
11398 tree ret, t;
11399 unsigned int ix;
11400
11401 ret = build_call_1 (return_type, fn, vec_safe_length (args));
11402 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
11403 CALL_EXPR_ARG (ret, ix) = t;
11404 process_call_operands (ret);
11405 return ret;
11406 }
11407
11408 /* Conveniently construct a function call expression. FNDECL names the
11409 function to be called and N arguments are passed in the array
11410 ARGARRAY. */
11411
11412 tree
11413 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11414 {
11415 tree fntype = TREE_TYPE (fndecl);
11416 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11417
11418 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
11419 }
11420
11421 /* Conveniently construct a function call expression. FNDECL names the
11422 function to be called and the arguments are passed in the vector
11423 VEC. */
11424
11425 tree
11426 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
11427 {
11428 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11429 vec_safe_address (vec));
11430 }
11431
11432
11433 /* Conveniently construct a function call expression. FNDECL names the
11434 function to be called, N is the number of arguments, and the "..."
11435 parameters are the argument expressions. */
11436
11437 tree
11438 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11439 {
11440 va_list ap;
11441 tree *argarray = XALLOCAVEC (tree, n);
11442 int i;
11443
11444 va_start (ap, n);
11445 for (i = 0; i < n; i++)
11446 argarray[i] = va_arg (ap, tree);
11447 va_end (ap);
11448 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11449 }
11450
11451 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11452 varargs macros aren't supported by all bootstrap compilers. */
11453
11454 tree
11455 build_call_expr (tree fndecl, int n, ...)
11456 {
11457 va_list ap;
11458 tree *argarray = XALLOCAVEC (tree, n);
11459 int i;
11460
11461 va_start (ap, n);
11462 for (i = 0; i < n; i++)
11463 argarray[i] = va_arg (ap, tree);
11464 va_end (ap);
11465 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11466 }
11467
11468 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
11469 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
11470 It will get gimplified later into an ordinary internal function. */
11471
11472 tree
11473 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
11474 tree type, int n, const tree *args)
11475 {
11476 tree t = build_call_1 (type, NULL_TREE, n);
11477 for (int i = 0; i < n; ++i)
11478 CALL_EXPR_ARG (t, i) = args[i];
11479 SET_EXPR_LOCATION (t, loc);
11480 CALL_EXPR_IFN (t) = ifn;
11481 process_call_operands (t);
11482 return t;
11483 }
11484
11485 /* Build internal call expression. This is just like CALL_EXPR, except
11486 its CALL_EXPR_FN is NULL. It will get gimplified later into ordinary
11487 internal function. */
11488
11489 tree
11490 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
11491 tree type, int n, ...)
11492 {
11493 va_list ap;
11494 tree *argarray = XALLOCAVEC (tree, n);
11495 int i;
11496
11497 va_start (ap, n);
11498 for (i = 0; i < n; i++)
11499 argarray[i] = va_arg (ap, tree);
11500 va_end (ap);
11501 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
11502 }
11503
11504 /* Return a function call to FN, if the target is guaranteed to support it,
11505 or null otherwise.
11506
11507 N is the number of arguments, passed in the "...", and TYPE is the
11508 type of the return value. */
11509
11510 tree
11511 maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
11512 int n, ...)
11513 {
11514 va_list ap;
11515 tree *argarray = XALLOCAVEC (tree, n);
11516 int i;
11517
11518 va_start (ap, n);
11519 for (i = 0; i < n; i++)
11520 argarray[i] = va_arg (ap, tree);
11521 va_end (ap);
11522 if (internal_fn_p (fn))
11523 {
11524 internal_fn ifn = as_internal_fn (fn);
11525 if (direct_internal_fn_p (ifn))
11526 {
11527 tree_pair types = direct_internal_fn_types (ifn, type, argarray);
11528 if (!direct_internal_fn_supported_p (ifn, types,
11529 OPTIMIZE_FOR_BOTH))
11530 return NULL_TREE;
11531 }
11532 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
11533 }
11534 else
11535 {
11536 tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
11537 if (!fndecl)
11538 return NULL_TREE;
11539 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11540 }
11541 }
11542
11543 /* Return a function call to the appropriate builtin alloca variant.
11544
11545 SIZE is the size to be allocated. ALIGN, if non-zero, is the requested
11546 alignment of the allocated area. MAX_SIZE, if non-negative, is an upper
11547 bound for SIZE in case it is not a fixed value. */
11548
11549 tree
11550 build_alloca_call_expr (tree size, unsigned int align, HOST_WIDE_INT max_size)
11551 {
11552 if (max_size >= 0)
11553 {
11554 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX);
11555 return
11556 build_call_expr (t, 3, size, size_int (align), size_int (max_size));
11557 }
11558 else if (align > 0)
11559 {
11560 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
11561 return build_call_expr (t, 2, size, size_int (align));
11562 }
11563 else
11564 {
11565 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA);
11566 return build_call_expr (t, 1, size);
11567 }
11568 }
11569
/* Create a new constant string literal of type ELTYPE[SIZE] (or LEN
   if SIZE == -1) and return a tree node representing char* pointer to
   it as an ADDR_EXPR (ARRAY_REF (ELTYPE, ...)).  The STRING_CST value
   is the LEN bytes at STR (the representation of the string, which may
   be wide).  */

tree
build_string_literal (int len, const char *str,
		      tree eltype /* = char_type_node */,
		      unsigned HOST_WIDE_INT size /* = -1 */)
{
  tree t = build_string (len, str);
  /* Set the maximum valid index based on the string length or SIZE.  */
  unsigned HOST_WIDE_INT maxidx
    = (size == HOST_WIDE_INT_M1U ? len : size) - 1;

  tree index = build_index_type (size_int (maxidx));
  /* The array element type is the const-qualified variant of ELTYPE.  */
  eltype = build_type_variant (eltype, 1, 0);
  tree type = build_array_type (eltype, index);
  TREE_TYPE (t) = type;
  TREE_CONSTANT (t) = 1;
  TREE_READONLY (t) = 1;
  TREE_STATIC (t) = 1;

  /* Wrap the STRING_CST as &string[0], typed as pointer to the
     (const) element type.  */
  type = build_pointer_type (eltype);
  t = build1 (ADDR_EXPR, type,
	      build4 (ARRAY_REF, eltype,
		      t, integer_zero_node, NULL_TREE, NULL_TREE));
  return t;
}
11600
11601
11602
11603 /* Return true if T (assumed to be a DECL) must be assigned a memory
11604 location. */
11605
11606 bool
11607 needs_to_live_in_memory (const_tree t)
11608 {
11609 return (TREE_ADDRESSABLE (t)
11610 || is_global_var (t)
11611 || (TREE_CODE (t) == RESULT_DECL
11612 && !DECL_BY_REFERENCE (t)
11613 && aggregate_value_p (t, current_function_decl)));
11614 }
11615
/* Return value of a constant X and sign-extend it.  */

HOST_WIDE_INT
int_cst_value (const_tree x)
{
  unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
  unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);

  /* Make sure the sign-extended value will fit in a HOST_WIDE_INT.  */
  gcc_assert (cst_and_fits_in_hwi (x));

  if (bits < HOST_BITS_PER_WIDE_INT)
    {
      /* Sign-extend from bit BITS-1: fill all higher bits with copies
	 of the sign bit.  The shift is split into (bits - 1) then 1 so
	 the shift count stays below the type width even when
	 bits == HOST_BITS_PER_WIDE_INT would make a single shift of
	 BITS undefined.  */
      bool negative = ((val >> (bits - 1)) & 1) != 0;
      if (negative)
	val |= HOST_WIDE_INT_M1U << (bits - 1) << 1;
      else
	val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
    }

  return val;
}
11638
11639 /* If TYPE is an integral or pointer type, return an integer type with
11640 the same precision which is unsigned iff UNSIGNEDP is true, or itself
11641 if TYPE is already an integer type of signedness UNSIGNEDP.
11642 If TYPE is a floating-point type, return an integer type with the same
11643 bitsize and with the signedness given by UNSIGNEDP; this is useful
11644 when doing bit-level operations on a floating-point value. */
11645
11646 tree
11647 signed_or_unsigned_type_for (int unsignedp, tree type)
11648 {
11649 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type) == unsignedp)
11650 return type;
11651
11652 if (TREE_CODE (type) == VECTOR_TYPE)
11653 {
11654 tree inner = TREE_TYPE (type);
11655 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11656 if (!inner2)
11657 return NULL_TREE;
11658 if (inner == inner2)
11659 return type;
11660 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
11661 }
11662
11663 if (TREE_CODE (type) == COMPLEX_TYPE)
11664 {
11665 tree inner = TREE_TYPE (type);
11666 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11667 if (!inner2)
11668 return NULL_TREE;
11669 if (inner == inner2)
11670 return type;
11671 return build_complex_type (inner2);
11672 }
11673
11674 unsigned int bits;
11675 if (INTEGRAL_TYPE_P (type)
11676 || POINTER_TYPE_P (type)
11677 || TREE_CODE (type) == OFFSET_TYPE)
11678 bits = TYPE_PRECISION (type);
11679 else if (TREE_CODE (type) == REAL_TYPE)
11680 bits = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (type));
11681 else
11682 return NULL_TREE;
11683
11684 return build_nonstandard_integer_type (bits, unsignedp);
11685 }
11686
/* If TYPE is an integral or pointer type, return an integer type with
   the same precision which is unsigned, or itself if TYPE is already an
   unsigned integer type.  If TYPE is a floating-point type, return an
   unsigned integer type with the same bitsize as TYPE.  */

tree
unsigned_type_for (tree type)
{
  /* Thin wrapper: UNSIGNEDP == 1 requests the unsigned variant.  */
  return signed_or_unsigned_type_for (1, type);
}
11697
/* If TYPE is an integral or pointer type, return an integer type with
   the same precision which is signed, or itself if TYPE is already a
   signed integer type.  If TYPE is a floating-point type, return a
   signed integer type with the same bitsize as TYPE.  */

tree
signed_type_for (tree type)
{
  /* Thin wrapper: UNSIGNEDP == 0 requests the signed variant.  */
  return signed_or_unsigned_type_for (0, type);
}
11708
11709 /* If TYPE is a vector type, return a signed integer vector type with the
11710 same width and number of subparts. Otherwise return boolean_type_node. */
11711
11712 tree
11713 truth_type_for (tree type)
11714 {
11715 if (TREE_CODE (type) == VECTOR_TYPE)
11716 {
11717 if (VECTOR_BOOLEAN_TYPE_P (type))
11718 return type;
11719 return build_truth_vector_type_for (type);
11720 }
11721 else
11722 return boolean_type_node;
11723 }
11724
/* Returns the largest value obtainable by casting something in INNER type to
   OUTER type.  */

tree
upper_bound_in_type (tree outer, tree inner)
{
  unsigned int det = 0;
  unsigned oprec = TYPE_PRECISION (outer);
  unsigned iprec = TYPE_PRECISION (inner);
  unsigned prec;

  /* Compute a unique number for every combination: bit 2 set when the
     outer precision exceeds the inner, bit 1 when OUTER is unsigned,
     bit 0 when INNER is unsigned.  */
  det |= (oprec > iprec) ? 4 : 0;
  det |= TYPE_UNSIGNED (outer) ? 2 : 0;
  det |= TYPE_UNSIGNED (inner) ? 1 : 0;

  /* Determine the exponent to use: the bound is 2^PREC - 1.  */
  switch (det)
    {
    case 0:
    case 1:
      /* oprec <= iprec, outer: signed, inner: don't care.  */
      prec = oprec - 1;
      break;
    case 2:
    case 3:
      /* oprec <= iprec, outer: unsigned, inner: don't care.  */
      prec = oprec;
      break;
    case 4:
      /* oprec > iprec, outer: signed, inner: signed.  */
      prec = iprec - 1;
      break;
    case 5:
      /* oprec > iprec, outer: signed, inner: unsigned.  */
      prec = iprec;
      break;
    case 6:
      /* oprec > iprec, outer: unsigned, inner: signed.  */
      prec = oprec;
      break;
    case 7:
      /* oprec > iprec, outer: unsigned, inner: unsigned.  */
      prec = iprec;
      break;
    default:
      gcc_unreachable ();
    }

  /* 2^PREC - 1 expressed as a mask of the low PREC bits of OUTER.  */
  return wide_int_to_tree (outer,
			   wi::mask (prec, false, TYPE_PRECISION (outer)));
}
11777
11778 /* Returns the smallest value obtainable by casting something in INNER type to
11779 OUTER type. */
11780
11781 tree
11782 lower_bound_in_type (tree outer, tree inner)
11783 {
11784 unsigned oprec = TYPE_PRECISION (outer);
11785 unsigned iprec = TYPE_PRECISION (inner);
11786
11787 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
11788 and obtain 0. */
11789 if (TYPE_UNSIGNED (outer)
11790 /* If we are widening something of an unsigned type, OUTER type
11791 contains all values of INNER type. In particular, both INNER
11792 and OUTER types have zero in common. */
11793 || (oprec > iprec && TYPE_UNSIGNED (inner)))
11794 return build_int_cst (outer, 0);
11795 else
11796 {
11797 /* If we are widening a signed type to another signed type, we
11798 want to obtain -2^^(iprec-1). If we are keeping the
11799 precision or narrowing to a signed type, we want to obtain
11800 -2^(oprec-1). */
11801 unsigned prec = oprec > iprec ? iprec : oprec;
11802 return wide_int_to_tree (outer,
11803 wi::mask (prec - 1, true,
11804 TYPE_PRECISION (outer)));
11805 }
11806 }
11807
11808 /* Return nonzero if two operands that are suitable for PHI nodes are
11809 necessarily equal. Specifically, both ARG0 and ARG1 must be either
11810 SSA_NAME or invariant. Note that this is strictly an optimization.
11811 That is, callers of this function can directly call operand_equal_p
11812 and get the same result, only slower. */
11813
11814 int
11815 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
11816 {
11817 if (arg0 == arg1)
11818 return 1;
11819 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
11820 return 0;
11821 return operand_equal_p (arg0, arg1, 0);
11822 }
11823
/* Returns number of zeros at the end of binary representation of X,
   as an INTEGER_CST of X's type.  */

tree
num_ending_zeros (const_tree x)
{
  /* wi::ctz counts the trailing zero bits of the constant's value.  */
  return build_int_cst (TREE_TYPE (x), wi::ctz (wi::to_wide (x)));
}
11831
11832
/* Walk NODE with walk_tree_1, reusing the enclosing function's FUNC,
   DATA, PSET and LH locals, and return from the enclosing function
   immediately if the walk produced a result.  Only usable inside
   functions declaring those locals and a tree RESULT.  */
#define WALK_SUBTREE(NODE)				\
  do							\
    {							\
      result = walk_tree_1 (&(NODE), func, data, pset, lh);	\
      if (result)					\
	return result;					\
    }							\
  while (0)
11841
/* This is a subroutine of walk_tree that walks field of TYPE that are to
   be walked whenever a type is seen in the tree.  Rest of operands and return
   value are as for walk_tree.  Note that a hit inside WALK_SUBTREE
   returns early; reaching the end always yields NULL_TREE.  */

static tree
walk_type_fields (tree type, walk_tree_fn func, void *data,
		  hash_set<tree> *pset, walk_tree_lh lh)
{
  tree result = NULL_TREE;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case VECTOR_TYPE:
      /* We have to worry about mutually recursive pointers.  These can't
	 be written in C.  They can in Ada.  It's pathological, but
	 there's an ACATS test (c38102a) that checks it.  Deal with this
	 by checking if we're pointing to another pointer, that one
	 points to another pointer, that one does too, and we have no htab.
	 If so, get a hash table.  We check three levels deep to avoid
	 the cost of the hash table if we don't need one.  */
      if (POINTER_TYPE_P (TREE_TYPE (type))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
	  && !pset)
	{
	  result = walk_tree_without_duplicates (&TREE_TYPE (type),
						 func, data);
	  if (result)
	    return result;

	  break;
	}

      /* fall through */

    case COMPLEX_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      break;

    case METHOD_TYPE:
      WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));

      /* Fall through.  */

    case FUNCTION_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      {
	tree arg;

	/* We never want to walk into default arguments.  */
	for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
	  WALK_SUBTREE (TREE_VALUE (arg));
      }
      break;

    case ARRAY_TYPE:
      /* Don't follow this nodes's type if a pointer for fear that
	 we'll have infinite recursion.  If we have a PSET, then we
	 need not fear.  */
      if (pset
	  || (!POINTER_TYPE_P (TREE_TYPE (type))
	      && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
	WALK_SUBTREE (TREE_TYPE (type));
      WALK_SUBTREE (TYPE_DOMAIN (type));
      break;

    case OFFSET_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
      break;

    default:
      break;
    }

  return NULL_TREE;
}
11921
/* Apply FUNC to all the sub-trees of TP in a pre-order traversal.  FUNC is
   called with the DATA and the address of each sub-tree.  If FUNC returns a
   non-NULL value, the traversal is stopped, and the value returned by FUNC
   is returned.  If PSET is non-NULL it is used to record the nodes visited,
   and to avoid visiting a node more than once.  LH, if non-NULL, is a
   language hook given first crack at each node after FUNC.  */

tree
walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
	     hash_set<tree> *pset, walk_tree_lh lh)
{
  enum tree_code code;
  int walk_subtrees;
  tree result;

/* Walk NODE as a "tail call": instead of recursing, repoint TP at it
   and jump back to the top of the function.  Control never falls
   through a WALK_SUBTREE_TAIL, which is why the cases below that end
   with it have no break.  */
#define WALK_SUBTREE_TAIL(NODE)				\
  do							\
    {							\
      tp = & (NODE);					\
      goto tail_recurse;				\
    }							\
  while (0)

 tail_recurse:
  /* Skip empty subtrees.  */
  if (!*tp)
    return NULL_TREE;

  /* Don't walk the same tree twice, if the user has requested
     that we avoid doing so.  */
  if (pset && pset->add (*tp))
    return NULL_TREE;

  /* Call the function.  */
  walk_subtrees = 1;
  result = (*func) (tp, &walk_subtrees, data);

  /* If we found something, return it.  */
  if (result)
    return result;

  code = TREE_CODE (*tp);

  /* Even if we didn't, FUNC may have decided that there was nothing
     interesting below this point in the tree.  */
  if (!walk_subtrees)
    {
      /* But we still need to check our siblings.  */
      if (code == TREE_LIST)
	WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
      else if (code == OMP_CLAUSE)
	WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
      else
	return NULL_TREE;
    }

  /* Give the language hook a chance to handle or prune this node.  */
  if (lh)
    {
      result = (*lh) (tp, &walk_subtrees, func, data, pset);
      if (result || !walk_subtrees)
	return result;
    }

  switch (code)
    {
    case ERROR_MARK:
    case IDENTIFIER_NODE:
    case INTEGER_CST:
    case REAL_CST:
    case FIXED_CST:
    case VECTOR_CST:
    case STRING_CST:
    case BLOCK:
    case PLACEHOLDER_EXPR:
    case SSA_NAME:
    case FIELD_DECL:
    case RESULT_DECL:
      /* None of these have subtrees other than those already walked
	 above.  */
      break;

    case TREE_LIST:
      WALK_SUBTREE (TREE_VALUE (*tp));
      WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
      break;

    case TREE_VEC:
      {
	int len = TREE_VEC_LENGTH (*tp);

	if (len == 0)
	  break;

	/* Walk all elements but the first.  */
	while (--len)
	  WALK_SUBTREE (TREE_VEC_ELT (*tp, len));

	/* Now walk the first one as a tail call.  */
	WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
      }

    case COMPLEX_CST:
      WALK_SUBTREE (TREE_REALPART (*tp));
      WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));

    case CONSTRUCTOR:
      {
	unsigned HOST_WIDE_INT idx;
	constructor_elt *ce;

	for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
	     idx++)
	  WALK_SUBTREE (ce->value);
      }
      break;

    case SAVE_EXPR:
      WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));

    case BIND_EXPR:
      {
	tree decl;
	for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
	  {
	    /* Walk the DECL_INITIAL and DECL_SIZE.  We don't want to walk
	       into declarations that are just mentioned, rather than
	       declared; they don't really belong to this part of the tree.
	       And, we can see cycles: the initializer for a declaration
	       can refer to the declaration itself.  */
	    WALK_SUBTREE (DECL_INITIAL (decl));
	    WALK_SUBTREE (DECL_SIZE (decl));
	    WALK_SUBTREE (DECL_SIZE_UNIT (decl));
	  }
	WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
      }

    case STATEMENT_LIST:
      {
	tree_stmt_iterator i;
	for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
	  WALK_SUBTREE (*tsi_stmt_ptr (i));
      }
      break;

    case OMP_CLAUSE:
      /* Each group below walks the operands its clause kind has, then
	 tail-walks the clause chain.  */
      switch (OMP_CLAUSE_CODE (*tp))
	{
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE__GRIDDIM_:
	  WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
	  /* FALLTHRU */

	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_UNIFORM:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_HINT:
	case OMP_CLAUSE_TO_DECLARE:
	case OMP_CLAUSE_LINK:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	case OMP_CLAUSE_INCLUSIVE:
	case OMP_CLAUSE_EXCLUSIVE:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	case OMP_CLAUSE__CONDTEMP_:
	case OMP_CLAUSE__SCANTEMP_:
	case OMP_CLAUSE__SIMDUID_:
	  WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
	  /* FALLTHRU */

	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_DEVICE_TYPE:
	case OMP_CLAUSE_INBRANCH:
	case OMP_CLAUSE_NOTINBRANCH:
	case OMP_CLAUSE_FOR:
	case OMP_CLAUSE_PARALLEL:
	case OMP_CLAUSE_SECTIONS:
	case OMP_CLAUSE_TASKGROUP:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ORDER:
	case OMP_CLAUSE_BIND:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));

	case OMP_CLAUSE_LASTPRIVATE:
	  WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
	  WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));

	case OMP_CLAUSE_COLLAPSE:
	  {
	    int i;
	    for (i = 0; i < 3; i++)
	      WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
	    WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
	  }

	case OMP_CLAUSE_LINEAR:
	  WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
	  WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
	  WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));

	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_MAP:
	case OMP_CLAUSE__CACHE_:
	  WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
	  WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_TASK_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  {
	    int i;
	    for (i = 0; i < 5; i++)
	      WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
	    WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
	  }

	default:
	  gcc_unreachable ();
	}
      break;

    case TARGET_EXPR:
      {
	int i, len;

	/* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
	   But, we only want to walk once.  */
	len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
	for (i = 0; i < len; ++i)
	  WALK_SUBTREE (TREE_OPERAND (*tp, i));
	WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
      }

    case DECL_EXPR:
      /* If this is a TYPE_DECL, walk into the fields of the type that it's
	 defining.  We only want to walk into these fields of a type in this
	 case and not in the general case of a mere reference to the type.

	 The criterion is as follows: if the field can be an expression, it
	 must be walked only here.  This should be in keeping with the fields
	 that are directly gimplified in gimplify_type_sizes in order for the
	 mark/copy-if-shared/unmark machinery of the gimplifier to work with
	 variable-sized types.

	 Note that DECLs get walked as part of processing the BIND_EXPR.  */
      if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
	{
	  tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
	  if (TREE_CODE (*type_p) == ERROR_MARK)
	    return NULL_TREE;

	  /* Call the function for the type.  See if it returns anything or
	     doesn't want us to continue.  If we are to continue, walk both
	     the normal fields and those for the declaration case.  */
	  result = (*func) (type_p, &walk_subtrees, data);
	  if (result || !walk_subtrees)
	    return result;

	  /* But do not walk a pointed-to type since it may itself need to
	     be walked in the declaration case if it isn't anonymous.  */
	  if (!POINTER_TYPE_P (*type_p))
	    {
	      result = walk_type_fields (*type_p, func, data, pset, lh);
	      if (result)
		return result;
	    }

	  /* If this is a record type, also walk the fields.  */
	  if (RECORD_OR_UNION_TYPE_P (*type_p))
	    {
	      tree field;

	      for (field = TYPE_FIELDS (*type_p); field;
		   field = DECL_CHAIN (field))
		{
		  /* We'd like to look at the type of the field, but we can
		     easily get infinite recursion.  So assume it's pointed
		     to elsewhere in the tree.  Also, ignore things that
		     aren't fields.  */
		  if (TREE_CODE (field) != FIELD_DECL)
		    continue;

		  WALK_SUBTREE (DECL_FIELD_OFFSET (field));
		  WALK_SUBTREE (DECL_SIZE (field));
		  WALK_SUBTREE (DECL_SIZE_UNIT (field));
		  if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
		    WALK_SUBTREE (DECL_QUALIFIER (field));
		}
	    }

	  /* Same for scalar types.  */
	  else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
		   || TREE_CODE (*type_p) == ENUMERAL_TYPE
		   || TREE_CODE (*type_p) == INTEGER_TYPE
		   || TREE_CODE (*type_p) == FIXED_POINT_TYPE
		   || TREE_CODE (*type_p) == REAL_TYPE)
	    {
	      WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
	      WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
	    }

	  WALK_SUBTREE (TYPE_SIZE (*type_p));
	  WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
	}
      /* FALLTHRU */

    default:
      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  int i, len;

	  /* Walk over all the sub-trees of this operand.  */
	  len = TREE_OPERAND_LENGTH (*tp);

	  /* Go through the subtrees.  We need to do this in forward order so
	     that the scope of a FOR_EXPR is handled properly.  */
	  if (len)
	    {
	      for (i = 0; i < len - 1; ++i)
		WALK_SUBTREE (TREE_OPERAND (*tp, i));
	      WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
	    }
	}
      /* If this is a type, walk the needed fields in the type.  */
      else if (TYPE_P (*tp))
	return walk_type_fields (*tp, func, data, pset, lh);
      break;
    }

  /* We didn't find what we were looking for.  */
  return NULL_TREE;

#undef WALK_SUBTREE_TAIL
}
12301 #undef WALK_SUBTREE
12302
12303 /* Like walk_tree, but does not walk duplicate nodes more than once. */
12304
12305 tree
12306 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
12307 walk_tree_lh lh)
12308 {
12309 tree result;
12310
12311 hash_set<tree> pset;
12312 result = walk_tree_1 (tp, func, data, &pset, lh);
12313 return result;
12314 }
12315
12316
12317 tree
12318 tree_block (tree t)
12319 {
12320 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
12321
12322 if (IS_EXPR_CODE_CLASS (c))
12323 return LOCATION_BLOCK (t->exp.locus);
12324 gcc_unreachable ();
12325 return NULL;
12326 }
12327
12328 void
12329 tree_set_block (tree t, tree b)
12330 {
12331 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
12332
12333 if (IS_EXPR_CODE_CLASS (c))
12334 {
12335 t->exp.locus = set_block (t->exp.locus, b);
12336 }
12337 else
12338 gcc_unreachable ();
12339 }
12340
12341 /* Create a nameless artificial label and put it in the current
12342 function context. The label has a location of LOC. Returns the
12343 newly created label. */
12344
12345 tree
12346 create_artificial_label (location_t loc)
12347 {
12348 tree lab = build_decl (loc,
12349 LABEL_DECL, NULL_TREE, void_type_node);
12350
12351 DECL_ARTIFICIAL (lab) = 1;
12352 DECL_IGNORED_P (lab) = 1;
12353 DECL_CONTEXT (lab) = current_function_decl;
12354 return lab;
12355 }
12356
12357 /* Given a tree, try to return a useful variable name that we can use
12358 to prefix a temporary that is being assigned the value of the tree.
12359 I.E. given <temp> = &A, return A. */
12360
12361 const char *
12362 get_name (tree t)
12363 {
12364 tree stripped_decl;
12365
12366 stripped_decl = t;
12367 STRIP_NOPS (stripped_decl);
12368 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
12369 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
12370 else if (TREE_CODE (stripped_decl) == SSA_NAME)
12371 {
12372 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
12373 if (!name)
12374 return NULL;
12375 return IDENTIFIER_POINTER (name);
12376 }
12377 else
12378 {
12379 switch (TREE_CODE (stripped_decl))
12380 {
12381 case ADDR_EXPR:
12382 return get_name (TREE_OPERAND (stripped_decl, 0));
12383 default:
12384 return NULL;
12385 }
12386 }
12387 }
12388
/* Return true if FNTYPE has a variable argument list.  */
12391 bool
12392 stdarg_p (const_tree fntype)
12393 {
12394 function_args_iterator args_iter;
12395 tree n = NULL_TREE, t;
12396
12397 if (!fntype)
12398 return false;
12399
12400 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
12401 {
12402 n = t;
12403 }
12404
12405 return n != NULL_TREE && n != void_type_node;
12406 }
12407
/* Return true if FNTYPE has a prototype.  */
12410 bool
12411 prototype_p (const_tree fntype)
12412 {
12413 tree t;
12414
12415 gcc_assert (fntype != NULL_TREE);
12416
12417 t = TYPE_ARG_TYPES (fntype);
12418 return (t != NULL_TREE);
12419 }
12420
12421 /* If BLOCK is inlined from an __attribute__((__artificial__))
12422 routine, return pointer to location from where it has been
12423 called. */
location_t *
block_nonartificial_location (tree block)
{
  location_t *ret = NULL;

  /* Walk outward through inlined scopes (blocks with an abstract
     origin), remembering the call site of the outermost artificial
     inline seen so far.  */
  while (block && TREE_CODE (block) == BLOCK
	 && BLOCK_ABSTRACT_ORIGIN (block))
    {
      tree ao = BLOCK_ABSTRACT_ORIGIN (block);
      if (TREE_CODE (ao) == FUNCTION_DECL)
	{
	  /* If AO is an artificial inline, point RET to the
	     call site locus at which it has been inlined and continue
	     the loop, in case AO's caller is also an artificial
	     inline.  */
	  if (DECL_DECLARED_INLINE_P (ao)
	      && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
	    ret = &BLOCK_SOURCE_LOCATION (block);
	  else
	    break;
	}
      /* Origins other than FUNCTION_DECL or BLOCK end the search.  */
      else if (TREE_CODE (ao) != BLOCK)
	break;

      block = BLOCK_SUPERCONTEXT (block);
    }
  return ret;
}
12452
12453
12454 /* If EXP is inlined from an __attribute__((__artificial__))
12455 function, return the location of the original call expression. */
12456
12457 location_t
12458 tree_nonartificial_location (tree exp)
12459 {
12460 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
12461
12462 if (loc)
12463 return *loc;
12464 else
12465 return EXPR_LOCATION (exp);
12466 }
12467
12468
/* These are the hash table functions for the hash table of OPTIMIZATION_NODE
   nodes.  */
12471
/* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION node.  */
hashval_t
cl_option_hasher::hash (tree x)
{
  const_tree const t = x;
  const char *p;
  size_t i;
  size_t len = 0;
  hashval_t hash = 0;

  if (TREE_CODE (t) == OPTIMIZATION_NODE)
    {
      /* Hash the raw bytes of the embedded cl_optimization struct.  */
      p = (const char *)TREE_OPTIMIZATION (t);
      len = sizeof (struct cl_optimization);
    }

  else if (TREE_CODE (t) == TARGET_OPTION_NODE)
    /* Target option nodes have their own target-provided hasher.  */
    return cl_target_option_hash (TREE_TARGET_OPTION (t));

  else
    gcc_unreachable ();

  /* Assume most opt flags are just 0/1, some are 2-3, and a few might be
     something else; mix the byte position into the hash so identical
     bytes at different offsets hash differently.  */
  for (i = 0; i < len; i++)
    if (p[i])
      hash = (hash << 4) ^ ((i << 2) | p[i]);

  return hash;
}
12503
/* Return nonzero if the value represented by *X (an OPTIMIZATION or
   TARGET_OPTION tree node) is the same as that given by *Y, which is
   also an OPTIMIZATION or TARGET_OPTION tree node.  */
12507
12508 bool
12509 cl_option_hasher::equal (tree x, tree y)
12510 {
12511 const_tree const xt = x;
12512 const_tree const yt = y;
12513
12514 if (TREE_CODE (xt) != TREE_CODE (yt))
12515 return 0;
12516
12517 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
12518 return cl_optimization_option_eq (TREE_OPTIMIZATION (xt),
12519 TREE_OPTIMIZATION (yt));
12520 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
12521 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
12522 TREE_TARGET_OPTION (yt));
12523 else
12524 gcc_unreachable ();
12525 }
12526
12527 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
12528
tree
build_optimization_node (struct gcc_options *opts)
{
  tree t;

  /* Use the cache of optimization nodes.  */

  /* Save OPTS into the pre-allocated scratch node, then look that node
     up; if an equal node is already cached it is returned instead.  */
  cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
			opts);

  tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
  t = *slot;
  if (!t)
    {
      /* Insert this one into the hash table.  */
      t = cl_optimization_node;
      *slot = t;

      /* Make a new node for next time round.  */
      cl_optimization_node = make_node (OPTIMIZATION_NODE);
    }

  return t;
}
12553
12554 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
12555
tree
build_target_option_node (struct gcc_options *opts)
{
  tree t;

  /* Use the cache of optimization nodes.  */

  /* Save OPTS into the pre-allocated scratch node, then look that node
     up; if an equal node is already cached it is returned instead.  */
  cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
			 opts);

  tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
  t = *slot;
  if (!t)
    {
      /* Insert this one into the hash table.  */
      t = cl_target_option_node;
      *slot = t;

      /* Make a new node for next time round.  */
      cl_target_option_node = make_node (TARGET_OPTION_NODE);
    }

  return t;
}
12580
12581 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
12582 so that they aren't saved during PCH writing. */
12583
12584 void
12585 prepare_target_option_nodes_for_pch (void)
12586 {
12587 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
12588 for (; iter != cl_option_hash_table->end (); ++iter)
12589 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
12590 TREE_TARGET_GLOBALS (*iter) = NULL;
12591 }
12592
12593 /* Determine the "ultimate origin" of a block. */
12594
12595 tree
12596 block_ultimate_origin (const_tree block)
12597 {
12598 tree origin = BLOCK_ABSTRACT_ORIGIN (block);
12599
12600 if (origin == NULL_TREE)
12601 return NULL_TREE;
12602 else
12603 {
12604 gcc_checking_assert ((DECL_P (origin)
12605 && DECL_ORIGIN (origin) == origin)
12606 || BLOCK_ORIGIN (origin) == origin);
12607 return origin;
12608 }
12609 }
12610
12611 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
12612 no instruction. */
12613
bool
tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
{
  /* Do not strip casts into or out of differing address spaces.  */
  if (POINTER_TYPE_P (outer_type)
      && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
    {
      /* Non-generic outer: inner must be a pointer to the very same
	 address space.  */
      if (!POINTER_TYPE_P (inner_type)
	  || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
	      != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
	return false;
    }
  else if (POINTER_TYPE_P (inner_type)
	   && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
    {
      /* We already know that outer_type is not a pointer with
	 a non-generic address space.  */
      return false;
    }

  /* Use precision rather than machine mode when we can, which gives
     the correct answer even for submode (bit-field) types.  */
  if ((INTEGRAL_TYPE_P (outer_type)
       || POINTER_TYPE_P (outer_type)
       || TREE_CODE (outer_type) == OFFSET_TYPE)
      && (INTEGRAL_TYPE_P (inner_type)
	  || POINTER_TYPE_P (inner_type)
	  || TREE_CODE (inner_type) == OFFSET_TYPE))
    return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);

  /* Otherwise fall back on comparing machine modes (e.g. for
     aggregate types, floats).  */
  return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
}
12648
12649 /* Return true iff conversion in EXP generates no instruction. Mark
12650 it inline so that we fully inline into the stripping functions even
12651 though we have two uses of this function. */
12652
static inline bool
tree_nop_conversion (const_tree exp)
{
  tree outer_type, inner_type;

  /* Location wrappers are no-ops by construction.  */
  if (location_wrapper_p (exp))
    return true;
  /* Only conversion-like codes can possibly be no-ops.  */
  if (!CONVERT_EXPR_P (exp)
      && TREE_CODE (exp) != NON_LVALUE_EXPR)
    return false;

  outer_type = TREE_TYPE (exp);
  inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
  /* Bail out on ill-formed operands, e.g. from earlier errors.  */
  if (!inner_type || inner_type == error_mark_node)
    return false;

  return tree_nop_conversion_p (outer_type, inner_type);
}
12671
12672 /* Return true iff conversion in EXP generates no instruction. Don't
12673 consider conversions changing the signedness. */
12674
12675 static bool
12676 tree_sign_nop_conversion (const_tree exp)
12677 {
12678 tree outer_type, inner_type;
12679
12680 if (!tree_nop_conversion (exp))
12681 return false;
12682
12683 outer_type = TREE_TYPE (exp);
12684 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12685
12686 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
12687 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
12688 }
12689
12690 /* Strip conversions from EXP according to tree_nop_conversion and
12691 return the resulting expression. */
12692
12693 tree
12694 tree_strip_nop_conversions (tree exp)
12695 {
12696 while (tree_nop_conversion (exp))
12697 exp = TREE_OPERAND (exp, 0);
12698 return exp;
12699 }
12700
12701 /* Strip conversions from EXP according to tree_sign_nop_conversion
12702 and return the resulting expression. */
12703
12704 tree
12705 tree_strip_sign_nop_conversions (tree exp)
12706 {
12707 while (tree_sign_nop_conversion (exp))
12708 exp = TREE_OPERAND (exp, 0);
12709 return exp;
12710 }
12711
12712 /* Avoid any floating point extensions from EXP. */
tree
strip_float_extensions (tree exp)
{
  tree sub, expt, subt;

  /* For floating point constant look up the narrowest type that can hold
     it properly and handle it like (type)(narrowest_type)constant.
     This way we can optimize for instance a=a*2.0 where "a" is float
     but 2.0 is double constant.  */
  if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
    {
      REAL_VALUE_TYPE orig;
      tree type = NULL;

      orig = TREE_REAL_CST (exp);
      /* Try float first, then double, accepting a type only if the
	 constant survives truncation to it exactly.  */
      if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
	  && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
	type = float_type_node;
      else if (TYPE_PRECISION (TREE_TYPE (exp))
	       > TYPE_PRECISION (double_type_node)
	       && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
	type = double_type_node;
      if (type)
	return build_real_truncate (type, orig);
    }

  if (!CONVERT_EXPR_P (exp))
    return exp;

  sub = TREE_OPERAND (exp, 0);
  subt = TREE_TYPE (sub);
  expt = TREE_TYPE (exp);

  /* Only conversions from a floating point type can be extensions.  */
  if (!FLOAT_TYPE_P (subt))
    return exp;

  /* Do not mix binary and decimal floating point families.  */
  if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
    return exp;

  /* A narrowing conversion is not an extension; stop here.  */
  if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
    return exp;

  /* Recurse in case of multiple nested extensions.  */
  return strip_float_extensions (sub);
}
12757
12758 /* Strip out all handled components that produce invariant
12759 offsets. */
12760
const_tree
strip_invariant_refs (const_tree op)
{
  while (handled_component_p (op))
    {
      switch (TREE_CODE (op))
	{
	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  /* Invariant only with a constant index and the default
	     lower bound and element size (operands 2 and 3 unset).  */
	  if (!is_gimple_constant (TREE_OPERAND (op, 1))
	      || TREE_OPERAND (op, 2) != NULL_TREE
	      || TREE_OPERAND (op, 3) != NULL_TREE)
	    return NULL;
	  break;

	case COMPONENT_REF:
	  /* A non-default field offset (operand 2) is not invariant.  */
	  if (TREE_OPERAND (op, 2) != NULL_TREE)
	    return NULL;
	  break;

	default:;
	}
      op = TREE_OPERAND (op, 0);
    }

  return op;
}
12788
12789 static GTY(()) tree gcc_eh_personality_decl;
12790
12791 /* Return the GCC personality function decl. */
12792
12793 tree
12794 lhd_gcc_personality (void)
12795 {
12796 if (!gcc_eh_personality_decl)
12797 gcc_eh_personality_decl = build_personality_function ("gcc");
12798 return gcc_eh_personality_decl;
12799 }
12800
/* TARGET is a call target of GIMPLE call statement
   (obtained by gimple_call_fn).  Return true if it is
   OBJ_TYPE_REF representing a virtual call of a C++ method.
   (As opposed to OBJ_TYPE_REF representing objc calls
   through a cast where middle-end devirtualization machinery
   can't apply.)  */
12807
bool
virtual_method_call_p (const_tree target)
{
  if (TREE_CODE (target) != OBJ_TYPE_REF)
    return false;
  /* TARGET is a pointer to the callee; look at the pointed-to type.  */
  tree t = TREE_TYPE (target);
  gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
  t = TREE_TYPE (t);
  /* A plain FUNCTION_TYPE indicates a call through a cast (e.g. objc
     style), not a C++ virtual call.  */
  if (TREE_CODE (t) == FUNCTION_TYPE)
    return false;
  gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
  /* If we do not have BINFO associated, it means that type was built
     without devirtualization enabled.  Do not consider this a virtual
     call.  */
  if (!TYPE_BINFO (obj_type_ref_class (target)))
    return false;
  return true;
}
12826
12827 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
12828
static tree
lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
{
  unsigned int i;
  tree base_binfo, b;

  /* Check each direct base: either it is the base of TYPE at POS we
     are looking for, or we recurse into it in case TYPE is an
     indirect base.  */
  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
	&& types_same_for_odr (TREE_TYPE (base_binfo), type))
      return base_binfo;
    else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
      return b;
  return NULL;
}
12843
12844 /* Try to find a base info of BINFO that would have its field decl at offset
12845 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
12846 found, return, otherwise return NULL_TREE. */
12847
tree
get_binfo_at_offset (tree binfo, poly_int64 offset, tree expected_type)
{
  tree type = BINFO_TYPE (binfo);

  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;
      int i;

      /* Reached a binfo of the expected type: success.  */
      if (types_same_for_odr (type, expected_type))
	return binfo;
      if (maybe_lt (offset, 0))
	return NULL_TREE;

      /* Find the artificial FIELD_DECL (base subobject) whose bit
	 range contains OFFSET.  */
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	{
	  if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
	    continue;

	  pos = int_bit_position (fld);
	  size = tree_to_uhwi (DECL_SIZE (fld));
	  if (known_in_range_p (offset, pos, size))
	    break;
	}
      if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
	return NULL_TREE;

      /* Offset 0 indicates the primary base, whose vtable contents are
	 represented in the binfo for the derived class.  */
      else if (maybe_ne (offset, 0))
	{
	  tree found_binfo = NULL, base_binfo;
	  /* Offsets in BINFO are in bytes relative to the whole structure
	     while POS is in bits relative to the containing field.  */
	  int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
			      / BITS_PER_UNIT);

	  /* Look for a direct base binfo matching the field's type and
	     byte offset.  */
	  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
	    if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
		&& types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
	      {
		found_binfo = base_binfo;
		break;
	      }
	  /* Otherwise search recursively for an indirect base at that
	     offset.  */
	  if (found_binfo)
	    binfo = found_binfo;
	  else
	    binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
					    binfo_offset);
	}

      /* Descend into the field and continue with the remaining
	 offset.  */
      type = TREE_TYPE (fld);
      offset -= pos;
    }
}
12905
12906 /* Returns true if X is a typedef decl. */
12907
12908 bool
12909 is_typedef_decl (const_tree x)
12910 {
12911 return (x && TREE_CODE (x) == TYPE_DECL
12912 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12913 }
12914
12915 /* Returns true iff TYPE is a type variant created for a typedef. */
12916
12917 bool
12918 typedef_variant_p (const_tree type)
12919 {
12920 return is_typedef_decl (TYPE_NAME (type));
12921 }
12922
12923 /* PR 84195: Replace control characters in "unescaped" with their
12924 escaped equivalents. Allow newlines if -fmessage-length has
12925 been set to a non-zero value. This is done here, rather than
12926 where the attribute is recorded as the message length can
12927 change between these two locations. */
12928
void
escaped_string::escape (const char *unescaped)
{
  char *escaped;
  size_t i, new_i, len;

  /* Release any buffer a previous call left us owning.  */
  if (m_owned)
    free (m_str);

  /* Start by aliasing the input; a copy is only made if escaping
     turns out to be necessary.  */
  m_str = const_cast<char *> (unescaped);
  m_owned = false;

  if (unescaped == NULL || *unescaped == 0)
    return;

  len = strlen (unescaped);
  escaped = NULL;
  new_i = 0;

  for (i = 0; i < len; i++)
    {
      char c = unescaped[i];

      /* Ordinary characters are copied through (once a copy exists).  */
      if (!ISCNTRL (c))
	{
	  if (escaped)
	    escaped[new_i++] = c;
	  continue;
	}

      /* Newlines are kept verbatim when the diagnostic printer wraps
	 lines (-fmessage-length non-zero); all other control
	 characters are escaped.  */
      if (c != '\n' || !pp_is_wrapping_line (global_dc->printer))
	{
	  if (escaped == NULL)
	    {
	      /* We only allocate space for a new string if we
		 actually encounter a control character that
		 needs replacing.  Each control character expands to
		 at most two bytes, hence len * 2 + 1.  */
	      escaped = (char *) xmalloc (len * 2 + 1);
	      strncpy (escaped, unescaped, i);
	      new_i = i;
	    }

	  escaped[new_i++] = '\\';

	  switch (c)
	    {
	    case '\a': escaped[new_i++] = 'a'; break;
	    case '\b': escaped[new_i++] = 'b'; break;
	    case '\f': escaped[new_i++] = 'f'; break;
	    case '\n': escaped[new_i++] = 'n'; break;
	    case '\r': escaped[new_i++] = 'r'; break;
	    case '\t': escaped[new_i++] = 't'; break;
	    case '\v': escaped[new_i++] = 'v'; break;
	    default: escaped[new_i++] = '?'; break;
	    }
	}
      else if (escaped)
	escaped[new_i++] = c;
    }

  /* If a copy was built, terminate it and take ownership.  */
  if (escaped)
    {
      escaped[new_i] = 0;
      m_str = escaped;
      m_owned = true;
    }
}
12996
12997 /* Warn about a use of an identifier which was marked deprecated. Returns
12998 whether a warning was given. */
12999
bool
warn_deprecated_use (tree node, tree attr)
{
  escaped_string msg;

  if (node == 0 || !warn_deprecated_decl)
    return false;

  /* If the caller did not supply the attribute list, fetch it from
     the decl, or from the type's stub decl for types.  */
  if (!attr)
    {
      if (DECL_P (node))
	attr = DECL_ATTRIBUTES (node);
      else if (TYPE_P (node))
	{
	  tree decl = TYPE_STUB_DECL (node);
	  if (decl)
	    attr = lookup_attribute ("deprecated",
				     TYPE_ATTRIBUTES (TREE_TYPE (decl)));
	}
    }

  if (attr)
    attr = lookup_attribute ("deprecated", attr);

  /* The optional deprecation message is the attribute's argument;
     escape control characters before printing it.  */
  if (attr)
    msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));

  bool w = false;
  if (DECL_P (node))
    {
      auto_diagnostic_group d;
      if (msg)
	w = warning (OPT_Wdeprecated_declarations,
		     "%qD is deprecated: %s", node, (const char *) msg);
      else
	w = warning (OPT_Wdeprecated_declarations,
		     "%qD is deprecated", node);
      if (w)
	inform (DECL_SOURCE_LOCATION (node), "declared here");
    }
  else if (TYPE_P (node))
    {
      /* Prefer naming the type in the diagnostic when it has a usable
	 name, either directly or via its TYPE_DECL.  */
      tree what = NULL_TREE;
      tree decl = TYPE_STUB_DECL (node);

      if (TYPE_NAME (node))
	{
	  if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
	    what = TYPE_NAME (node);
	  else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
		   && DECL_NAME (TYPE_NAME (node)))
	    what = DECL_NAME (TYPE_NAME (node));
	}

      auto_diagnostic_group d;
      if (what)
	{
	  if (msg)
	    w = warning (OPT_Wdeprecated_declarations,
			 "%qE is deprecated: %s", what, (const char *) msg);
	  else
	    w = warning (OPT_Wdeprecated_declarations,
			 "%qE is deprecated", what);
	}
      else
	{
	  if (msg)
	    w = warning (OPT_Wdeprecated_declarations,
			 "type is deprecated: %s", (const char *) msg);
	  else
	    w = warning (OPT_Wdeprecated_declarations,
			 "type is deprecated");
	}

      if (w && decl)
	inform (DECL_SOURCE_LOCATION (decl), "declared here");
    }

  return w;
}
13080
13081 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
13082 somewhere in it. */
13083
13084 bool
13085 contains_bitfld_component_ref_p (const_tree ref)
13086 {
13087 while (handled_component_p (ref))
13088 {
13089 if (TREE_CODE (ref) == COMPONENT_REF
13090 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
13091 return true;
13092 ref = TREE_OPERAND (ref, 0);
13093 }
13094
13095 return false;
13096 }
13097
13098 /* Try to determine whether a TRY_CATCH expression can fall through.
13099 This is a subroutine of block_may_fallthru. */
13100
static bool
try_catch_may_fallthru (const_tree stmt)
{
  tree_stmt_iterator i;

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
    return true;

  /* Otherwise the answer depends on the handler, operand 1.  */
  i = tsi_start (TREE_OPERAND (stmt, 1));
  switch (TREE_CODE (tsi_stmt (i)))
    {
    case CATCH_EXPR:
      /* We expect to see a sequence of CATCH_EXPR trees, each with a
	 catch expression and a body.  The whole TRY_CATCH may fall
	 through iff any of the catch bodies falls through.  */
      for (; !tsi_end_p (i); tsi_next (&i))
	{
	  if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
	    return true;
	}
      return false;

    case EH_FILTER_EXPR:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a RESX statement to resume execution after the exception.
	 So in this case the TRY_CATCH never falls through.  */
      return false;
    }
}
13145
13146 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
13147 need not be 100% accurate; simply be conservative and return true if we
13148 don't know. This is used only to avoid stupidly generating extra code.
13149 If we're wrong, we'll just delete the extra code later. */
13150
bool
block_may_fallthru (const_tree block)
{
  /* This CONST_CAST is okay because expr_last returns its argument
     unmodified and we assign it to a const_tree.  */
  const_tree stmt = expr_last (CONST_CAST_TREE (block));

  switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      /* Easy cases.  If the last statement of the block implies
	 control transfer, then we can't fall through.  */
      return false;

    case SWITCH_EXPR:
      /* If there is a default: label or case labels cover all possible
	 SWITCH_COND values, then the SWITCH_EXPR will transfer control
	 to some case label in all cases and all we care is whether the
	 SWITCH_BODY falls through.  */
      if (SWITCH_ALL_CASES_P (stmt))
	return block_may_fallthru (SWITCH_BODY (stmt));
      return true;

    case COND_EXPR:
      /* Falls through if either arm does.  */
      if (block_may_fallthru (COND_EXPR_THEN (stmt)))
	return true;
      return block_may_fallthru (COND_EXPR_ELSE (stmt));

    case BIND_EXPR:
      return block_may_fallthru (BIND_EXPR_BODY (stmt));

    case TRY_CATCH_EXPR:
      return try_catch_may_fallthru (stmt);

    case TRY_FINALLY_EXPR:
      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (block_may_fallthru (TREE_OPERAND (stmt, 0))
	      && block_may_fallthru (TREE_OPERAND (stmt, 1)));

    case EH_ELSE_EXPR:
      /* Only the non-EH path (operand 0) is considered here.  */
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    case MODIFY_EXPR:
      /* An assignment falls through unless its RHS is a call that
	 does not return; reduce to the CALL_EXPR case.  */
      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
	stmt = TREE_OPERAND (stmt, 1);
      else
	return true;
      /* FALLTHRU */

    case CALL_EXPR:
      /* Functions that do not return do not fall through.  */
      return (call_expr_flags (stmt) & ECF_NORETURN) == 0;

    case CLEANUP_POINT_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    case TARGET_EXPR:
      /* Look at the initializer, operand 1.  */
      return block_may_fallthru (TREE_OPERAND (stmt, 1));

    case ERROR_MARK:
      /* Also reached for an empty block; be conservative.  */
      return true;

    default:
      /* Let the front end decide for language-specific codes.  */
      return lang_hooks.block_may_fallthru (stmt);
    }
}
13224
/* True if we are using EH to handle cleanups.  Defaults to false;
   front ends flip it on by calling using_eh_for_cleanups.  */
static bool using_eh_for_cleanups_flag = false;
13227
13228 /* This routine is called from front ends to indicate eh should be used for
13229 cleanups. */
void
using_eh_for_cleanups (void)
{
  /* One-way switch: nothing ever resets this flag.  */
  using_eh_for_cleanups_flag = true;
}
13235
13236 /* Query whether EH is used for cleanups. */
bool
using_eh_for_cleanups_p (void)
{
  /* Read-only accessor for the flag set by using_eh_for_cleanups.  */
  return using_eh_for_cleanups_flag;
}
13242
/* Wrapper for tree_code_name to ensure that tree code is valid.  */
13244 const char *
13245 get_tree_code_name (enum tree_code code)
13246 {
13247 const char *invalid = "<invalid tree code>";
13248
13249 if (code >= MAX_TREE_CODES)
13250 {
13251 if (code == 0xa5a5)
13252 return "ggc_freed";
13253 return invalid;
13254 }
13255
13256 return tree_code_name[code];
13257 }
13258
13259 /* Drops the TREE_OVERFLOW flag from T. */
13260
tree
drop_tree_overflow (tree t)
{
  /* Callers must only pass nodes that actually have the flag set.  */
  gcc_checking_assert (TREE_OVERFLOW (t));

  /* For tree codes with a sharing machinery re-build the result.  */
  if (poly_int_tree_p (t))
    return wide_int_to_tree (TREE_TYPE (t), wi::to_poly_wide (t));

  /* For VECTOR_CST, remove the overflow bits from the encoded elements
     and canonicalize the result.  */
  if (TREE_CODE (t) == VECTOR_CST)
    {
      tree_vector_builder builder;
      builder.new_unary_operation (TREE_TYPE (t), t, true);
      unsigned int count = builder.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elt = VECTOR_CST_ELT (t, i);
	  if (TREE_OVERFLOW (elt))
	    elt = drop_tree_overflow (elt);
	  builder.quick_push (elt);
	}
      return builder.build ();
    }

  /* Otherwise, as all tcc_constants are possibly shared, copy the node
     and drop the flag.  */
  t = copy_node (t);
  TREE_OVERFLOW (t) = 0;

  /* For constants that contain nested constants, drop the flag
     from those as well.  */
  if (TREE_CODE (t) == COMPLEX_CST)
    {
      if (TREE_OVERFLOW (TREE_REALPART (t)))
	TREE_REALPART (t) = drop_tree_overflow (TREE_REALPART (t));
      if (TREE_OVERFLOW (TREE_IMAGPART (t)))
	TREE_IMAGPART (t) = drop_tree_overflow (TREE_IMAGPART (t));
    }

  return t;
}
13304
13305 /* Given a memory reference expression T, return its base address.
13306 The base address of a memory reference expression is the main
13307 object being referenced. For instance, the base address for
13308 'array[i].fld[j]' is 'array'. You can think of this as stripping
13309 away the offset part from a memory address.
13310
13311 This function calls handled_component_p to strip away all the inner
13312 parts of the memory reference until it reaches the base object. */
13313
13314 tree
13315 get_base_address (tree t)
13316 {
13317 while (handled_component_p (t))
13318 t = TREE_OPERAND (t, 0);
13319
13320 if ((TREE_CODE (t) == MEM_REF
13321 || TREE_CODE (t) == TARGET_MEM_REF)
13322 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
13323 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
13324
13325 /* ??? Either the alias oracle or all callers need to properly deal
13326 with WITH_SIZE_EXPRs before we can look through those. */
13327 if (TREE_CODE (t) == WITH_SIZE_EXPR)
13328 return NULL_TREE;
13329
13330 return t;
13331 }
13332
13333 /* Return a tree of sizetype representing the size, in bytes, of the element
13334 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
13335
tree
array_ref_element_size (tree exp)
{
  /* Operand 3, when present, is the element size in alignment units.  */
  tree aligned_size = TREE_OPERAND (exp, 3);
  tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  location_t loc = EXPR_LOCATION (exp);

  /* If a size was specified in the ARRAY_REF, it's the size measured
     in alignment units of the element type.  So multiply by that value.  */
  if (aligned_size)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
	 sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_size) != sizetype)
	aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
      return size_binop_loc (loc, MULT_EXPR, aligned_size,
			     size_int (TYPE_ALIGN_UNIT (elmt_type)));
    }

  /* Otherwise, take the size from that of the element type.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  else
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
}
13360
13361 /* Return a tree representing the lower bound of the array mentioned in
13362 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
13363
tree
array_ref_low_bound (tree exp)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If a lower bound is specified in EXP, use it.  */
  if (TREE_OPERAND (exp, 2))
    return TREE_OPERAND (exp, 2);

  /* Otherwise, if there is a domain type and it has a lower bound, use it,
     substituting for a PLACEHOLDER_EXPR as needed.  */
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);

  /* Otherwise, return a zero of the appropriate type.  Guard against
     an erroneous index type.  */
  tree idxtype = TREE_TYPE (TREE_OPERAND (exp, 1));
  return (idxtype == error_mark_node
	  ? integer_zero_node : build_int_cst (idxtype, 0));
}
13383
13384 /* Return a tree representing the upper bound of the array mentioned in
13385 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
13386
13387 tree
13388 array_ref_up_bound (tree exp)
13389 {
13390 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
13391
13392 /* If there is a domain type and it has an upper bound, use it, substituting
13393 for a PLACEHOLDER_EXPR as needed. */
13394 if (domain_type && TYPE_MAX_VALUE (domain_type))
13395 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
13396
13397 /* Otherwise fail. */
13398 return NULL_TREE;
13399 }
13400
/* Returns true if REF is an array reference, component reference,
   or memory reference to an array at the end of a structure.
   If this is the case, the array may be allocated larger
   than its upper bound implies.  */

bool
array_at_struct_end_p (tree ref)
{
  /* ATYPE is the type of the trailing array under consideration;
     REF is rewritten below to the object containing it.  */
  tree atype;

  if (TREE_CODE (ref) == ARRAY_REF
      || TREE_CODE (ref) == ARRAY_RANGE_REF)
    {
      atype = TREE_TYPE (TREE_OPERAND (ref, 0));
      ref = TREE_OPERAND (ref, 0);
    }
  else if (TREE_CODE (ref) == COMPONENT_REF
	   && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE)
    atype = TREE_TYPE (TREE_OPERAND (ref, 1));
  else if (TREE_CODE (ref) == MEM_REF)
    {
      tree arg = TREE_OPERAND (ref, 0);
      if (TREE_CODE (arg) == ADDR_EXPR)
	arg = TREE_OPERAND (arg, 0);
      tree argtype = TREE_TYPE (arg);
      if (TREE_CODE (argtype) == RECORD_TYPE)
	{
	  /* Only the last field of the record can be a trailing array.  */
	  if (tree fld = last_field (argtype))
	    {
	      atype = TREE_TYPE (fld);
	      if (TREE_CODE (atype) != ARRAY_TYPE)
		return false;
	      /* A declared variable whose last member has a size cannot
		 extend beyond that member.  */
	      if (VAR_P (arg) && DECL_SIZE (fld))
		return false;
	    }
	  else
	    return false;
	}
      else
	return false;
    }
  else
    return false;

  /* A string constant's size is fixed by the constant itself.  */
  if (TREE_CODE (ref) == STRING_CST)
    return false;

  /* Walk up the reference chain to the base object, checking that the
     array stays at the end of every enclosing aggregate.  */
  tree ref_to_array = ref;
  while (handled_component_p (ref))
    {
      /* If the reference chain contains a component reference to a
	 non-union type and there follows another field the reference
	 is not at the end of a structure.  */
      if (TREE_CODE (ref) == COMPONENT_REF)
	{
	  if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
	    {
	      /* Skip non-FIELD_DECL chain entries when looking for a
		 following field.  */
	      tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
	      while (nextf && TREE_CODE (nextf) != FIELD_DECL)
		nextf = DECL_CHAIN (nextf);
	      if (nextf)
		return false;
	    }
	}
      /* If we have a multi-dimensional array we do not consider
	 a non-innermost dimension as flex array if the whole
	 multi-dimensional array is at struct end.
	 Same for an array of aggregates with a trailing array
	 member.  */
      else if (TREE_CODE (ref) == ARRAY_REF)
	return false;
      else if (TREE_CODE (ref) == ARRAY_RANGE_REF)
	;
      /* If we view an underlying object as something else than what we
	 gathered up to now is what we have to rely on.  */
      else if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
	break;
      else
	gcc_unreachable ();

      ref = TREE_OPERAND (ref, 0);
    }

  /* The array now is at struct end.  Treat flexible arrays as
     always subject to extend, even into just padding constrained by
     an underlying decl.  */
  if (! TYPE_SIZE (atype)
      || ! TYPE_DOMAIN (atype)
      || ! TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
    return true;

  /* Look through a MEM_REF of an address to reach the declared entity.  */
  if (TREE_CODE (ref) == MEM_REF
      && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
    ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);

  /* If the reference is based on a declared entity, the size of the array
     is constrained by its given domain.  (Do not trust commons PR/69368).  */
  if (DECL_P (ref)
      && !(flag_unconstrained_commons
	   && VAR_P (ref) && DECL_COMMON (ref))
      && DECL_SIZE_UNIT (ref)
      && TREE_CODE (DECL_SIZE_UNIT (ref)) == INTEGER_CST)
    {
      /* Check whether the array domain covers all of the available
	 padding.  */
      poly_int64 offset;
      if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (atype))) != INTEGER_CST
	  || TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST
	  || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST)
	return true;
      if (! get_addr_base_and_unit_offset (ref_to_array, &offset))
	return true;

      /* If at least one extra element fits it is a flexarray.
	 (max - min + 1 is the declared element count, hence the + 2:
	 one element beyond the declared domain.)  */
      if (known_le ((wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
		     - wi::to_offset (TYPE_MIN_VALUE (TYPE_DOMAIN (atype)))
		     + 2)
		    * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (atype))),
		    wi::to_offset (DECL_SIZE_UNIT (ref)) - offset))
	return true;

      return false;
    }

  return true;
}
13527
13528 /* Return a tree representing the offset, in bytes, of the field referenced
13529 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
13530
13531 tree
13532 component_ref_field_offset (tree exp)
13533 {
13534 tree aligned_offset = TREE_OPERAND (exp, 2);
13535 tree field = TREE_OPERAND (exp, 1);
13536 location_t loc = EXPR_LOCATION (exp);
13537
13538 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
13539 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
13540 value. */
13541 if (aligned_offset)
13542 {
13543 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
13544 sizetype from another type of the same width and signedness. */
13545 if (TREE_TYPE (aligned_offset) != sizetype)
13546 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
13547 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
13548 size_int (DECL_OFFSET_ALIGN (field)
13549 / BITS_PER_UNIT));
13550 }
13551
13552 /* Otherwise, take the offset from that of the field. Substitute
13553 any PLACEHOLDER_EXPR that we have. */
13554 else
13555 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
13556 }
13557
13558 /* Given the initializer INIT, return the initializer for the field
13559 DECL if it exists, otherwise null. Used to obtain the initializer
13560 for a flexible array member and determine its size. */
13561
13562 static tree
13563 get_initializer_for (tree init, tree decl)
13564 {
13565 STRIP_NOPS (init);
13566
13567 tree fld, fld_init;
13568 unsigned HOST_WIDE_INT i;
13569 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), i, fld, fld_init)
13570 {
13571 if (decl == fld)
13572 return fld_init;
13573
13574 if (TREE_CODE (fld) == CONSTRUCTOR)
13575 {
13576 fld_init = get_initializer_for (fld_init, decl);
13577 if (fld_init)
13578 return fld_init;
13579 }
13580 }
13581
13582 return NULL_TREE;
13583 }
13584
/* Determines the size of the member referenced by the COMPONENT_REF
   REF, using its initializer expression if necessary in order to
   determine the size of an initialized flexible array member.
   If non-null, *INTERIOR_ZERO_LENGTH is set when REF refers to
   an interior zero-length array.
   Returns the size as sizetype (which might be zero for an object
   with an uninitialized flexible array member) or null if the size
   cannot be determined.  */

tree
component_ref_size (tree ref, bool *interior_zero_length /* = NULL */)
{
  gcc_assert (TREE_CODE (ref) == COMPONENT_REF);

  /* When the caller doesn't care about interior zero-length arrays,
     point INTERIOR_ZERO_LENGTH at a local so it can be set freely.  */
  bool int_0_len = false;
  if (!interior_zero_length)
    interior_zero_length = &int_0_len;

  tree member = TREE_OPERAND (ref, 1);

  tree memsize = DECL_SIZE_UNIT (member);
  if (memsize)
    {
      tree memtype = TREE_TYPE (member);
      /* A non-array member with a known size is straightforward.  */
      if (TREE_CODE (memtype) != ARRAY_TYPE)
	return memsize;

      bool trailing = array_at_struct_end_p (ref);
      bool zero_length = integer_zerop (memsize);
      if (!trailing && (!interior_zero_length || !zero_length))
	/* MEMBER is either an interior array or is an array with
	   more than one element.  */
	return memsize;

      *interior_zero_length = zero_length && !trailing;
      if (*interior_zero_length)
	memsize = NULL_TREE;

      /* For a trailing non-zero-length array, check the domain: if it
	 declares more than one element, trust the recorded size.  */
      if (!zero_length)
	if (tree dom = TYPE_DOMAIN (memtype))
	  if (tree min = TYPE_MIN_VALUE (dom))
	    if (tree max = TYPE_MAX_VALUE (dom))
	      if (TREE_CODE (min) == INTEGER_CST
		  && TREE_CODE (max) == INTEGER_CST)
		{
		  offset_int minidx = wi::to_offset (min);
		  offset_int maxidx = wi::to_offset (max);
		  if (maxidx - minidx > 0)
		    /* MEMBER is an array with more than 1 element.  */
		    return memsize;
		}
    }

  /* MEMBER is either a bona fide flexible array member, or a zero-length
     array member, or an array of length one treated as such.  */

  /* If the reference is to a declared object and the member a true
     flexible array, try to determine its size from its initializer.  */
  poly_int64 baseoff = 0;
  tree base = get_addr_base_and_unit_offset (ref, &baseoff);
  if (!base || !VAR_P (base))
    {
      /* With no declared variable as base, only interior zero-length
	 arrays reached through a chain of COMPONENT_REFs are handled.  */
      if (!*interior_zero_length)
	return NULL_TREE;

      if (TREE_CODE (TREE_OPERAND (ref, 0)) != COMPONENT_REF)
	return NULL_TREE;

      base = TREE_OPERAND (ref, 0);
      while (TREE_CODE (base) == COMPONENT_REF)
	base = TREE_OPERAND (base, 0);
      baseoff = tree_to_poly_int64 (byte_position (TREE_OPERAND (ref, 1)));
    }

  /* BASE is the declared object of which MEMBER is either a member
     or that is cast to REFTYPE (e.g., a char buffer used to store
     a REFTYPE object).  */
  tree reftype = TREE_TYPE (TREE_OPERAND (ref, 0));
  tree basetype = TREE_TYPE (base);

  /* Determine the base type of the referenced object.  If it's
     the same as REFTYPE and MEMBER has a known size, return it.  */
  tree bt = basetype;
  if (!*interior_zero_length)
    while (TREE_CODE (bt) == ARRAY_TYPE)
      bt = TREE_TYPE (bt);
  bool typematch = useless_type_conversion_p (reftype, bt);
  if (memsize && typematch)
    return memsize;

  memsize = NULL_TREE;

  if (typematch)
    /* MEMBER is a true flexible array member.  Compute its size from
       the initializer of the BASE object if it has one.  */
    if (tree init = DECL_P (base) ? DECL_INITIAL (base) : NULL_TREE)
      if (init != error_mark_node)
	{
	  init = get_initializer_for (init, member);
	  if (init)
	    {
	      memsize = TYPE_SIZE_UNIT (TREE_TYPE (init));
	      if (tree refsize = TYPE_SIZE_UNIT (reftype))
		{
		  /* Use the larger of the initializer size and the tail
		     padding in the enclosing struct.  */
		  poly_int64 rsz = tree_to_poly_int64 (refsize);
		  rsz -= baseoff;
		  if (known_lt (tree_to_poly_int64 (memsize), rsz))
		    memsize = wide_int_to_tree (TREE_TYPE (memsize), rsz);
		}

	      /* The initializer size already accounts for the offset.  */
	      baseoff = 0;
	    }
	}

  if (!memsize)
    {
      if (typematch)
	{
	  if (DECL_P (base)
	      && DECL_EXTERNAL (base)
	      && bt == basetype
	      && !*interior_zero_length)
	    /* The size of a flexible array member of an extern struct
	       with no initializer cannot be determined (it's defined
	       in another translation unit and can have an initializer
	       with an arbitrary number of elements).  */
	    return NULL_TREE;

	  /* Use the size of the base struct or, for interior zero-length
	     arrays, the size of the enclosing type.  */
	  memsize = TYPE_SIZE_UNIT (bt);
	}
      else if (DECL_P (base))
	/* Use the size of the BASE object (possibly an array of some
	   other type such as char used to store the struct).  */
	memsize = DECL_SIZE_UNIT (base);
      else
	return NULL_TREE;
    }

  /* If the flexible array member has a known size use the greater
     of it and the tail padding in the enclosing struct.
     Otherwise, when the size of the flexible array member is unknown
     and the referenced object is not a struct, use the size of its
     type when known.  This detects sizes of array buffers when cast
     to struct types with flexible array members.  */
  if (memsize)
    {
      /* The inner test is redundant here (MEMSIZE is known non-null),
	 but kept for safety.  */
      poly_int64 memsz64 = memsize ? tree_to_poly_int64 (memsize) : 0;
      if (known_lt (baseoff, memsz64))
	{
	  memsz64 -= baseoff;
	  return wide_int_to_tree (TREE_TYPE (memsize), memsz64);
	}
      return size_zero_node;
    }

  /* Return "don't know" for an external non-array object since its
     flexible array member can be initialized to have any number of
     elements.  Otherwise, return zero because the flexible array
     member has no elements.  */
  return (DECL_P (base)
	  && DECL_EXTERNAL (base)
	  && (!typematch
	      || TREE_CODE (basetype) != ARRAY_TYPE)
	  ? NULL_TREE : size_zero_node);
}
13754
13755 /* Return the machine mode of T. For vectors, returns the mode of the
13756 inner type. The main use case is to feed the result to HONOR_NANS,
13757 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
13758
13759 machine_mode
13760 element_mode (const_tree t)
13761 {
13762 if (!TYPE_P (t))
13763 t = TREE_TYPE (t);
13764 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
13765 t = TREE_TYPE (t);
13766 return TYPE_MODE (t);
13767 }
13768
13769 /* Vector types need to re-check the target flags each time we report
13770 the machine mode. We need to do this because attribute target can
13771 change the result of vector_mode_supported_p and have_regs_of_mode
13772 on a per-function basis. Thus the TYPE_MODE of a VECTOR_TYPE can
13773 change on a per-function basis. */
13774 /* ??? Possibly a better solution is to run through all the types
13775 referenced by a function and re-compute the TYPE_MODE once, rather
13776 than make the TYPE_MODE macro call a function. */
13777
13778 machine_mode
13779 vector_type_mode (const_tree t)
13780 {
13781 machine_mode mode;
13782
13783 gcc_assert (TREE_CODE (t) == VECTOR_TYPE);
13784
13785 mode = t->type_common.mode;
13786 if (VECTOR_MODE_P (mode)
13787 && (!targetm.vector_mode_supported_p (mode)
13788 || !have_regs_of_mode[mode]))
13789 {
13790 scalar_int_mode innermode;
13791
13792 /* For integers, try mapping it to a same-sized scalar mode. */
13793 if (is_int_mode (TREE_TYPE (t)->type_common.mode, &innermode))
13794 {
13795 poly_int64 size = (TYPE_VECTOR_SUBPARTS (t)
13796 * GET_MODE_BITSIZE (innermode));
13797 scalar_int_mode mode;
13798 if (int_mode_for_size (size, 0).exists (&mode)
13799 && have_regs_of_mode[mode])
13800 return mode;
13801 }
13802
13803 return BLKmode;
13804 }
13805
13806 return mode;
13807 }
13808
13809 /* Return the size in bits of each element of vector type TYPE. */
13810
13811 unsigned int
13812 vector_element_bits (const_tree type)
13813 {
13814 gcc_checking_assert (VECTOR_TYPE_P (type));
13815 if (VECTOR_BOOLEAN_TYPE_P (type))
13816 return vector_element_size (tree_to_poly_uint64 (TYPE_SIZE (type)),
13817 TYPE_VECTOR_SUBPARTS (type));
13818 return tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
13819 }
13820
13821 /* Calculate the size in bits of each element of vector type TYPE
13822 and return the result as a tree of type bitsizetype. */
13823
13824 tree
13825 vector_element_bits_tree (const_tree type)
13826 {
13827 gcc_checking_assert (VECTOR_TYPE_P (type));
13828 if (VECTOR_BOOLEAN_TYPE_P (type))
13829 return bitsize_int (vector_element_bits (type));
13830 return TYPE_SIZE (TREE_TYPE (type));
13831 }
13832
/* Verify that basic properties of T match TV and thus T can be a variant of
   TV.  TV should be the more specified variant (i.e. the main variant).
   On a mismatch, reports details via error ()/debug_tree () and returns
   false; returns true when T is an acceptable variant of TV.  */

static bool
verify_type_variant (const_tree t, tree tv)
{
  /* Type variant can differ by:

     - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
       ENCODE_QUAL_ADDR_SPACE.
     - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P
       in this case some values may not be set in the variant types
       (see TYPE_COMPLETE_P checks).
     - it is possible to have TYPE_ARTIFICIAL variant of non-artificial type
     - by TYPE_NAME and attributes (i.e. when variant originates by typedef)
     - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
     - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
     - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
       this is necessary to make it possible to merge types from different TUs
     - arrays, pointers and references may have TREE_TYPE that is a variant
       of TREE_TYPE of their main variants.
     - aggregates may have new TYPE_FIELDS list that lists variants of
       the main variant TYPE_FIELDS.
     - vector types may differ by TYPE_VECTOR_OPAQUE
   */

  /* Convenience macro for matching individual fields.  */
#define verify_variant_match(flag)					    \
  do {									    \
    if (flag (tv) != flag (t))						    \
      {									    \
	error ("type variant differs by %s", #flag);			    \
	debug_tree (tv);						    \
	return false;							    \
      }									    \
  } while (false)

  /* tree_base checks.  */

  verify_variant_match (TREE_CODE);
  /* FIXME: Ada builds non-artificial variants of artificial types.
     (Check deliberately disabled via "&& 0".)  */
  if (TYPE_ARTIFICIAL (tv) && 0)
    verify_variant_match (TYPE_ARTIFICIAL);
  if (POINTER_TYPE_P (tv))
    verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
  /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada build.  */
  verify_variant_match (TYPE_UNSIGNED);
  verify_variant_match (TYPE_PACKED);
  if (TREE_CODE (t) == REFERENCE_TYPE)
    verify_variant_match (TYPE_REF_IS_RVALUE);
  if (AGGREGATE_TYPE_P (t))
    verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
  else
    verify_variant_match (TYPE_SATURATING);
  /* FIXME: This check triggers during libstdc++ build.
     (Deliberately disabled via "&& 0".)  */
  if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t) && 0)
    verify_variant_match (TYPE_FINAL_P);

  /* tree_type_common checks.  */

  if (COMPLETE_TYPE_P (t))
    {
      verify_variant_match (TYPE_MODE);
      /* Sizes containing a PLACEHOLDER_EXPR are legitimately distinct
	 trees, so only compare when neither side has one.  */
      if (TREE_CODE (TYPE_SIZE (t)) != PLACEHOLDER_EXPR
	  && TREE_CODE (TYPE_SIZE (tv)) != PLACEHOLDER_EXPR)
	verify_variant_match (TYPE_SIZE);
      if (TREE_CODE (TYPE_SIZE_UNIT (t)) != PLACEHOLDER_EXPR
	  && TREE_CODE (TYPE_SIZE_UNIT (tv)) != PLACEHOLDER_EXPR
	  && TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv))
	{
	  gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t),
					TYPE_SIZE_UNIT (tv), 0));
	  error ("type variant has different %<TYPE_SIZE_UNIT%>");
	  debug_tree (tv);
	  error ("type variant%'s %<TYPE_SIZE_UNIT%>");
	  debug_tree (TYPE_SIZE_UNIT (tv));
	  error ("type%'s %<TYPE_SIZE_UNIT%>");
	  debug_tree (TYPE_SIZE_UNIT (t));
	  return false;
	}
      verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
    }
  verify_variant_match (TYPE_PRECISION);
  if (RECORD_OR_UNION_TYPE_P (t))
    verify_variant_match (TYPE_TRANSPARENT_AGGR);
  else if (TREE_CODE (t) == ARRAY_TYPE)
    verify_variant_match (TYPE_NONALIASED_COMPONENT);
  /* During LTO we merge variant lists from different translation units
     that may differ by TYPE_CONTEXT that in turn may point
     to TRANSLATION_UNIT_DECL.
     Ada also builds variants of types with different TYPE_CONTEXT.
     (Check deliberately disabled via "&& 0".)  */
  if ((!in_lto_p || !TYPE_FILE_SCOPE_P (t)) && 0)
    verify_variant_match (TYPE_CONTEXT);
  if (TREE_CODE (t) == ARRAY_TYPE || TREE_CODE (t) == INTEGER_TYPE)
    verify_variant_match (TYPE_STRING_FLAG);
  if (TREE_CODE (t) == RECORD_TYPE || TREE_CODE (t) == UNION_TYPE)
    verify_variant_match (TYPE_CXX_ODR_P);
  if (TYPE_ALIAS_SET_KNOWN_P (t))
    {
      error ("type variant with %<TYPE_ALIAS_SET_KNOWN_P%>");
      debug_tree (tv);
      return false;
    }

  /* tree_type_non_common checks.  */

  /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
     and dangle the pointer from time to time.  */
  if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
      && (in_lto_p || !TYPE_VFIELD (tv)
	  || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
    {
      error ("type variant has different %<TYPE_VFIELD%>");
      debug_tree (tv);
      return false;
    }
  if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
      || TREE_CODE (t) == INTEGER_TYPE
      || TREE_CODE (t) == BOOLEAN_TYPE
      || TREE_CODE (t) == REAL_TYPE
      || TREE_CODE (t) == FIXED_POINT_TYPE)
    {
      verify_variant_match (TYPE_MAX_VALUE);
      verify_variant_match (TYPE_MIN_VALUE);
    }
  if (TREE_CODE (t) == METHOD_TYPE)
    verify_variant_match (TYPE_METHOD_BASETYPE);
  if (TREE_CODE (t) == OFFSET_TYPE)
    verify_variant_match (TYPE_OFFSET_BASETYPE);
  if (TREE_CODE (t) == ARRAY_TYPE)
    verify_variant_match (TYPE_ARRAY_MAX_SIZE);
  /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
     or even type's main variant.  This is needed to make bootstrap pass
     and the bug seems new in GCC 5.
     C++ FE should be updated to make this consistent and we should check
     that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
     is a match with main variant.

     Also disable the check for Java for now because of parser hack that builds
     first a dummy BINFO and then sometimes replaces it by a real BINFO in some
     of the copies.  */
  if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
      && TYPE_BINFO (t) != TYPE_BINFO (tv)
      /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
	 Since there is no cheap way to tell C++/Java type w/o LTO, do checking
	 at LTO time only.  */
      && (in_lto_p && odr_type_p (t)))
    {
      error ("type variant has different %<TYPE_BINFO%>");
      debug_tree (tv);
      error ("type variant%'s %<TYPE_BINFO%>");
      debug_tree (TYPE_BINFO (tv));
      error ("type%'s %<TYPE_BINFO%>");
      debug_tree (TYPE_BINFO (t));
      return false;
    }

  /* Check various uses of TYPE_VALUES_RAW.  */
  if (TREE_CODE (t) == ENUMERAL_TYPE
      && TYPE_VALUES (t))
    verify_variant_match (TYPE_VALUES);
  else if (TREE_CODE (t) == ARRAY_TYPE)
    verify_variant_match (TYPE_DOMAIN);
  /* Permit incomplete variants of complete type.  While FEs may complete
     all variants, this does not happen for C++ templates in all cases.  */
  else if (RECORD_OR_UNION_TYPE_P (t)
	   && COMPLETE_TYPE_P (t)
	   && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
    {
      tree f1, f2;

      /* Fortran builds qualified variants as new records with items of
	 qualified type.  Verify that they look the same.  */
      for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
	   f1 && f2;
	   f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
	if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
	    || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
		 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
		/* FIXME: gfc_nonrestricted_type builds all types as variants
		   with exception of pointer types.  It deeply copies the type
		   which means that we may end up with a variant type
		   referring non-variant pointer.  We may change it to
		   produce types as variants, too, like
		   objc_get_protocol_qualified_type does.  */
		&& !POINTER_TYPE_P (TREE_TYPE (f1)))
	    || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
	    || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
	  break;
      if (f1 || f2)
	{
	  error ("type variant has different %<TYPE_FIELDS%>");
	  debug_tree (tv);
	  error ("first mismatch is field");
	  debug_tree (f1);
	  error ("and field");
	  debug_tree (f2);
	  return false;
	}
    }
  else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
    verify_variant_match (TYPE_ARG_TYPES);
  /* For C++ the qualified variant of array type is really an array type
     of qualified TREE_TYPE.
     objc builds variants of pointer where pointer to type is a variant, too
     in objc_get_protocol_qualified_type.  */
  if (TREE_TYPE (t) != TREE_TYPE (tv)
      && ((TREE_CODE (t) != ARRAY_TYPE
	   && !POINTER_TYPE_P (t))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (t))
	     != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
    {
      error ("type variant has different %<TREE_TYPE%>");
      debug_tree (tv);
      error ("type variant%'s %<TREE_TYPE%>");
      debug_tree (TREE_TYPE (tv));
      error ("type%'s %<TREE_TYPE%>");
      debug_tree (TREE_TYPE (t));
      return false;
    }
  if (type_with_alias_set_p (t)
      && !gimple_canonical_types_compatible_p (t, tv, false))
    {
      error ("type is not compatible with its variant");
      debug_tree (tv);
      error ("type variant%'s %<TREE_TYPE%>");
      debug_tree (TREE_TYPE (tv));
      error ("type%'s %<TREE_TYPE%>");
      debug_tree (TREE_TYPE (t));
      return false;
    }
  return true;
#undef verify_variant_match
}
14067
14068
14069 /* The TYPE_CANONICAL merging machinery. It should closely resemble
14070 the middle-end types_compatible_p function. It needs to avoid
14071 claiming types are different for types that should be treated
14072 the same with respect to TBAA. Canonical types are also used
14073 for IL consistency checks via the useless_type_conversion_p
14074 predicate which does not handle all type kinds itself but falls
14075 back to pointer-comparison of TYPE_CANONICAL for aggregates
14076 for example. */
14077
14078 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
14079 type calculation because we need to allow inter-operability between signed
14080 and unsigned variants. */
14081
14082 bool
14083 type_with_interoperable_signedness (const_tree type)
14084 {
14085 /* Fortran standard require C_SIGNED_CHAR to be interoperable with both
14086 signed char and unsigned char. Similarly fortran FE builds
14087 C_SIZE_T as signed type, while C defines it unsigned. */
14088
14089 return tree_code_for_canonical_type_merging (TREE_CODE (type))
14090 == INTEGER_TYPE
14091 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
14092 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
14093 }
14094
14095 /* Return true iff T1 and T2 are structurally identical for what
14096 TBAA is concerned.
14097 This function is used both by lto.c canonical type merging and by the
14098 verifier. If TRUST_TYPE_CANONICAL we do not look into structure of types
14099 that have TYPE_CANONICAL defined and assume them equivalent. This is useful
14100 only for LTO because only in these cases TYPE_CANONICAL equivalence
14101 correspond to one defined by gimple_canonical_types_compatible_p. */
14102
14103 bool
14104 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
14105 bool trust_type_canonical)
14106 {
14107 /* Type variants should be same as the main variant. When not doing sanity
14108 checking to verify this fact, go to main variants and save some work. */
14109 if (trust_type_canonical)
14110 {
14111 t1 = TYPE_MAIN_VARIANT (t1);
14112 t2 = TYPE_MAIN_VARIANT (t2);
14113 }
14114
14115 /* Check first for the obvious case of pointer identity. */
14116 if (t1 == t2)
14117 return true;
14118
14119 /* Check that we have two types to compare. */
14120 if (t1 == NULL_TREE || t2 == NULL_TREE)
14121 return false;
14122
14123 /* We consider complete types always compatible with incomplete type.
14124 This does not make sense for canonical type calculation and thus we
14125 need to ensure that we are never called on it.
14126
14127 FIXME: For more correctness the function probably should have three modes
     1) mode assuming that types are complete matching their structure
14129 2) mode allowing incomplete types but producing equivalence classes
14130 and thus ignoring all info from complete types
14131 3) mode allowing incomplete types to match complete but checking
14132 compatibility between complete types.
14133
14134 1 and 2 can be used for canonical type calculation. 3 is the real
14135 definition of type compatibility that can be used i.e. for warnings during
14136 declaration merging. */
14137
14138 gcc_assert (!trust_type_canonical
14139 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
14140
14141 /* If the types have been previously registered and found equal
14142 they still are. */
14143
14144 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
14145 && trust_type_canonical)
14146 {
      /* Do not use TYPE_CANONICAL of pointer types.  For LTO streamed types
	 they are always NULL, but they are set to non-NULL for types
	 constructed by build_pointer_type and variants.  In this case the
	 TYPE_CANONICAL is more fine grained than the equivalence we test
	 (where all pointers are considered equal).  Be sure not to return
	 false negatives.  */
14153 gcc_checking_assert (canonical_type_used_p (t1)
14154 && canonical_type_used_p (t2));
14155 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
14156 }
14157
14158 /* For types where we do ODR based TBAA the canonical type is always
14159 set correctly, so we know that types are different if their
14160 canonical types does not match. */
14161 if (trust_type_canonical
14162 && (odr_type_p (t1) && odr_based_tbaa_p (t1))
14163 != (odr_type_p (t2) && odr_based_tbaa_p (t2)))
14164 return false;
14165
14166 /* Can't be the same type if the types don't have the same code. */
14167 enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
14168 if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
14169 return false;
14170
14171 /* Qualifiers do not matter for canonical type comparison purposes. */
14172
14173 /* Void types and nullptr types are always the same. */
14174 if (TREE_CODE (t1) == VOID_TYPE
14175 || TREE_CODE (t1) == NULLPTR_TYPE)
14176 return true;
14177
14178 /* Can't be the same type if they have different mode. */
14179 if (TYPE_MODE (t1) != TYPE_MODE (t2))
14180 return false;
14181
14182 /* Non-aggregate types can be handled cheaply. */
14183 if (INTEGRAL_TYPE_P (t1)
14184 || SCALAR_FLOAT_TYPE_P (t1)
14185 || FIXED_POINT_TYPE_P (t1)
14186 || TREE_CODE (t1) == VECTOR_TYPE
14187 || TREE_CODE (t1) == COMPLEX_TYPE
14188 || TREE_CODE (t1) == OFFSET_TYPE
14189 || POINTER_TYPE_P (t1))
14190 {
      /* Can't be the same type if they have different precision.  */
14192 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
14193 return false;
14194
14195 /* In some cases the signed and unsigned types are required to be
14196 inter-operable. */
14197 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
14198 && !type_with_interoperable_signedness (t1))
14199 return false;
14200
14201 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
14202 interoperable with "signed char". Unless all frontends are revisited
14203 to agree on these types, we must ignore the flag completely. */
14204
14205 /* Fortran standard define C_PTR type that is compatible with every
14206 C pointer. For this reason we need to glob all pointers into one.
14207 Still pointers in different address spaces are not compatible. */
14208 if (POINTER_TYPE_P (t1))
14209 {
14210 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
14211 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
14212 return false;
14213 }
14214
14215 /* Tail-recurse to components. */
14216 if (TREE_CODE (t1) == VECTOR_TYPE
14217 || TREE_CODE (t1) == COMPLEX_TYPE)
14218 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
14219 TREE_TYPE (t2),
14220 trust_type_canonical);
14221
14222 return true;
14223 }
14224
14225 /* Do type-specific comparisons. */
14226 switch (TREE_CODE (t1))
14227 {
14228 case ARRAY_TYPE:
14229 /* Array types are the same if the element types are the same and
14230 the number of elements are the same. */
14231 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
14232 trust_type_canonical)
14233 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
14234 || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
14235 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
14236 return false;
14237 else
14238 {
14239 tree i1 = TYPE_DOMAIN (t1);
14240 tree i2 = TYPE_DOMAIN (t2);
14241
14242 /* For an incomplete external array, the type domain can be
14243 NULL_TREE. Check this condition also. */
14244 if (i1 == NULL_TREE && i2 == NULL_TREE)
14245 return true;
14246 else if (i1 == NULL_TREE || i2 == NULL_TREE)
14247 return false;
14248 else
14249 {
14250 tree min1 = TYPE_MIN_VALUE (i1);
14251 tree min2 = TYPE_MIN_VALUE (i2);
14252 tree max1 = TYPE_MAX_VALUE (i1);
14253 tree max2 = TYPE_MAX_VALUE (i2);
14254
14255 /* The minimum/maximum values have to be the same. */
14256 if ((min1 == min2
14257 || (min1 && min2
14258 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
14259 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
14260 || operand_equal_p (min1, min2, 0))))
14261 && (max1 == max2
14262 || (max1 && max2
14263 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
14264 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
14265 || operand_equal_p (max1, max2, 0)))))
14266 return true;
14267 else
14268 return false;
14269 }
14270 }
14271
14272 case METHOD_TYPE:
14273 case FUNCTION_TYPE:
14274 /* Function types are the same if the return type and arguments types
14275 are the same. */
14276 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
14277 trust_type_canonical))
14278 return false;
14279
14280 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
14281 return true;
14282 else
14283 {
14284 tree parms1, parms2;
14285
14286 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
14287 parms1 && parms2;
14288 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
14289 {
14290 if (!gimple_canonical_types_compatible_p
14291 (TREE_VALUE (parms1), TREE_VALUE (parms2),
14292 trust_type_canonical))
14293 return false;
14294 }
14295
14296 if (parms1 || parms2)
14297 return false;
14298
14299 return true;
14300 }
14301
14302 case RECORD_TYPE:
14303 case UNION_TYPE:
14304 case QUAL_UNION_TYPE:
14305 {
14306 tree f1, f2;
14307
14308 /* Don't try to compare variants of an incomplete type, before
14309 TYPE_FIELDS has been copied around. */
14310 if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
14311 return true;
14312
14313
14314 if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
14315 return false;
14316
14317 /* For aggregate types, all the fields must be the same. */
14318 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
14319 f1 || f2;
14320 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
14321 {
14322 /* Skip non-fields and zero-sized fields. */
14323 while (f1 && (TREE_CODE (f1) != FIELD_DECL
14324 || (DECL_SIZE (f1)
14325 && integer_zerop (DECL_SIZE (f1)))))
14326 f1 = TREE_CHAIN (f1);
14327 while (f2 && (TREE_CODE (f2) != FIELD_DECL
14328 || (DECL_SIZE (f2)
14329 && integer_zerop (DECL_SIZE (f2)))))
14330 f2 = TREE_CHAIN (f2);
14331 if (!f1 || !f2)
14332 break;
14333 /* The fields must have the same name, offset and type. */
14334 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
14335 || !gimple_compare_field_offset (f1, f2)
14336 || !gimple_canonical_types_compatible_p
14337 (TREE_TYPE (f1), TREE_TYPE (f2),
14338 trust_type_canonical))
14339 return false;
14340 }
14341
14342 /* If one aggregate has more fields than the other, they
14343 are not the same. */
14344 if (f1 || f2)
14345 return false;
14346
14347 return true;
14348 }
14349
14350 default:
14351 /* Consider all types with language specific trees in them mutually
14352 compatible. This is executed only from verify_type and false
14353 positives can be tolerated. */
14354 gcc_assert (!in_lto_p);
14355 return true;
14356 }
14357 }
14358
14359 /* Verify type T. */
14360
14361 void
14362 verify_type (const_tree t)
14363 {
14364 bool error_found = false;
14365 tree mv = TYPE_MAIN_VARIANT (t);
14366 if (!mv)
14367 {
14368 error ("main variant is not defined");
14369 error_found = true;
14370 }
14371 else if (mv != TYPE_MAIN_VARIANT (mv))
14372 {
14373 error ("%<TYPE_MAIN_VARIANT%> has different %<TYPE_MAIN_VARIANT%>");
14374 debug_tree (mv);
14375 error_found = true;
14376 }
14377 else if (t != mv && !verify_type_variant (t, mv))
14378 error_found = true;
14379
14380 tree ct = TYPE_CANONICAL (t);
14381 if (!ct)
14382 ;
14383 else if (TYPE_CANONICAL (t) != ct)
14384 {
14385 error ("%<TYPE_CANONICAL%> has different %<TYPE_CANONICAL%>");
14386 debug_tree (ct);
14387 error_found = true;
14388 }
14389 /* Method and function types cannot be used to address memory and thus
14390 TYPE_CANONICAL really matters only for determining useless conversions.
14391
14392 FIXME: C++ FE produce declarations of builtin functions that are not
14393 compatible with main variants. */
14394 else if (TREE_CODE (t) == FUNCTION_TYPE)
14395 ;
14396 else if (t != ct
14397 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
14398 with variably sized arrays because their sizes possibly
14399 gimplified to different variables. */
14400 && !variably_modified_type_p (ct, NULL)
14401 && !gimple_canonical_types_compatible_p (t, ct, false)
14402 && COMPLETE_TYPE_P (t))
14403 {
14404 error ("%<TYPE_CANONICAL%> is not compatible");
14405 debug_tree (ct);
14406 error_found = true;
14407 }
14408
14409 if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
14410 && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
14411 {
14412 error ("%<TYPE_MODE%> of %<TYPE_CANONICAL%> is not compatible");
14413 debug_tree (ct);
14414 error_found = true;
14415 }
14416 if (TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
14417 {
14418 error ("%<TYPE_CANONICAL%> of main variant is not main variant");
14419 debug_tree (ct);
14420 debug_tree (TYPE_MAIN_VARIANT (ct));
14421 error_found = true;
14422 }
14423
14424
14425 /* Check various uses of TYPE_MIN_VALUE_RAW. */
14426 if (RECORD_OR_UNION_TYPE_P (t))
14427 {
14428 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
14429 and danagle the pointer from time to time. */
14430 if (TYPE_VFIELD (t)
14431 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
14432 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
14433 {
14434 error ("%<TYPE_VFIELD%> is not %<FIELD_DECL%> nor %<TREE_LIST%>");
14435 debug_tree (TYPE_VFIELD (t));
14436 error_found = true;
14437 }
14438 }
14439 else if (TREE_CODE (t) == POINTER_TYPE)
14440 {
14441 if (TYPE_NEXT_PTR_TO (t)
14442 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
14443 {
14444 error ("%<TYPE_NEXT_PTR_TO%> is not %<POINTER_TYPE%>");
14445 debug_tree (TYPE_NEXT_PTR_TO (t));
14446 error_found = true;
14447 }
14448 }
14449 else if (TREE_CODE (t) == REFERENCE_TYPE)
14450 {
14451 if (TYPE_NEXT_REF_TO (t)
14452 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
14453 {
14454 error ("%<TYPE_NEXT_REF_TO%> is not %<REFERENCE_TYPE%>");
14455 debug_tree (TYPE_NEXT_REF_TO (t));
14456 error_found = true;
14457 }
14458 }
14459 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
14460 || TREE_CODE (t) == FIXED_POINT_TYPE)
14461 {
14462 /* FIXME: The following check should pass:
14463 useless_type_conversion_p (const_cast <tree> (t),
14464 TREE_TYPE (TYPE_MIN_VALUE (t))
14465 but does not for C sizetypes in LTO. */
14466 }
14467
14468 /* Check various uses of TYPE_MAXVAL_RAW. */
14469 if (RECORD_OR_UNION_TYPE_P (t))
14470 {
14471 if (!TYPE_BINFO (t))
14472 ;
14473 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
14474 {
14475 error ("%<TYPE_BINFO%> is not %<TREE_BINFO%>");
14476 debug_tree (TYPE_BINFO (t));
14477 error_found = true;
14478 }
14479 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t))
14480 {
14481 error ("%<TYPE_BINFO%> type is not %<TYPE_MAIN_VARIANT%>");
14482 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
14483 error_found = true;
14484 }
14485 }
14486 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
14487 {
14488 if (TYPE_METHOD_BASETYPE (t)
14489 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
14490 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
14491 {
14492 error ("%<TYPE_METHOD_BASETYPE%> is not record nor union");
14493 debug_tree (TYPE_METHOD_BASETYPE (t));
14494 error_found = true;
14495 }
14496 }
14497 else if (TREE_CODE (t) == OFFSET_TYPE)
14498 {
14499 if (TYPE_OFFSET_BASETYPE (t)
14500 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
14501 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
14502 {
14503 error ("%<TYPE_OFFSET_BASETYPE%> is not record nor union");
14504 debug_tree (TYPE_OFFSET_BASETYPE (t));
14505 error_found = true;
14506 }
14507 }
14508 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
14509 || TREE_CODE (t) == FIXED_POINT_TYPE)
14510 {
14511 /* FIXME: The following check should pass:
14512 useless_type_conversion_p (const_cast <tree> (t),
14513 TREE_TYPE (TYPE_MAX_VALUE (t))
14514 but does not for C sizetypes in LTO. */
14515 }
14516 else if (TREE_CODE (t) == ARRAY_TYPE)
14517 {
14518 if (TYPE_ARRAY_MAX_SIZE (t)
14519 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
14520 {
14521 error ("%<TYPE_ARRAY_MAX_SIZE%> not %<INTEGER_CST%>");
14522 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
14523 error_found = true;
14524 }
14525 }
14526 else if (TYPE_MAX_VALUE_RAW (t))
14527 {
14528 error ("%<TYPE_MAX_VALUE_RAW%> non-NULL");
14529 debug_tree (TYPE_MAX_VALUE_RAW (t));
14530 error_found = true;
14531 }
14532
14533 if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
14534 {
14535 error ("%<TYPE_LANG_SLOT_1 (binfo)%> field is non-NULL");
14536 debug_tree (TYPE_LANG_SLOT_1 (t));
14537 error_found = true;
14538 }
14539
14540 /* Check various uses of TYPE_VALUES_RAW. */
14541 if (TREE_CODE (t) == ENUMERAL_TYPE)
14542 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
14543 {
14544 tree value = TREE_VALUE (l);
14545 tree name = TREE_PURPOSE (l);
14546
14547 /* C FE porduce INTEGER_CST of INTEGER_TYPE, while C++ FE uses
14548 CONST_DECL of ENUMERAL TYPE. */
14549 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
14550 {
14551 error ("enum value is not %<CONST_DECL%> or %<INTEGER_CST%>");
14552 debug_tree (value);
14553 debug_tree (name);
14554 error_found = true;
14555 }
14556 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
14557 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
14558 {
14559 error ("enum value type is not %<INTEGER_TYPE%> nor convertible "
14560 "to the enum");
14561 debug_tree (value);
14562 debug_tree (name);
14563 error_found = true;
14564 }
14565 if (TREE_CODE (name) != IDENTIFIER_NODE)
14566 {
14567 error ("enum value name is not %<IDENTIFIER_NODE%>");
14568 debug_tree (value);
14569 debug_tree (name);
14570 error_found = true;
14571 }
14572 }
14573 else if (TREE_CODE (t) == ARRAY_TYPE)
14574 {
14575 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
14576 {
14577 error ("array %<TYPE_DOMAIN%> is not integer type");
14578 debug_tree (TYPE_DOMAIN (t));
14579 error_found = true;
14580 }
14581 }
14582 else if (RECORD_OR_UNION_TYPE_P (t))
14583 {
14584 if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
14585 {
14586 error ("%<TYPE_FIELDS%> defined in incomplete type");
14587 error_found = true;
14588 }
14589 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
14590 {
14591 /* TODO: verify properties of decls. */
14592 if (TREE_CODE (fld) == FIELD_DECL)
14593 ;
14594 else if (TREE_CODE (fld) == TYPE_DECL)
14595 ;
14596 else if (TREE_CODE (fld) == CONST_DECL)
14597 ;
14598 else if (VAR_P (fld))
14599 ;
14600 else if (TREE_CODE (fld) == TEMPLATE_DECL)
14601 ;
14602 else if (TREE_CODE (fld) == USING_DECL)
14603 ;
14604 else if (TREE_CODE (fld) == FUNCTION_DECL)
14605 ;
14606 else
14607 {
14608 error ("wrong tree in %<TYPE_FIELDS%> list");
14609 debug_tree (fld);
14610 error_found = true;
14611 }
14612 }
14613 }
14614 else if (TREE_CODE (t) == INTEGER_TYPE
14615 || TREE_CODE (t) == BOOLEAN_TYPE
14616 || TREE_CODE (t) == OFFSET_TYPE
14617 || TREE_CODE (t) == REFERENCE_TYPE
14618 || TREE_CODE (t) == NULLPTR_TYPE
14619 || TREE_CODE (t) == POINTER_TYPE)
14620 {
14621 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
14622 {
14623 error ("%<TYPE_CACHED_VALUES_P%> is %i while %<TYPE_CACHED_VALUES%> "
14624 "is %p",
14625 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
14626 error_found = true;
14627 }
14628 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
14629 {
14630 error ("%<TYPE_CACHED_VALUES%> is not %<TREE_VEC%>");
14631 debug_tree (TYPE_CACHED_VALUES (t));
14632 error_found = true;
14633 }
14634 /* Verify just enough of cache to ensure that no one copied it to new type.
14635 All copying should go by copy_node that should clear it. */
14636 else if (TYPE_CACHED_VALUES_P (t))
14637 {
14638 int i;
14639 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
14640 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
14641 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
14642 {
14643 error ("wrong %<TYPE_CACHED_VALUES%> entry");
14644 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
14645 error_found = true;
14646 break;
14647 }
14648 }
14649 }
14650 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
14651 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
14652 {
14653 /* C++ FE uses TREE_PURPOSE to store initial values. */
14654 if (TREE_PURPOSE (l) && in_lto_p)
14655 {
14656 error ("%<TREE_PURPOSE%> is non-NULL in %<TYPE_ARG_TYPES%> list");
14657 debug_tree (l);
14658 error_found = true;
14659 }
14660 if (!TYPE_P (TREE_VALUE (l)))
14661 {
14662 error ("wrong entry in %<TYPE_ARG_TYPES%> list");
14663 debug_tree (l);
14664 error_found = true;
14665 }
14666 }
14667 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
14668 {
14669 error ("%<TYPE_VALUES_RAW%> field is non-NULL");
14670 debug_tree (TYPE_VALUES_RAW (t));
14671 error_found = true;
14672 }
14673 if (TREE_CODE (t) != INTEGER_TYPE
14674 && TREE_CODE (t) != BOOLEAN_TYPE
14675 && TREE_CODE (t) != OFFSET_TYPE
14676 && TREE_CODE (t) != REFERENCE_TYPE
14677 && TREE_CODE (t) != NULLPTR_TYPE
14678 && TREE_CODE (t) != POINTER_TYPE
14679 && TYPE_CACHED_VALUES_P (t))
14680 {
14681 error ("%<TYPE_CACHED_VALUES_P%> is set while it should not be");
14682 error_found = true;
14683 }
14684
14685 /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
14686 TYPE_MAIN_VARIANT and it would be odd to add methods only to variatns
14687 of a type. */
14688 if (TREE_CODE (t) == METHOD_TYPE
14689 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
14690 {
14691 error ("%<TYPE_METHOD_BASETYPE%> is not main variant");
14692 error_found = true;
14693 }
14694
14695 if (error_found)
14696 {
14697 debug_tree (const_cast <tree> (t));
14698 internal_error ("%qs failed", __func__);
14699 }
14700 }
14701
14702
/* Return 1 if ARG interpreted as signed in its precision is known to be
   always positive or 2 if ARG is known to be always negative, or 3 if
   ARG may be positive or negative.  */

int
get_range_pos_neg (tree arg)
{
  if (arg == error_mark_node)
    return 3;

  int prec = TYPE_PRECISION (TREE_TYPE (arg));
  int cnt = 0;
  if (TREE_CODE (arg) == INTEGER_CST)
    {
      /* Sign-extend to ARG's precision and read off the sign bit.  */
      wide_int w = wi::sext (wi::to_wide (arg), prec);
      if (wi::neg_p (w))
	return 2;
      else
	return 1;
    }
  /* Strip (widening) conversions from integral types, tracking the
     narrowest precision seen.  CNT bounds the walk so that a long or
     cyclic chain cannot loop forever.  */
  while (CONVERT_EXPR_P (arg)
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
	 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
    {
      arg = TREE_OPERAND (arg, 0);
      /* Narrower value zero extended into wider type
	 will always result in positive values.  */
      if (TYPE_UNSIGNED (TREE_TYPE (arg))
	  && TYPE_PRECISION (TREE_TYPE (arg)) < prec)
	return 1;
      prec = TYPE_PRECISION (TREE_TYPE (arg));
      if (++cnt > 30)
	return 3;
    }

  if (TREE_CODE (arg) != SSA_NAME)
    return 3;
  /* No range info yet: follow SSA conversion definitions backwards,
     applying the same zero-extension reasoning as above.  */
  wide_int arg_min, arg_max;
  while (get_range_info (arg, &arg_min, &arg_max) != VR_RANGE)
    {
      gimple *g = SSA_NAME_DEF_STMT (arg);
      if (is_gimple_assign (g)
	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
	{
	  tree t = gimple_assign_rhs1 (g);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	      && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
	    {
	      if (TYPE_UNSIGNED (TREE_TYPE (t))
		  && TYPE_PRECISION (TREE_TYPE (t)) < prec)
		return 1;
	      prec = TYPE_PRECISION (TREE_TYPE (t));
	      arg = t;
	      if (++cnt > 30)
		return 3;
	      continue;
	    }
	}
      return 3;
    }
  /* A VR_RANGE was found; classify it relative to the sign bit at
     precision PREC.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg)))
    {
      /* For unsigned values, the "positive" range comes
	 below the "negative" range.  */
      if (!wi::neg_p (wi::sext (arg_max, prec), SIGNED))
	return 1;
      if (wi::neg_p (wi::sext (arg_min, prec), SIGNED))
	return 2;
    }
  else
    {
      if (!wi::neg_p (wi::sext (arg_min, prec), SIGNED))
	return 1;
      if (wi::neg_p (wi::sext (arg_max, prec), SIGNED))
	return 2;
    }
  return 3;
}
14781
14782
14783
14784
/* Return true if ARG is marked with the nonnull attribute in the
   current function signature.  ARG must be a PARM_DECL of pointer
   (or OFFSET_TYPE) type belonging to cfun.  */

bool
nonnull_arg_p (const_tree arg)
{
  tree t, attrs, fntype;
  unsigned HOST_WIDE_INT arg_num;

  gcc_assert (TREE_CODE (arg) == PARM_DECL
	      && (POINTER_TYPE_P (TREE_TYPE (arg))
		  || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));

  /* The static chain decl is always non null.  */
  if (arg == cfun->static_chain_decl)
    return true;

  /* THIS argument of method is always non-NULL.  */
  if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
      && arg == DECL_ARGUMENTS (cfun->decl)
      && flag_delete_null_pointer_checks)
    return true;

  /* Values passed by reference are always non-NULL.  */
  if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
      && flag_delete_null_pointer_checks)
    return true;

  fntype = TREE_TYPE (cfun->decl);
  /* A declaration may carry several "nonnull" attributes; lookup_attribute
     inside the loop advances ATTRS to the next occurrence each iteration.  */
  for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
    {
      attrs = lookup_attribute ("nonnull", attrs);

      /* If "nonnull" wasn't specified, we know nothing about the argument.  */
      if (attrs == NULL_TREE)
	return false;

      /* If "nonnull" applies to all the arguments, then ARG is non-null.  */
      if (TREE_VALUE (attrs) == NULL_TREE)
	return true;

      /* Get the position number for ARG in the function signature
	 (1-based, matching the attribute's operand numbering).  */
      for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
	   t;
	   t = DECL_CHAIN (t), arg_num++)
	{
	  if (t == arg)
	    break;
	}

      gcc_assert (t == arg);

      /* Now see if ARG_NUM is mentioned in the nonnull list.  */
      for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
	{
	  if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
	    return true;
	}
    }

  return false;
}
14847
14848 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
14849 information. */
14850
14851 location_t
14852 set_block (location_t loc, tree block)
14853 {
14854 location_t pure_loc = get_pure_location (loc);
14855 source_range src_range = get_range_from_loc (line_table, loc);
14856 return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block);
14857 }
14858
14859 location_t
14860 set_source_range (tree expr, location_t start, location_t finish)
14861 {
14862 source_range src_range;
14863 src_range.m_start = start;
14864 src_range.m_finish = finish;
14865 return set_source_range (expr, src_range);
14866 }
14867
14868 location_t
14869 set_source_range (tree expr, source_range src_range)
14870 {
14871 if (!EXPR_P (expr))
14872 return UNKNOWN_LOCATION;
14873
14874 location_t pure_loc = get_pure_location (EXPR_LOCATION (expr));
14875 location_t adhoc = COMBINE_LOCATION_DATA (line_table,
14876 pure_loc,
14877 src_range,
14878 NULL);
14879 SET_EXPR_LOCATION (expr, adhoc);
14880 return adhoc;
14881 }
14882
14883 /* Return EXPR, potentially wrapped with a node expression LOC,
14884 if !CAN_HAVE_LOCATION_P (expr).
14885
14886 NON_LVALUE_EXPR is used for wrapping constants, apart from STRING_CST.
14887 VIEW_CONVERT_EXPR is used for wrapping non-constants and STRING_CST.
14888
14889 Wrapper nodes can be identified using location_wrapper_p. */
14890
14891 tree
14892 maybe_wrap_with_location (tree expr, location_t loc)
14893 {
14894 if (expr == NULL)
14895 return NULL;
14896 if (loc == UNKNOWN_LOCATION)
14897 return expr;
14898 if (CAN_HAVE_LOCATION_P (expr))
14899 return expr;
14900 /* We should only be adding wrappers for constants and for decls,
14901 or for some exceptional tree nodes (e.g. BASELINK in the C++ FE). */
14902 gcc_assert (CONSTANT_CLASS_P (expr)
14903 || DECL_P (expr)
14904 || EXCEPTIONAL_CLASS_P (expr));
14905
14906 /* For now, don't add wrappers to exceptional tree nodes, to minimize
14907 any impact of the wrapper nodes. */
14908 if (EXCEPTIONAL_CLASS_P (expr))
14909 return expr;
14910
14911 /* If any auto_suppress_location_wrappers are active, don't create
14912 wrappers. */
14913 if (suppress_location_wrappers > 0)
14914 return expr;
14915
14916 tree_code code
14917 = (((CONSTANT_CLASS_P (expr) && TREE_CODE (expr) != STRING_CST)
14918 || (TREE_CODE (expr) == CONST_DECL && !TREE_STATIC (expr)))
14919 ? NON_LVALUE_EXPR : VIEW_CONVERT_EXPR);
14920 tree wrapper = build1_loc (loc, code, TREE_TYPE (expr), expr);
14921 /* Mark this node as being a wrapper. */
14922 EXPR_LOCATION_WRAPPER_P (wrapper) = 1;
14923 return wrapper;
14924 }
14925
14926 int suppress_location_wrappers;
14927
14928 /* Return the name of combined function FN, for debugging purposes. */
14929
14930 const char *
14931 combined_fn_name (combined_fn fn)
14932 {
14933 if (builtin_fn_p (fn))
14934 {
14935 tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
14936 return IDENTIFIER_POINTER (DECL_NAME (fndecl));
14937 }
14938 else
14939 return internal_fn_name (as_internal_fn (fn));
14940 }
14941
/* Return a bitmap with a bit set corresponding to each argument in
   a function call type FNTYPE declared with attribute nonnull,
   or null if none of the function's argument are nonnull.  The caller
   must free the bitmap.  */

bitmap
get_nonnull_args (const_tree fntype)
{
  if (fntype == NULL_TREE)
    return NULL;

  tree attrs = TYPE_ATTRIBUTES (fntype);
  if (!attrs)
    return NULL;

  bitmap argmap = NULL;

  /* A function declaration can specify multiple attribute nonnull,
     each with zero or more arguments.  The loop below creates a bitmap
     representing a union of all the arguments.  An empty (but non-null)
     bitmap means that all arguments have been declared nonnull.  */
  for ( ; attrs; attrs = TREE_CHAIN (attrs))
    {
      /* Advance to the next "nonnull" occurrence in the attribute list.  */
      attrs = lookup_attribute ("nonnull", attrs);
      if (!attrs)
	break;

      if (!argmap)
	argmap = BITMAP_ALLOC (NULL);

      if (!TREE_VALUE (attrs))
	{
	  /* Clear the bitmap in case a previous attribute nonnull
	     set it and this one overrides it for all arguments.  */
	  bitmap_clear (argmap);
	  return argmap;
	}

      /* Iterate over the indices of the format arguments declared nonnull
	 and set a bit for each.  */
      for (tree idx = TREE_VALUE (attrs); idx; idx = TREE_CHAIN (idx))
	{
	  /* Attribute positions are 1-based; the bitmap is 0-based.  */
	  unsigned int val = TREE_INT_CST_LOW (TREE_VALUE (idx)) - 1;
	  bitmap_set_bit (argmap, val);
	}
    }

  return argmap;
}
14991
14992 /* Returns true if TYPE is a type where it and all of its subobjects
14993 (recursively) are of structure, union, or array type. */
14994
14995 static bool
14996 default_is_empty_type (tree type)
14997 {
14998 if (RECORD_OR_UNION_TYPE_P (type))
14999 {
15000 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
15001 if (TREE_CODE (field) == FIELD_DECL
15002 && !DECL_PADDING_P (field)
15003 && !default_is_empty_type (TREE_TYPE (field)))
15004 return false;
15005 return true;
15006 }
15007 else if (TREE_CODE (type) == ARRAY_TYPE)
15008 return (integer_minus_onep (array_type_nelts (type))
15009 || TYPE_DOMAIN (type) == NULL_TREE
15010 || default_is_empty_type (TREE_TYPE (type)));
15011 return false;
15012 }
15013
15014 /* Implement TARGET_EMPTY_RECORD_P. Return true if TYPE is an empty type
15015 that shouldn't be passed via stack. */
15016
15017 bool
15018 default_is_empty_record (const_tree type)
15019 {
15020 if (!abi_version_at_least (12))
15021 return false;
15022
15023 if (type == error_mark_node)
15024 return false;
15025
15026 if (TREE_ADDRESSABLE (type))
15027 return false;
15028
15029 return default_is_empty_type (TYPE_MAIN_VARIANT (type));
15030 }
15031
15032 /* Determine whether TYPE is a structure with a flexible array member,
15033 or a union containing such a structure (possibly recursively). */
15034
15035 bool
15036 flexible_array_type_p (const_tree type)
15037 {
15038 tree x, last;
15039 switch (TREE_CODE (type))
15040 {
15041 case RECORD_TYPE:
15042 last = NULL_TREE;
15043 for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
15044 if (TREE_CODE (x) == FIELD_DECL)
15045 last = x;
15046 if (last == NULL_TREE)
15047 return false;
15048 if (TREE_CODE (TREE_TYPE (last)) == ARRAY_TYPE
15049 && TYPE_SIZE (TREE_TYPE (last)) == NULL_TREE
15050 && TYPE_DOMAIN (TREE_TYPE (last)) != NULL_TREE
15051 && TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (last))) == NULL_TREE)
15052 return true;
15053 return false;
15054 case UNION_TYPE:
15055 for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
15056 {
15057 if (TREE_CODE (x) == FIELD_DECL
15058 && flexible_array_type_p (TREE_TYPE (x)))
15059 return true;
15060 }
15061 return false;
15062 default:
15063 return false;
15064 }
15065 }
15066
15067 /* Like int_size_in_bytes, but handle empty records specially. */
15068
15069 HOST_WIDE_INT
15070 arg_int_size_in_bytes (const_tree type)
15071 {
15072 return TYPE_EMPTY_P (type) ? 0 : int_size_in_bytes (type);
15073 }
15074
15075 /* Like size_in_bytes, but handle empty records specially. */
15076
15077 tree
15078 arg_size_in_bytes (const_tree type)
15079 {
15080 return TYPE_EMPTY_P (type) ? size_zero_node : size_in_bytes (type);
15081 }
15082
15083 /* Return true if an expression with CODE has to have the same result type as
15084 its first operand. */
15085
15086 bool
15087 expr_type_first_operand_type_p (tree_code code)
15088 {
15089 switch (code)
15090 {
15091 case NEGATE_EXPR:
15092 case ABS_EXPR:
15093 case BIT_NOT_EXPR:
15094 case PAREN_EXPR:
15095 case CONJ_EXPR:
15096
15097 case PLUS_EXPR:
15098 case MINUS_EXPR:
15099 case MULT_EXPR:
15100 case TRUNC_DIV_EXPR:
15101 case CEIL_DIV_EXPR:
15102 case FLOOR_DIV_EXPR:
15103 case ROUND_DIV_EXPR:
15104 case TRUNC_MOD_EXPR:
15105 case CEIL_MOD_EXPR:
15106 case FLOOR_MOD_EXPR:
15107 case ROUND_MOD_EXPR:
15108 case RDIV_EXPR:
15109 case EXACT_DIV_EXPR:
15110 case MIN_EXPR:
15111 case MAX_EXPR:
15112 case BIT_IOR_EXPR:
15113 case BIT_XOR_EXPR:
15114 case BIT_AND_EXPR:
15115
15116 case LSHIFT_EXPR:
15117 case RSHIFT_EXPR:
15118 case LROTATE_EXPR:
15119 case RROTATE_EXPR:
15120 return true;
15121
15122 default:
15123 return false;
15124 }
15125 }
15126
15127 /* Return a typenode for the "standard" C type with a given name. */
15128 tree
15129 get_typenode_from_name (const char *name)
15130 {
15131 if (name == NULL || *name == '\0')
15132 return NULL_TREE;
15133
15134 if (strcmp (name, "char") == 0)
15135 return char_type_node;
15136 if (strcmp (name, "unsigned char") == 0)
15137 return unsigned_char_type_node;
15138 if (strcmp (name, "signed char") == 0)
15139 return signed_char_type_node;
15140
15141 if (strcmp (name, "short int") == 0)
15142 return short_integer_type_node;
15143 if (strcmp (name, "short unsigned int") == 0)
15144 return short_unsigned_type_node;
15145
15146 if (strcmp (name, "int") == 0)
15147 return integer_type_node;
15148 if (strcmp (name, "unsigned int") == 0)
15149 return unsigned_type_node;
15150
15151 if (strcmp (name, "long int") == 0)
15152 return long_integer_type_node;
15153 if (strcmp (name, "long unsigned int") == 0)
15154 return long_unsigned_type_node;
15155
15156 if (strcmp (name, "long long int") == 0)
15157 return long_long_integer_type_node;
15158 if (strcmp (name, "long long unsigned int") == 0)
15159 return long_long_unsigned_type_node;
15160
15161 gcc_unreachable ();
15162 }
15163
/* List of pointer types used to declare builtins before we have seen their
   real declaration.

   Keep the size up to date in tree.h !  */
/* Each entry records the specialized pointer node used while declaring
   builtins, the plain pointer node it is compatible with, and the name
   of the struct it points to.  */
const builtin_structptr_type builtin_structptr_types[6] =
{
  { fileptr_type_node, ptr_type_node, "FILE" },
  { const_tm_ptr_type_node, const_ptr_type_node, "tm" },
  { fenv_t_ptr_type_node, ptr_type_node, "fenv_t" },
  { const_fenv_t_ptr_type_node, const_ptr_type_node, "fenv_t" },
  { fexcept_t_ptr_type_node, ptr_type_node, "fexcept_t" },
  { const_fexcept_t_ptr_type_node, const_ptr_type_node, "fexcept_t" }
};
15177
/* Return the maximum object size, as a tree (the maximum of
   ptrdiff_type_node, so that pointer differences within an object
   stay representable).  */

tree
max_object_size (void)
{
  /* To do: Make this a configurable parameter.  */
  return TYPE_MAX_VALUE (ptrdiff_type_node);
}
15186
15187 /* A wrapper around TARGET_VERIFY_TYPE_CONTEXT that makes the silent_p
15188 parameter default to false and that weeds out error_mark_node. */
15189
15190 bool
15191 verify_type_context (location_t loc, type_context_kind context,
15192 const_tree type, bool silent_p)
15193 {
15194 if (type == error_mark_node)
15195 return true;
15196
15197 gcc_assert (TYPE_P (type));
15198 return (!targetm.verify_type_context
15199 || targetm.verify_type_context (loc, context, type, silent_p));
15200 }
15201
15202 #if CHECKING_P
15203
15204 namespace selftest {
15205
15206 /* Selftests for tree. */
15207
15208 /* Verify that integer constants are sane. */
15209
15210 static void
15211 test_integer_constants ()
15212 {
15213 ASSERT_TRUE (integer_type_node != NULL);
15214 ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);
15215
15216 tree type = integer_type_node;
15217
15218 tree zero = build_zero_cst (type);
15219 ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
15220 ASSERT_EQ (type, TREE_TYPE (zero));
15221
15222 tree one = build_int_cst (type, 1);
15223 ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
15224 ASSERT_EQ (type, TREE_TYPE (zero));
15225 }
15226
15227 /* Verify identifiers. */
15228
15229 static void
15230 test_identifiers ()
15231 {
15232 tree identifier = get_identifier ("foo");
15233 ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
15234 ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
15235 }
15236
15237 /* Verify LABEL_DECL. */
15238
15239 static void
15240 test_labels ()
15241 {
15242 tree identifier = get_identifier ("err");
15243 tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
15244 identifier, void_type_node);
15245 ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
15246 ASSERT_FALSE (FORCED_LABEL (label_decl));
15247 }
15248
/* Return a new VECTOR_CST node whose type is TYPE and whose values
   are given by VALS.  */

static tree
build_vector (tree type, vec<tree> vals MEM_STAT_DECL)
{
  /* The caller must supply exactly one value per vector element.  */
  gcc_assert (known_eq (vals.length (), TYPE_VECTOR_SUBPARTS (type)));
  /* Feed the elements in uncompressed (one value per pattern); the
     builder chooses the final encoding.  */
  tree_vector_builder builder (type, vals.length (), 1);
  builder.splice (vals);
  return builder.build ();
}
15260
15261 /* Check that VECTOR_CST ACTUAL contains the elements in EXPECTED. */
15262
15263 static void
15264 check_vector_cst (vec<tree> expected, tree actual)
15265 {
15266 ASSERT_KNOWN_EQ (expected.length (),
15267 TYPE_VECTOR_SUBPARTS (TREE_TYPE (actual)));
15268 for (unsigned int i = 0; i < expected.length (); ++i)
15269 ASSERT_EQ (wi::to_wide (expected[i]),
15270 wi::to_wide (vector_cst_elt (actual, i)));
15271 }
15272
/* Check that VECTOR_CST ACTUAL contains NPATTERNS duplicated elements,
   and that its elements match EXPECTED.  */

static void
check_vector_cst_duplicate (vec<tree> expected, tree actual,
			    unsigned int npatterns)
{
  /* A duplicate encoding stores one element per pattern.  */
  ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
  ASSERT_EQ (1, VECTOR_CST_NELTS_PER_PATTERN (actual));
  ASSERT_EQ (npatterns, vector_cst_encoded_nelts (actual));
  ASSERT_TRUE (VECTOR_CST_DUPLICATE_P (actual));
  ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
  check_vector_cst (expected, actual);
}
15287
/* Check that VECTOR_CST ACTUAL contains NPATTERNS foreground elements
   and NPATTERNS background elements, and that its elements match
   EXPECTED.  */

static void
check_vector_cst_fill (vec<tree> expected, tree actual,
		       unsigned int npatterns)
{
  /* A fill encoding stores two elements per pattern: foreground then
     background.  */
  ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
  ASSERT_EQ (2, VECTOR_CST_NELTS_PER_PATTERN (actual));
  ASSERT_EQ (2 * npatterns, vector_cst_encoded_nelts (actual));
  ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
  ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
  check_vector_cst (expected, actual);
}
15303
15304 /* Check that VECTOR_CST ACTUAL contains NPATTERNS stepped patterns,
15305 and that its elements match EXPECTED. */
15306
15307 static void
15308 check_vector_cst_stepped (vec<tree> expected, tree actual,
15309 unsigned int npatterns)
15310 {
15311 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15312 ASSERT_EQ (3, VECTOR_CST_NELTS_PER_PATTERN (actual));
15313 ASSERT_EQ (3 * npatterns, vector_cst_encoded_nelts (actual));
15314 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
15315 ASSERT_TRUE (VECTOR_CST_STEPPED_P (actual));
15316 check_vector_cst (expected, actual);
15317 }
15318
/* Test the creation of VECTOR_CSTs.  Each case builds an 8-element
   vector of unsigned 16-bit ints and checks how tree_vector_builder
   compresses it (duplicate, fill or stepped encoding).  */

static void
test_vector_cst_patterns (ALONE_CXX_MEM_STAT_INFO)
{
  auto_vec<tree, 8> elements (8);
  elements.quick_grow (8);
  tree element_type = build_nonstandard_integer_type (16, true);
  tree vector_type = build_vector_type (element_type, 8);

  /* Test a simple linear series with a base of 0 and a step of 1:
     { 0, 1, 2, 3, 4, 5, 6, 7 }.  */
  for (unsigned int i = 0; i < 8; ++i)
    elements[i] = build_int_cst (element_type, i);
  tree vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try the same with the first element replaced by 100:
     { 100, 1, 2, 3, 4, 5, 6, 7 }.  */
  elements[0] = build_int_cst (element_type, 100);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try a series that wraps around.
     { 100, 65531, 65532, 65533, 65534, 65535, 0, 1 }.  */
  for (unsigned int i = 1; i < 8; ++i)
    elements[i] = build_int_cst (element_type, (65530 + i) & 0xffff);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try a downward series:
     { 100, 79, 78, 77, 76, 75, 74, 73 }.  */
  for (unsigned int i = 1; i < 8; ++i)
    elements[i] = build_int_cst (element_type, 80 - i);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try two interleaved series with different bases and steps:
     { 100, 53, 66, 206, 62, 212, 58, 218 }.  */
  elements[1] = build_int_cst (element_type, 53);
  for (unsigned int i = 2; i < 8; i += 2)
    {
      elements[i] = build_int_cst (element_type, 70 - i * 2);
      elements[i + 1] = build_int_cst (element_type, 200 + i * 3);
    }
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 2);

  /* Try a duplicated value:
     { 100, 100, 100, 100, 100, 100, 100, 100 }.  */
  for (unsigned int i = 1; i < 8; ++i)
    elements[i] = elements[0];
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_duplicate (elements, vector, 1);

  /* Try an interleaved duplicated value:
     { 100, 55, 100, 55, 100, 55, 100, 55 }.  */
  elements[1] = build_int_cst (element_type, 55);
  for (unsigned int i = 2; i < 8; ++i)
    elements[i] = elements[i - 2];
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_duplicate (elements, vector, 2);

  /* Try a duplicated value with 2 exceptions:
     { 41, 97, 100, 55, 100, 55, 100, 55 }.  */
  elements[0] = build_int_cst (element_type, 41);
  elements[1] = build_int_cst (element_type, 97);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_fill (elements, vector, 2);

  /* Try with and without a step
     { 41, 97, 100, 21, 100, 35, 100, 49 }.  */
  for (unsigned int i = 3; i < 8; i += 2)
    elements[i] = build_int_cst (element_type, i * 7);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 2);

  /* Try a fully-general constant:
     { 41, 97, 100, 21, 100, 9990, 100, 49 }.  */
  elements[5] = build_int_cst (element_type, 9990);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_fill (elements, vector, 4);
}
15402
/* Verify that STRIP_NOPS (NODE) is EXPECTED.
   Helper function for test_location_wrappers, to deal with STRIP_NOPS
   modifying its argument in-place.  */

static void
check_strip_nops (tree node, tree expected)
{
  /* NODE is taken by value so the macro's in-place update doesn't
     clobber the caller's tree.  */
  STRIP_NOPS (node);
  ASSERT_EQ (expected, node);
}
15413
/* Verify location wrappers: that maybe_wrap_with_location wraps
   constants and decls (and only those), that the wrappers record the
   location and can be stripped again, and that STRIP_NOPS removes
   them.  */

static void
test_location_wrappers ()
{
  location_t loc = BUILTINS_LOCATION;

  /* Wrapping NULL_TREE is a no-op.  */
  ASSERT_EQ (NULL_TREE, maybe_wrap_with_location (NULL_TREE, loc));

  /* Wrapping a constant.  */
  tree int_cst = build_int_cst (integer_type_node, 42);
  ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_cst));
  ASSERT_FALSE (location_wrapper_p (int_cst));

  tree wrapped_int_cst = maybe_wrap_with_location (int_cst, loc);
  ASSERT_TRUE (location_wrapper_p (wrapped_int_cst));
  ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_cst));
  ASSERT_EQ (int_cst, tree_strip_any_location_wrapper (wrapped_int_cst));

  /* We shouldn't add wrapper nodes for UNKNOWN_LOCATION.  */
  ASSERT_EQ (int_cst, maybe_wrap_with_location (int_cst, UNKNOWN_LOCATION));

  /* We shouldn't add wrapper nodes for nodes that CAN_HAVE_LOCATION_P.  */
  tree cast = build1 (NOP_EXPR, char_type_node, int_cst);
  ASSERT_TRUE (CAN_HAVE_LOCATION_P (cast));
  ASSERT_EQ (cast, maybe_wrap_with_location (cast, loc));

  /* Wrapping a STRING_CST.  String constants are wrapped in a
     VIEW_CONVERT_EXPR rather than a NON_LVALUE_EXPR.  */
  tree string_cst = build_string (4, "foo");
  ASSERT_FALSE (CAN_HAVE_LOCATION_P (string_cst));
  ASSERT_FALSE (location_wrapper_p (string_cst));

  tree wrapped_string_cst = maybe_wrap_with_location (string_cst, loc);
  ASSERT_TRUE (location_wrapper_p (wrapped_string_cst));
  ASSERT_EQ (VIEW_CONVERT_EXPR, TREE_CODE (wrapped_string_cst));
  ASSERT_EQ (loc, EXPR_LOCATION (wrapped_string_cst));
  ASSERT_EQ (string_cst, tree_strip_any_location_wrapper (wrapped_string_cst));


  /* Wrapping a variable.  */
  tree int_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
			     get_identifier ("some_int_var"),
			     integer_type_node);
  ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_var));
  ASSERT_FALSE (location_wrapper_p (int_var));

  tree wrapped_int_var = maybe_wrap_with_location (int_var, loc);
  ASSERT_TRUE (location_wrapper_p (wrapped_int_var));
  ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_var));
  ASSERT_EQ (int_var, tree_strip_any_location_wrapper (wrapped_int_var));

  /* Verify that "reinterpret_cast<int>(some_int_var)" is not a location
     wrapper: a hand-built NON_LVALUE_EXPR isn't flagged as one.  */
  tree r_cast = build1 (NON_LVALUE_EXPR, integer_type_node, int_var);
  ASSERT_FALSE (location_wrapper_p (r_cast));
  ASSERT_EQ (r_cast, tree_strip_any_location_wrapper (r_cast));

  /* Verify that STRIP_NOPS removes wrappers.  */
  check_strip_nops (wrapped_int_cst, int_cst);
  check_strip_nops (wrapped_string_cst, string_cst);
  check_strip_nops (wrapped_int_var, int_var);
}
15476
/* Test various tree predicates.  Verify that location wrappers don't
   affect the results: each integer/real constant is tested both bare
   and wrapped ("wr_" prefix), and complex variants ("c_" prefix) check
   that the predicates look through to the real and imaginary parts.  */

static void
test_predicates ()
{
  /* Build various constants and wrappers around them.  */

  location_t loc = BUILTINS_LOCATION;

  tree i_0 = build_int_cst (integer_type_node, 0);
  tree wr_i_0 = maybe_wrap_with_location (i_0, loc);

  tree i_1 = build_int_cst (integer_type_node, 1);
  tree wr_i_1 = maybe_wrap_with_location (i_1, loc);

  tree i_m1 = build_int_cst (integer_type_node, -1);
  tree wr_i_m1 = maybe_wrap_with_location (i_m1, loc);

  /* REAL_CSTs with the same values: 0.0, 1.0, -1.0.  */
  tree f_0 = build_real_from_int_cst (float_type_node, i_0);
  tree wr_f_0 = maybe_wrap_with_location (f_0, loc);
  tree f_1 = build_real_from_int_cst (float_type_node, i_1);
  tree wr_f_1 = maybe_wrap_with_location (f_1, loc);
  tree f_m1 = build_real_from_int_cst (float_type_node, i_m1);
  tree wr_f_m1 = maybe_wrap_with_location (f_m1, loc);

  /* COMPLEX_CSTs with a zero imaginary part.  */
  tree c_i_0 = build_complex (NULL_TREE, i_0, i_0);
  tree c_i_1 = build_complex (NULL_TREE, i_1, i_0);
  tree c_i_m1 = build_complex (NULL_TREE, i_m1, i_0);

  tree c_f_0 = build_complex (NULL_TREE, f_0, f_0);
  tree c_f_1 = build_complex (NULL_TREE, f_1, f_0);
  tree c_f_m1 = build_complex (NULL_TREE, f_m1, f_0);

  /* TODO: vector constants.  */

  /* Test integer_onep.  */
  ASSERT_FALSE (integer_onep (i_0));
  ASSERT_FALSE (integer_onep (wr_i_0));
  ASSERT_TRUE (integer_onep (i_1));
  ASSERT_TRUE (integer_onep (wr_i_1));
  ASSERT_FALSE (integer_onep (i_m1));
  ASSERT_FALSE (integer_onep (wr_i_m1));
  ASSERT_FALSE (integer_onep (f_0));
  ASSERT_FALSE (integer_onep (wr_f_0));
  ASSERT_FALSE (integer_onep (f_1));
  ASSERT_FALSE (integer_onep (wr_f_1));
  ASSERT_FALSE (integer_onep (f_m1));
  ASSERT_FALSE (integer_onep (wr_f_m1));
  ASSERT_FALSE (integer_onep (c_i_0));
  ASSERT_TRUE (integer_onep (c_i_1));
  ASSERT_FALSE (integer_onep (c_i_m1));
  ASSERT_FALSE (integer_onep (c_f_0));
  ASSERT_FALSE (integer_onep (c_f_1));
  ASSERT_FALSE (integer_onep (c_f_m1));

  /* Test integer_zerop.  */
  ASSERT_TRUE (integer_zerop (i_0));
  ASSERT_TRUE (integer_zerop (wr_i_0));
  ASSERT_FALSE (integer_zerop (i_1));
  ASSERT_FALSE (integer_zerop (wr_i_1));
  ASSERT_FALSE (integer_zerop (i_m1));
  ASSERT_FALSE (integer_zerop (wr_i_m1));
  ASSERT_FALSE (integer_zerop (f_0));
  ASSERT_FALSE (integer_zerop (wr_f_0));
  ASSERT_FALSE (integer_zerop (f_1));
  ASSERT_FALSE (integer_zerop (wr_f_1));
  ASSERT_FALSE (integer_zerop (f_m1));
  ASSERT_FALSE (integer_zerop (wr_f_m1));
  ASSERT_TRUE (integer_zerop (c_i_0));
  ASSERT_FALSE (integer_zerop (c_i_1));
  ASSERT_FALSE (integer_zerop (c_i_m1));
  ASSERT_FALSE (integer_zerop (c_f_0));
  ASSERT_FALSE (integer_zerop (c_f_1));
  ASSERT_FALSE (integer_zerop (c_f_m1));

  /* Test integer_all_onesp.  Note that it rejects complex constants
     even when the real part is all ones.  */
  ASSERT_FALSE (integer_all_onesp (i_0));
  ASSERT_FALSE (integer_all_onesp (wr_i_0));
  ASSERT_FALSE (integer_all_onesp (i_1));
  ASSERT_FALSE (integer_all_onesp (wr_i_1));
  ASSERT_TRUE (integer_all_onesp (i_m1));
  ASSERT_TRUE (integer_all_onesp (wr_i_m1));
  ASSERT_FALSE (integer_all_onesp (f_0));
  ASSERT_FALSE (integer_all_onesp (wr_f_0));
  ASSERT_FALSE (integer_all_onesp (f_1));
  ASSERT_FALSE (integer_all_onesp (wr_f_1));
  ASSERT_FALSE (integer_all_onesp (f_m1));
  ASSERT_FALSE (integer_all_onesp (wr_f_m1));
  ASSERT_FALSE (integer_all_onesp (c_i_0));
  ASSERT_FALSE (integer_all_onesp (c_i_1));
  ASSERT_FALSE (integer_all_onesp (c_i_m1));
  ASSERT_FALSE (integer_all_onesp (c_f_0));
  ASSERT_FALSE (integer_all_onesp (c_f_1));
  ASSERT_FALSE (integer_all_onesp (c_f_m1));

  /* Test integer_minus_onep.  */
  ASSERT_FALSE (integer_minus_onep (i_0));
  ASSERT_FALSE (integer_minus_onep (wr_i_0));
  ASSERT_FALSE (integer_minus_onep (i_1));
  ASSERT_FALSE (integer_minus_onep (wr_i_1));
  ASSERT_TRUE (integer_minus_onep (i_m1));
  ASSERT_TRUE (integer_minus_onep (wr_i_m1));
  ASSERT_FALSE (integer_minus_onep (f_0));
  ASSERT_FALSE (integer_minus_onep (wr_f_0));
  ASSERT_FALSE (integer_minus_onep (f_1));
  ASSERT_FALSE (integer_minus_onep (wr_f_1));
  ASSERT_FALSE (integer_minus_onep (f_m1));
  ASSERT_FALSE (integer_minus_onep (wr_f_m1));
  ASSERT_FALSE (integer_minus_onep (c_i_0));
  ASSERT_FALSE (integer_minus_onep (c_i_1));
  ASSERT_TRUE (integer_minus_onep (c_i_m1));
  ASSERT_FALSE (integer_minus_onep (c_f_0));
  ASSERT_FALSE (integer_minus_onep (c_f_1));
  ASSERT_FALSE (integer_minus_onep (c_f_m1));

  /* Test integer_each_onep.  Unlike integer_onep it rejects complex
     constants whose imaginary part isn't also one.  */
  ASSERT_FALSE (integer_each_onep (i_0));
  ASSERT_FALSE (integer_each_onep (wr_i_0));
  ASSERT_TRUE (integer_each_onep (i_1));
  ASSERT_TRUE (integer_each_onep (wr_i_1));
  ASSERT_FALSE (integer_each_onep (i_m1));
  ASSERT_FALSE (integer_each_onep (wr_i_m1));
  ASSERT_FALSE (integer_each_onep (f_0));
  ASSERT_FALSE (integer_each_onep (wr_f_0));
  ASSERT_FALSE (integer_each_onep (f_1));
  ASSERT_FALSE (integer_each_onep (wr_f_1));
  ASSERT_FALSE (integer_each_onep (f_m1));
  ASSERT_FALSE (integer_each_onep (wr_f_m1));
  ASSERT_FALSE (integer_each_onep (c_i_0));
  ASSERT_FALSE (integer_each_onep (c_i_1));
  ASSERT_FALSE (integer_each_onep (c_i_m1));
  ASSERT_FALSE (integer_each_onep (c_f_0));
  ASSERT_FALSE (integer_each_onep (c_f_1));
  ASSERT_FALSE (integer_each_onep (c_f_m1));

  /* Test integer_truep.  */
  ASSERT_FALSE (integer_truep (i_0));
  ASSERT_FALSE (integer_truep (wr_i_0));
  ASSERT_TRUE (integer_truep (i_1));
  ASSERT_TRUE (integer_truep (wr_i_1));
  ASSERT_FALSE (integer_truep (i_m1));
  ASSERT_FALSE (integer_truep (wr_i_m1));
  ASSERT_FALSE (integer_truep (f_0));
  ASSERT_FALSE (integer_truep (wr_f_0));
  ASSERT_FALSE (integer_truep (f_1));
  ASSERT_FALSE (integer_truep (wr_f_1));
  ASSERT_FALSE (integer_truep (f_m1));
  ASSERT_FALSE (integer_truep (wr_f_m1));
  ASSERT_FALSE (integer_truep (c_i_0));
  ASSERT_TRUE (integer_truep (c_i_1));
  ASSERT_FALSE (integer_truep (c_i_m1));
  ASSERT_FALSE (integer_truep (c_f_0));
  ASSERT_FALSE (integer_truep (c_f_1));
  ASSERT_FALSE (integer_truep (c_f_m1));

  /* Test integer_nonzerop.  */
  ASSERT_FALSE (integer_nonzerop (i_0));
  ASSERT_FALSE (integer_nonzerop (wr_i_0));
  ASSERT_TRUE (integer_nonzerop (i_1));
  ASSERT_TRUE (integer_nonzerop (wr_i_1));
  ASSERT_TRUE (integer_nonzerop (i_m1));
  ASSERT_TRUE (integer_nonzerop (wr_i_m1));
  ASSERT_FALSE (integer_nonzerop (f_0));
  ASSERT_FALSE (integer_nonzerop (wr_f_0));
  ASSERT_FALSE (integer_nonzerop (f_1));
  ASSERT_FALSE (integer_nonzerop (wr_f_1));
  ASSERT_FALSE (integer_nonzerop (f_m1));
  ASSERT_FALSE (integer_nonzerop (wr_f_m1));
  ASSERT_FALSE (integer_nonzerop (c_i_0));
  ASSERT_TRUE (integer_nonzerop (c_i_1));
  ASSERT_TRUE (integer_nonzerop (c_i_m1));
  ASSERT_FALSE (integer_nonzerop (c_f_0));
  ASSERT_FALSE (integer_nonzerop (c_f_1));
  ASSERT_FALSE (integer_nonzerop (c_f_m1));

  /* Test real_zerop.  */
  ASSERT_FALSE (real_zerop (i_0));
  ASSERT_FALSE (real_zerop (wr_i_0));
  ASSERT_FALSE (real_zerop (i_1));
  ASSERT_FALSE (real_zerop (wr_i_1));
  ASSERT_FALSE (real_zerop (i_m1));
  ASSERT_FALSE (real_zerop (wr_i_m1));
  ASSERT_TRUE (real_zerop (f_0));
  ASSERT_TRUE (real_zerop (wr_f_0));
  ASSERT_FALSE (real_zerop (f_1));
  ASSERT_FALSE (real_zerop (wr_f_1));
  ASSERT_FALSE (real_zerop (f_m1));
  ASSERT_FALSE (real_zerop (wr_f_m1));
  ASSERT_FALSE (real_zerop (c_i_0));
  ASSERT_FALSE (real_zerop (c_i_1));
  ASSERT_FALSE (real_zerop (c_i_m1));
  ASSERT_TRUE (real_zerop (c_f_0));
  ASSERT_FALSE (real_zerop (c_f_1));
  ASSERT_FALSE (real_zerop (c_f_m1));

  /* Test real_onep.  */
  ASSERT_FALSE (real_onep (i_0));
  ASSERT_FALSE (real_onep (wr_i_0));
  ASSERT_FALSE (real_onep (i_1));
  ASSERT_FALSE (real_onep (wr_i_1));
  ASSERT_FALSE (real_onep (i_m1));
  ASSERT_FALSE (real_onep (wr_i_m1));
  ASSERT_FALSE (real_onep (f_0));
  ASSERT_FALSE (real_onep (wr_f_0));
  ASSERT_TRUE (real_onep (f_1));
  ASSERT_TRUE (real_onep (wr_f_1));
  ASSERT_FALSE (real_onep (f_m1));
  ASSERT_FALSE (real_onep (wr_f_m1));
  ASSERT_FALSE (real_onep (c_i_0));
  ASSERT_FALSE (real_onep (c_i_1));
  ASSERT_FALSE (real_onep (c_i_m1));
  ASSERT_FALSE (real_onep (c_f_0));
  ASSERT_TRUE (real_onep (c_f_1));
  ASSERT_FALSE (real_onep (c_f_m1));

  /* Test real_minus_onep.  */
  ASSERT_FALSE (real_minus_onep (i_0));
  ASSERT_FALSE (real_minus_onep (wr_i_0));
  ASSERT_FALSE (real_minus_onep (i_1));
  ASSERT_FALSE (real_minus_onep (wr_i_1));
  ASSERT_FALSE (real_minus_onep (i_m1));
  ASSERT_FALSE (real_minus_onep (wr_i_m1));
  ASSERT_FALSE (real_minus_onep (f_0));
  ASSERT_FALSE (real_minus_onep (wr_f_0));
  ASSERT_FALSE (real_minus_onep (f_1));
  ASSERT_FALSE (real_minus_onep (wr_f_1));
  ASSERT_TRUE (real_minus_onep (f_m1));
  ASSERT_TRUE (real_minus_onep (wr_f_m1));
  ASSERT_FALSE (real_minus_onep (c_i_0));
  ASSERT_FALSE (real_minus_onep (c_i_1));
  ASSERT_FALSE (real_minus_onep (c_i_m1));
  ASSERT_FALSE (real_minus_onep (c_f_0));
  ASSERT_FALSE (real_minus_onep (c_f_1));
  ASSERT_TRUE (real_minus_onep (c_f_m1));

  /* Test zerop.  Accepts both integer and real zeros.  */
  ASSERT_TRUE (zerop (i_0));
  ASSERT_TRUE (zerop (wr_i_0));
  ASSERT_FALSE (zerop (i_1));
  ASSERT_FALSE (zerop (wr_i_1));
  ASSERT_FALSE (zerop (i_m1));
  ASSERT_FALSE (zerop (wr_i_m1));
  ASSERT_TRUE (zerop (f_0));
  ASSERT_TRUE (zerop (wr_f_0));
  ASSERT_FALSE (zerop (f_1));
  ASSERT_FALSE (zerop (wr_f_1));
  ASSERT_FALSE (zerop (f_m1));
  ASSERT_FALSE (zerop (wr_f_m1));
  ASSERT_TRUE (zerop (c_i_0));
  ASSERT_FALSE (zerop (c_i_1));
  ASSERT_FALSE (zerop (c_i_m1));
  ASSERT_TRUE (zerop (c_f_0));
  ASSERT_FALSE (zerop (c_f_1));
  ASSERT_FALSE (zerop (c_f_m1));

  /* Test tree_expr_nonnegative_p.  */
  ASSERT_TRUE (tree_expr_nonnegative_p (i_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (i_1));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (i_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (wr_i_m1));
  ASSERT_TRUE (tree_expr_nonnegative_p (f_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (f_1));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (f_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (wr_f_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_0));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_0));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_m1));

  /* Test tree_expr_nonzero_p.  */
  ASSERT_FALSE (tree_expr_nonzero_p (i_0));
  ASSERT_FALSE (tree_expr_nonzero_p (wr_i_0));
  ASSERT_TRUE (tree_expr_nonzero_p (i_1));
  ASSERT_TRUE (tree_expr_nonzero_p (wr_i_1));
  ASSERT_TRUE (tree_expr_nonzero_p (i_m1));
  ASSERT_TRUE (tree_expr_nonzero_p (wr_i_m1));

  /* Test integer_valued_real_p.  */
  ASSERT_FALSE (integer_valued_real_p (i_0));
  ASSERT_TRUE (integer_valued_real_p (f_0));
  ASSERT_TRUE (integer_valued_real_p (wr_f_0));
  ASSERT_TRUE (integer_valued_real_p (f_1));
  ASSERT_TRUE (integer_valued_real_p (wr_f_1));

  /* Test integer_pow2p.  */
  ASSERT_FALSE (integer_pow2p (i_0));
  ASSERT_TRUE (integer_pow2p (i_1));
  ASSERT_TRUE (integer_pow2p (wr_i_1));

  /* Test uniform_integer_cst_p.  */
  ASSERT_TRUE (uniform_integer_cst_p (i_0));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_0));
  ASSERT_TRUE (uniform_integer_cst_p (i_1));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_1));
  ASSERT_TRUE (uniform_integer_cst_p (i_m1));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_m1));
  ASSERT_FALSE (uniform_integer_cst_p (f_0));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_0));
  ASSERT_FALSE (uniform_integer_cst_p (f_1));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_1));
  ASSERT_FALSE (uniform_integer_cst_p (f_m1));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_m1));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_0));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_1));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_m1));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_0));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_1));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_m1));
}
15793
/* Check that string escaping works correctly, both with unlimited
   message length and with -fmessage-length temporarily set to 5
   (which suppresses escaping of newlines).  */

static void
test_escaped_strings (void)
{
  int saved_cutoff;
  escaped_string msg;

  msg.escape (NULL);
  /* ASSERT_STREQ does not accept NULL as a valid test
     result, so we have to use ASSERT_EQ instead.  */
  ASSERT_EQ (NULL, (const char *) msg);

  msg.escape ("");
  ASSERT_STREQ ("", (const char *) msg);

  msg.escape ("foobar");
  ASSERT_STREQ ("foobar", (const char *) msg);

  /* Ensure that we have -fmessage-length set to 0.  */
  saved_cutoff = pp_line_cutoff (global_dc->printer);
  pp_line_cutoff (global_dc->printer) = 0;

  msg.escape ("foo\nbar");
  ASSERT_STREQ ("foo\\nbar", (const char *) msg);

  msg.escape ("\a\b\f\n\r\t\v");
  ASSERT_STREQ ("\\a\\b\\f\\n\\r\\t\\v", (const char *) msg);

  /* Now repeat the tests with -fmessage-length set to 5.  */
  pp_line_cutoff (global_dc->printer) = 5;

  /* Note that the newline is not translated into an escape.  */
  msg.escape ("foo\nbar");
  ASSERT_STREQ ("foo\nbar", (const char *) msg);

  msg.escape ("\a\b\f\n\r\t\v");
  ASSERT_STREQ ("\\a\\b\\f\n\\r\\t\\v", (const char *) msg);

  /* Restore the original message length setting.  */
  pp_line_cutoff (global_dc->printer) = saved_cutoff;
}
15836
/* Run all of the selftests within this file.  New test functions
   added to this file should be registered here.  */

void
tree_c_tests ()
{
  test_integer_constants ();
  test_identifiers ();
  test_labels ();
  test_vector_cst_patterns ();
  test_location_wrappers ();
  test_predicates ();
  test_escaped_strings ();
}
15850
15851 } // namespace selftest
15852
15853 #endif /* CHECKING_P */
15854
15855 #include "gt-tree.h"
This page took 0.724844 seconds and 5 git commands to generate.