/* Gimple IR support functions.

   Copyright 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
   Contributed by Aldy Hernandez <aldyh@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "target.h"
#include "tree.h"
#include "ggc.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "gimple.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "value-prof.h"
#include "flags.h"
#include "alias.h"
#include "demangle.h"
#include "langhooks.h"

/* Global type table.  FIXME lto, it should be possible to re-use some
   of the type hashing routines in tree.c (type_hash_canon, type_hash_lookup,
   etc), but those assume that types were built with the various
   build_*_type routines which is not the case with the streamer.  */
static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
  htab_t gimple_types;
static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
  htab_t gimple_canonical_types;
static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
  htab_t type_hash_cache;
static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
  htab_t canonical_type_hash_cache;

/* All the tuples have their operand vector (if present) at the very bottom
   of the structure.  Therefore, the offset required to find the
   operands vector is the size of the structure minus the size of the 1
   element tree array at the end (see gimple_ops).  */
#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
	(HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
EXPORTED_CONST size_t gimple_ops_offset_[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT

#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof (struct STRUCT),
static const size_t gsstruct_code_size[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT

#define DEFGSCODE(SYM, NAME, GSSCODE)	NAME,
const char *const gimple_code_name[] = {
#include "gimple.def"
};
#undef DEFGSCODE

#define DEFGSCODE(SYM, NAME, GSSCODE)	GSSCODE,
EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = {
#include "gimple.def"
};
#undef DEFGSCODE

#ifdef GATHER_STATISTICS
/* Gimple stats.  */

int gimple_alloc_counts[(int) gimple_alloc_kind_all];
int gimple_alloc_sizes[(int) gimple_alloc_kind_all];

/* Keep in sync with gimple.h:enum gimple_alloc_kind.  */
static const char * const gimple_alloc_kind_names[] = {
    "assignments",
    "phi nodes",
    "conditionals",
    "sequences",
    "everything else"
};

#endif /* GATHER_STATISTICS */

/* A cache of gimple_seq objects.  Sequences are created and destroyed
   fairly often during gimplification.  */
static GTY ((deletable)) struct gimple_seq_d *gimple_seq_cache;

/* Private API manipulation functions shared only with some
   other files.  */
extern void gimple_set_stored_syms (gimple, bitmap, bitmap_obstack *);
extern void gimple_set_loaded_syms (gimple, bitmap, bitmap_obstack *);

/* Gimple tuple constructors.
   Note: Any constructor taking a ``gimple_seq'' as a parameter can
   be passed a NULL to start with an empty sequence.  */

/* Set the code for statement G to CODE.  */

static inline void
gimple_set_code (gimple g, enum gimple_code code)
{
  g->gsbase.code = code;
}

/* Return the number of bytes needed to hold a GIMPLE statement with
   code CODE.  */

static inline size_t
gimple_size (enum gimple_code code)
{
  return gsstruct_code_size[gss_for_code (code)];
}

/* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
   operands.  */

gimple
gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
{
  size_t size;
  gimple stmt;

  size = gimple_size (code);
  if (num_ops > 0)
    size += sizeof (tree) * (num_ops - 1);

#ifdef GATHER_STATISTICS
  {
    enum gimple_alloc_kind kind = gimple_alloc_kind (code);
    gimple_alloc_counts[(int) kind]++;
    gimple_alloc_sizes[(int) kind] += size;
  }
#endif

  stmt = ggc_alloc_cleared_gimple_statement_d_stat (size PASS_MEM_STAT);
  gimple_set_code (stmt, code);
  gimple_set_num_ops (stmt, num_ops);

  /* Do not call gimple_set_modified here as it has other side
     effects and this tuple is still not completely built.  */
  stmt->gsbase.modified = 1;

  return stmt;
}
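
/* A worked example of the sizing logic above (illustrative only, not
   part of GCC): every GSS structure ends in a one-element tree array,
   so a statement with NUM_OPS > 0 operands needs

     gimple_size (code) + (num_ops - 1) * sizeof (tree)

   bytes.  E.g. a GIMPLE_ASSIGN of the form a = b + c has num_ops == 3
   (one LHS plus two RHS operands), so two extra tree slots are
   allocated beyond the base structure.  */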

/* Set SUBCODE to be the code of the expression computed by statement G.  */

static inline void
gimple_set_subcode (gimple g, unsigned subcode)
{
  /* We only have 16 bits for the RHS code.  Assert that we are not
     overflowing it.  */
  gcc_assert (subcode < (1 << 16));
  g->gsbase.subcode = subcode;
}


/* Build a tuple with operands.  CODE is the statement to build (which
   must be one of the GIMPLE_WITH_OPS tuples).  SUBCODE is the sub-code
   for the new tuple.  NUM_OPS is the number of operands to allocate.  */

#define gimple_build_with_ops(c, s, n) \
  gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)

static gimple
gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
			    unsigned num_ops MEM_STAT_DECL)
{
  gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
  gimple_set_subcode (s, subcode);

  return s;
}


/* Build a GIMPLE_RETURN statement returning RETVAL.  */

gimple
gimple_build_return (tree retval)
{
  gimple s = gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK, 1);
  if (retval)
    gimple_return_set_retval (s, retval);
  return s;
}

/* Reset alias information on call S.  */

void
gimple_call_reset_alias_info (gimple s)
{
  if (gimple_call_flags (s) & ECF_CONST)
    memset (gimple_call_use_set (s), 0, sizeof (struct pt_solution));
  else
    pt_solution_reset (gimple_call_use_set (s));
  if (gimple_call_flags (s) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
    memset (gimple_call_clobber_set (s), 0, sizeof (struct pt_solution));
  else
    pt_solution_reset (gimple_call_clobber_set (s));
}

/* Helper for gimple_build_call, gimple_build_call_valist,
   gimple_build_call_vec and gimple_build_call_from_tree.  Build the basic
   components of a GIMPLE_CALL statement to function FN with NARGS
   arguments.  */

static inline gimple
gimple_build_call_1 (tree fn, unsigned nargs)
{
  gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
  if (TREE_CODE (fn) == FUNCTION_DECL)
    fn = build_fold_addr_expr (fn);
  gimple_set_op (s, 1, fn);
  gimple_call_set_fntype (s, TREE_TYPE (TREE_TYPE (fn)));
  gimple_call_reset_alias_info (s);
  return s;
}

/* Build a GIMPLE_CALL statement to function FN with the arguments
   specified in vector ARGS.  */

gimple
gimple_build_call_vec (tree fn, VEC(tree, heap) *args)
{
  unsigned i;
  unsigned nargs = VEC_length (tree, args);
  gimple call = gimple_build_call_1 (fn, nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, VEC_index (tree, args, i));

  return call;
}

/* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
   arguments.  The ... are the arguments.  */

gimple
gimple_build_call (tree fn, unsigned nargs, ...)
{
  va_list ap;
  gimple call;
  unsigned i;

  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));

  call = gimple_build_call_1 (fn, nargs);

  va_start (ap, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));
  va_end (ap);

  return call;
}
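
/* A hypothetical usage sketch (not part of GCC): building a call to a
   known FUNCTION_DECL and inserting it before an existing statement.
   FNDECL, ARG0, ARG1, LHS and GSI are assumed to be supplied by the
   caller.

     gimple call = gimple_build_call (fndecl, 2, arg0, arg1);
     gimple_call_set_lhs (call, lhs);
     gsi_insert_before (&gsi, call, GSI_SAME_STMT);  */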

/* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
   arguments.  AP contains the arguments.  */

gimple
gimple_build_call_valist (tree fn, unsigned nargs, va_list ap)
{
  gimple call;
  unsigned i;

  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));

  call = gimple_build_call_1 (fn, nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));

  return call;
}

/* Helper for gimple_build_call_internal and gimple_build_call_internal_vec.
   Build the basic components of a GIMPLE_CALL statement to internal
   function FN with NARGS arguments.  */

static inline gimple
gimple_build_call_internal_1 (enum internal_fn fn, unsigned nargs)
{
  gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
  s->gsbase.subcode |= GF_CALL_INTERNAL;
  gimple_call_set_internal_fn (s, fn);
  gimple_call_reset_alias_info (s);
  return s;
}

/* Build a GIMPLE_CALL statement to internal function FN.  NARGS is
   the number of arguments.  The ... are the arguments.  */

gimple
gimple_build_call_internal (enum internal_fn fn, unsigned nargs, ...)
{
  va_list ap;
  gimple call;
  unsigned i;

  call = gimple_build_call_internal_1 (fn, nargs);
  va_start (ap, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));
  va_end (ap);

  return call;
}

/* Build a GIMPLE_CALL statement to internal function FN with the arguments
   specified in vector ARGS.  */

gimple
gimple_build_call_internal_vec (enum internal_fn fn, VEC(tree, heap) *args)
{
  unsigned i, nargs;
  gimple call;

  nargs = VEC_length (tree, args);
  call = gimple_build_call_internal_1 (fn, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, VEC_index (tree, args, i));

  return call;
}

/* Build a GIMPLE_CALL statement from CALL_EXPR T.  Note that T is
   assumed to be in GIMPLE form already.  Minimal checking is done of
   this fact.  */

gimple
gimple_build_call_from_tree (tree t)
{
  unsigned i, nargs;
  gimple call;
  tree fndecl = get_callee_fndecl (t);

  gcc_assert (TREE_CODE (t) == CALL_EXPR);

  nargs = call_expr_nargs (t);
  call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));

  gimple_set_block (call, TREE_BLOCK (t));

  /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL.  */
  gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
  gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
  gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    gimple_call_set_alloca_for_var (call, CALL_ALLOCA_FOR_VAR_P (t));
  else
    gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
  gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
  gimple_call_set_nothrow (call, TREE_NOTHROW (t));
  gimple_set_no_warning (call, TREE_NO_WARNING (t));

  return call;
}

/* Extract the operands and code for expression EXPR into *SUBCODE_P,
   *OP1_P, *OP2_P and *OP3_P respectively.  */

void
extract_ops_from_tree_1 (tree expr, enum tree_code *subcode_p, tree *op1_p,
			 tree *op2_p, tree *op3_p)
{
  enum gimple_rhs_class grhs_class;

  *subcode_p = TREE_CODE (expr);
  grhs_class = get_gimple_rhs_class (*subcode_p);

  if (grhs_class == GIMPLE_TERNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
      *op3_p = TREE_OPERAND (expr, 2);
    }
  else if (grhs_class == GIMPLE_BINARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
      *op3_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_UNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = NULL_TREE;
      *op3_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      *op1_p = expr;
      *op2_p = NULL_TREE;
      *op3_p = NULL_TREE;
    }
  else
    gcc_unreachable ();
}
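
/* For instance (illustrative only): for EXPR `a + b' the extraction
   above yields subcode PLUS_EXPR with op1 `a' and op2 `b'
   (GIMPLE_BINARY_RHS); for a bare `a' it yields subcode VAR_DECL with
   op1 being the expression itself (GIMPLE_SINGLE_RHS), since
   single-operand RHSes are stored whole.  */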

/* Build a GIMPLE_ASSIGN statement.

   LHS is the lvalue to assign to.
   RHS is the expression to assign; it may be unary, binary or ternary.  */

gimple
gimple_build_assign_stat (tree lhs, tree rhs MEM_STAT_DECL)
{
  enum tree_code subcode;
  tree op1, op2, op3;

  extract_ops_from_tree_1 (rhs, &subcode, &op1, &op2, &op3);
  return gimple_build_assign_with_ops_stat (subcode, lhs, op1, op2, op3
					    PASS_MEM_STAT);
}

/* Build a GIMPLE_ASSIGN statement with sub-code SUBCODE and operands
   OP1, OP2 and OP3.  If OP2 and OP3 are NULL then SUBCODE must be of
   class GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS.  */

gimple
gimple_build_assign_with_ops_stat (enum tree_code subcode, tree lhs, tree op1,
                                   tree op2, tree op3 MEM_STAT_DECL)
{
  unsigned num_ops;
  gimple p;

  /* Need 1 operand for LHS and 1 to 3 for the RHS (depending on the
     code).  */
  num_ops = get_gimple_rhs_num_ops (subcode) + 1;

  p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
				  PASS_MEM_STAT);
  gimple_assign_set_lhs (p, lhs);
  gimple_assign_set_rhs1 (p, op1);
  if (op2)
    {
      gcc_assert (num_ops > 2);
      gimple_assign_set_rhs2 (p, op2);
    }

  if (op3)
    {
      gcc_assert (num_ops > 3);
      gimple_assign_set_rhs3 (p, op3);
    }

  return p;
}

/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.

   DST/SRC are the destination and source respectively.  You can pass
   ungimplified trees in DST or SRC, in which case they will be
   converted to a gimple operand if necessary.

   This function returns the newly created GIMPLE_ASSIGN tuple.  */

gimple
gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
{
  tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
  gimplify_and_add (t, seq_p);
  ggc_free (t);
  return gimple_seq_last_stmt (*seq_p);
}
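
/* A minimal usage sketch (hypothetical names, not part of GCC):

     gimple_seq seq = NULL;
     gimple stmt = gimplify_assign (dst_var, src_expr, &seq);

   SEQ then holds any statements created while gimplifying SRC_EXPR,
   ending with the assignment STMT itself.  */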

/* Build a GIMPLE_COND statement.

   PRED_CODE is the comparison code used to compare LHS and RHS.
   T_LABEL is the label to jump to if the condition is true.
   F_LABEL is the label to jump to otherwise.  */

gimple
gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
		   tree t_label, tree f_label)
{
  gimple p;

  gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
  p = gimple_build_with_ops (GIMPLE_COND, pred_code, 4);
  gimple_cond_set_lhs (p, lhs);
  gimple_cond_set_rhs (p, rhs);
  gimple_cond_set_true_label (p, t_label);
  gimple_cond_set_false_label (p, f_label);
  return p;
}
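
/* E.g. (illustrative only), `if (a < b) goto Lt; else goto Lf;' can be
   built as

     gimple cond = gimple_build_cond (LT_EXPR, a, b, lt_label, lf_label);

   where A, B, LT_LABEL and LF_LABEL are assumed to be suitable trees
   supplied by the caller.  */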

/* Extract operands for a GIMPLE_COND statement out of COND_EXPR tree COND.  */

void
gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
                               tree *lhs_p, tree *rhs_p)
{
  gcc_assert (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison
	      || TREE_CODE (cond) == TRUTH_NOT_EXPR
	      || is_gimple_min_invariant (cond)
	      || SSA_VAR_P (cond));

  extract_ops_from_tree (cond, code_p, lhs_p, rhs_p);

  /* Canonicalize conditionals of the form 'if (!VAL)'.  */
  if (*code_p == TRUTH_NOT_EXPR)
    {
      *code_p = EQ_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
  /* Canonicalize conditionals of the form 'if (VAL)'.  */
  else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
    {
      *code_p = NE_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
}
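
/* So (illustrative only) `if (!x)' is canonicalized to the comparison
   `x == 0' and a bare `if (x)' to `x != 0', each with a zero constant
   of X's type as the RHS; real comparisons pass through unchanged.  */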

/* Build a GIMPLE_COND statement from the conditional expression tree
   COND.  T_LABEL and F_LABEL are as in gimple_build_cond.  */

gimple
gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  return gimple_build_cond (code, lhs, rhs, t_label, f_label);
}

/* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
   boolean expression tree COND.  */

void
gimple_cond_set_condition_from_tree (gimple stmt, tree cond)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  gimple_cond_set_condition (stmt, code, lhs, rhs);
}

/* Build a GIMPLE_LABEL statement for LABEL.  */

gimple
gimple_build_label (tree label)
{
  gimple p = gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1);
  gimple_label_set_label (p, label);
  return p;
}

/* Build a GIMPLE_GOTO statement to label DEST.  */

gimple
gimple_build_goto (tree dest)
{
  gimple p = gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1);
  gimple_goto_set_dest (p, dest);
  return p;
}


/* Build a GIMPLE_NOP statement.  */

gimple
gimple_build_nop (void)
{
  return gimple_alloc (GIMPLE_NOP, 0);
}


/* Build a GIMPLE_BIND statement.
   VARS are the variables in BODY.
   BLOCK is the containing block.  */

gimple
gimple_build_bind (tree vars, gimple_seq body, tree block)
{
  gimple p = gimple_alloc (GIMPLE_BIND, 0);
  gimple_bind_set_vars (p, vars);
  if (body)
    gimple_bind_set_body (p, body);
  if (block)
    gimple_bind_set_block (p, block);
  return p;
}

/* Helper function to set the simple fields of an asm stmt.

   STRING is a pointer to a string that is the asm block's assembly code.
   NINPUTS is the number of register inputs.
   NOUTPUTS is the number of register outputs.
   NCLOBBERS is the number of clobbered registers.
   NLABELS is the number of destination labels.  */

static inline gimple
gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
                    unsigned nclobbers, unsigned nlabels)
{
  gimple p;
  int size = strlen (string);

  /* ASMs with labels cannot have outputs.  This should have been
     enforced by the front end.  */
  gcc_assert (nlabels == 0 || noutputs == 0);

  p = gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
			     ninputs + noutputs + nclobbers + nlabels);

  p->gimple_asm.ni = ninputs;
  p->gimple_asm.no = noutputs;
  p->gimple_asm.nc = nclobbers;
  p->gimple_asm.nl = nlabels;
  p->gimple_asm.string = ggc_alloc_string (string, size);

#ifdef GATHER_STATISTICS
  gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;
#endif

  return p;
}

/* Build a GIMPLE_ASM statement.

   STRING is the assembly code.
   INPUTS is a vector of the input register parameters.
   OUTPUTS is a vector of the output register parameters.
   CLOBBERS is a vector of the clobbered register parameters.
   LABELS is a vector of destination labels.  */

gimple
gimple_build_asm_vec (const char *string, VEC(tree,gc)* inputs,
                      VEC(tree,gc)* outputs, VEC(tree,gc)* clobbers,
		      VEC(tree,gc)* labels)
{
  gimple p;
  unsigned i;

  p = gimple_build_asm_1 (string,
			  VEC_length (tree, inputs),
			  VEC_length (tree, outputs),
			  VEC_length (tree, clobbers),
			  VEC_length (tree, labels));

  for (i = 0; i < VEC_length (tree, inputs); i++)
    gimple_asm_set_input_op (p, i, VEC_index (tree, inputs, i));

  for (i = 0; i < VEC_length (tree, outputs); i++)
    gimple_asm_set_output_op (p, i, VEC_index (tree, outputs, i));

  for (i = 0; i < VEC_length (tree, clobbers); i++)
    gimple_asm_set_clobber_op (p, i, VEC_index (tree, clobbers, i));

  for (i = 0; i < VEC_length (tree, labels); i++)
    gimple_asm_set_label_op (p, i, VEC_index (tree, labels, i));

  return p;
}
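
/* A hypothetical construction sketch (not part of GCC): a clobber-only
   asm such as `__asm__ __volatile__ ("" : : : "memory")' could be built
   roughly as

     VEC(tree,gc) *clobbers = NULL;
     VEC_safe_push (tree, gc, clobbers,
		    build_tree_list (NULL_TREE,
				     build_string (6, "memory")));
     gimple asm_stmt = gimple_build_asm_vec ("", NULL, NULL,
					     clobbers, NULL);
     gimple_asm_set_volatile (asm_stmt, true);

   NULL vectors are treated as empty by VEC_length.  */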

/* Build a GIMPLE_CATCH statement.

   TYPES are the catch types.
   HANDLER is the exception handler.  */

gimple
gimple_build_catch (tree types, gimple_seq handler)
{
  gimple p = gimple_alloc (GIMPLE_CATCH, 0);
  gimple_catch_set_types (p, types);
  if (handler)
    gimple_catch_set_handler (p, handler);

  return p;
}

/* Build a GIMPLE_EH_FILTER statement.

   TYPES are the filter's types.
   FAILURE is the filter's failure action.  */

gimple
gimple_build_eh_filter (tree types, gimple_seq failure)
{
  gimple p = gimple_alloc (GIMPLE_EH_FILTER, 0);
  gimple_eh_filter_set_types (p, types);
  if (failure)
    gimple_eh_filter_set_failure (p, failure);

  return p;
}

/* Build a GIMPLE_EH_MUST_NOT_THROW statement.  */

gimple
gimple_build_eh_must_not_throw (tree decl)
{
  gimple p = gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 0);

  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
  gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
  gimple_eh_must_not_throw_set_fndecl (p, decl);

  return p;
}

/* Build a GIMPLE_EH_ELSE statement.  */

gimple
gimple_build_eh_else (gimple_seq n_body, gimple_seq e_body)
{
  gimple p = gimple_alloc (GIMPLE_EH_ELSE, 0);
  gimple_eh_else_set_n_body (p, n_body);
  gimple_eh_else_set_e_body (p, e_body);
  return p;
}

/* Build a GIMPLE_TRY statement.

   EVAL is the expression to evaluate.
   CLEANUP is the cleanup expression.
   KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
   whether this is a try/catch or a try/finally respectively.  */

gimple
gimple_build_try (gimple_seq eval, gimple_seq cleanup,
		  enum gimple_try_flags kind)
{
  gimple p;

  gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
  p = gimple_alloc (GIMPLE_TRY, 0);
  gimple_set_subcode (p, kind);
  if (eval)
    gimple_try_set_eval (p, eval);
  if (cleanup)
    gimple_try_set_cleanup (p, cleanup);

  return p;
}

/* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.

   CLEANUP is the cleanup expression.  */

gimple
gimple_build_wce (gimple_seq cleanup)
{
  gimple p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
  if (cleanup)
    gimple_wce_set_cleanup (p, cleanup);

  return p;
}


/* Build a GIMPLE_RESX statement.  */

gimple
gimple_build_resx (int region)
{
  gimple p = gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0);
  p->gimple_eh_ctrl.region = region;
  return p;
}


/* Helper for constructing a GIMPLE_SWITCH statement.
   INDEX is the switch's index.
   NLABELS is the number of labels in the switch excluding the default.
   DEFAULT_LABEL is the default label for the switch statement.  */

gimple
gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
{
  /* nlabels + 1 default label + 1 index.  */
  gimple p = gimple_build_with_ops (GIMPLE_SWITCH, ERROR_MARK,
				    1 + (default_label != NULL) + nlabels);
  gimple_switch_set_index (p, index);
  if (default_label)
    gimple_switch_set_default_label (p, default_label);
  return p;
}


/* Build a GIMPLE_SWITCH statement.

   INDEX is the switch's index.
   NLABELS is the number of labels in the switch excluding the DEFAULT_LABEL.
   ... are the labels excluding the default.  */

gimple
gimple_build_switch (unsigned nlabels, tree index, tree default_label, ...)
{
  va_list al;
  unsigned i, offset;
  gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);

  /* Store the rest of the labels.  */
  va_start (al, default_label);
  offset = (default_label != NULL);
  for (i = 0; i < nlabels; i++)
    gimple_switch_set_label (p, i + offset, va_arg (al, tree));
  va_end (al);

  return p;
}


/* Build a GIMPLE_SWITCH statement.

   INDEX is the switch's index.
   DEFAULT_LABEL is the default label.
   ARGS is a vector of labels excluding the default.  */

gimple
gimple_build_switch_vec (tree index, tree default_label, VEC(tree, heap) *args)
{
  unsigned i, offset, nlabels = VEC_length (tree, args);
  gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);

  /* Copy the labels from the vector to the switch statement.  */
  offset = (default_label != NULL);
  for (i = 0; i < nlabels; i++)
    gimple_switch_set_label (p, i + offset, VEC_index (tree, args, i));

  return p;
}
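
/* A hypothetical usage sketch (not part of GCC): the labels passed in
   ARGS are CASE_LABEL_EXPRs, e.g.

     VEC(tree, heap) *labels = NULL;
     VEC_safe_push (tree, heap, labels,
		    build_case_label (integer_zero_node, NULL_TREE,
				      case0_label_decl));
     gimple sw = gimple_build_switch_vec (index_var, default_case, labels);

   where INDEX_VAR, DEFAULT_CASE and CASE0_LABEL_DECL are assumed to be
   supplied by the caller, and build_case_label is assumed available to
   wrap the low value, high value and label decl.  */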

/* Build a GIMPLE_EH_DISPATCH statement.  */

gimple
gimple_build_eh_dispatch (int region)
{
  gimple p = gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0);
  p->gimple_eh_ctrl.region = region;
  return p;
}

/* Build a new GIMPLE_DEBUG_BIND statement.

   VAR is bound to VALUE; block and location are taken from STMT.  */

gimple
gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
{
  gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
					 (unsigned)GIMPLE_DEBUG_BIND, 2
					 PASS_MEM_STAT);

  gimple_debug_bind_set_var (p, var);
  gimple_debug_bind_set_value (p, value);
  if (stmt)
    {
      gimple_set_block (p, gimple_block (stmt));
      gimple_set_location (p, gimple_location (stmt));
    }

  return p;
}


/* Build a new GIMPLE_DEBUG_SOURCE_BIND statement.

   VAR is bound to VALUE; block and location are taken from STMT.  */

gimple
gimple_build_debug_source_bind_stat (tree var, tree value,
				     gimple stmt MEM_STAT_DECL)
{
  gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
					 (unsigned)GIMPLE_DEBUG_SOURCE_BIND, 2
					 PASS_MEM_STAT);

  gimple_debug_source_bind_set_var (p, var);
  gimple_debug_source_bind_set_value (p, value);
  if (stmt)
    {
      gimple_set_block (p, gimple_block (stmt));
      gimple_set_location (p, gimple_location (stmt));
    }

  return p;
}

/* Build a GIMPLE_OMP_CRITICAL statement.

   BODY is the sequence of statements for which only one thread can execute.
   NAME is an optional identifier for this critical block.  */

gimple
gimple_build_omp_critical (gimple_seq body, tree name)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0);
  gimple_omp_critical_set_name (p, name);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}

/* Build a GIMPLE_OMP_FOR statement.

   BODY is the sequence of statements inside the for loop.
   CLAUSES are any of the OMP loop construct's clauses: private, firstprivate,
   lastprivate, reductions, ordered, schedule, and nowait.
   COLLAPSE is the collapse count.
   PRE_BODY is the sequence of statements that are loop invariant.  */

gimple
gimple_build_omp_for (gimple_seq body, tree clauses, size_t collapse,
		      gimple_seq pre_body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_FOR, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_for_set_clauses (p, clauses);
  p->gimple_omp_for.collapse = collapse;
  p->gimple_omp_for.iter
      = ggc_alloc_cleared_vec_gimple_omp_for_iter (collapse);
  if (pre_body)
    gimple_omp_for_set_pre_body (p, pre_body);

  return p;
}


/* Build a GIMPLE_OMP_PARALLEL statement.

   BODY is the sequence of statements which are executed in parallel.
   CLAUSES are the OMP parallel construct's clauses.
   CHILD_FN is the function created for the parallel threads to execute.
   DATA_ARG are the shared data argument(s).  */

gimple
gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
			   tree data_arg)
{
  gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_parallel_set_clauses (p, clauses);
  gimple_omp_parallel_set_child_fn (p, child_fn);
  gimple_omp_parallel_set_data_arg (p, data_arg);

  return p;
}

/* Build a GIMPLE_OMP_TASK statement.

   BODY is the sequence of statements which are executed by the explicit task.
   CLAUSES are the OMP task construct's clauses.
   CHILD_FN is the function created for the parallel threads to execute.
   DATA_ARG are the shared data argument(s).
   COPY_FN is the optional function for firstprivate initialization.
   ARG_SIZE and ARG_ALIGN are size and alignment of the data block.  */

gimple
gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
		       tree data_arg, tree copy_fn, tree arg_size,
		       tree arg_align)
{
  gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_task_set_clauses (p, clauses);
  gimple_omp_task_set_child_fn (p, child_fn);
  gimple_omp_task_set_data_arg (p, data_arg);
  gimple_omp_task_set_copy_fn (p, copy_fn);
  gimple_omp_task_set_arg_size (p, arg_size);
  gimple_omp_task_set_arg_align (p, arg_align);

  return p;
}

/* Build a GIMPLE_OMP_SECTION statement for a sections statement.

   BODY is the sequence of statements in the section.  */

gimple
gimple_build_omp_section (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_MASTER statement.

   BODY is the sequence of statements to be executed by just the master.  */

gimple
gimple_build_omp_master (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_CONTINUE statement.

   CONTROL_DEF is the definition of the control variable.
   CONTROL_USE is the use of the control variable.  */

gimple
gimple_build_omp_continue (tree control_def, tree control_use)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0);
  gimple_omp_continue_set_control_def (p, control_def);
  gimple_omp_continue_set_control_use (p, control_use);
  return p;
}

/* Build a GIMPLE_OMP_ORDERED statement.

   BODY is the sequence of statements inside a loop that will be executed
   in sequence.  */

gimple
gimple_build_omp_ordered (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_RETURN statement.
   WAIT_P is true if this is a non-waiting return.  */

gimple
gimple_build_omp_return (bool wait_p)
{
  gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
  if (wait_p)
    gimple_omp_return_set_nowait (p);

  return p;
}


/* Build a GIMPLE_OMP_SECTIONS statement.

   BODY is a sequence of section statements.
   CLAUSES are any of the OMP sections construct's clauses: private,
   firstprivate, lastprivate, reduction, and nowait.  */

gimple
gimple_build_omp_sections (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_sections_set_clauses (p, clauses);

  return p;
}


/* Build a GIMPLE_OMP_SECTIONS_SWITCH.  */

gimple
gimple_build_omp_sections_switch (void)
{
  return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
}


/* Build a GIMPLE_OMP_SINGLE statement.

   BODY is the sequence of statements that will be executed once.
   CLAUSES are any of the OMP single construct's clauses: private,
   firstprivate, copyprivate, nowait.  */

gimple
gimple_build_omp_single (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_single_set_clauses (p, clauses);

  return p;
}

/* Build a GIMPLE_OMP_ATOMIC_LOAD statement.  */

gimple
gimple_build_omp_atomic_load (tree lhs, tree rhs)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0);
  gimple_omp_atomic_load_set_lhs (p, lhs);
  gimple_omp_atomic_load_set_rhs (p, rhs);
  return p;
}

/* Build a GIMPLE_OMP_ATOMIC_STORE statement.

   VAL is the value we are storing.  */

gimple
gimple_build_omp_atomic_store (tree val)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0);
  gimple_omp_atomic_store_set_val (p, val);
  return p;
}

/* Build a GIMPLE_TRANSACTION statement.  */

gimple
gimple_build_transaction (gimple_seq body, tree label)
{
  gimple p = gimple_alloc (GIMPLE_TRANSACTION, 0);
  gimple_transaction_set_body (p, body);
  gimple_transaction_set_label (p, label);
  return p;
}

/* Build a GIMPLE_PREDICT statement.  PREDICTOR is one of the predictors from
   predict.def, OUTCOME is NOT_TAKEN or TAKEN.  */

gimple
gimple_build_predict (enum br_predictor predictor, enum prediction outcome)
{
  gimple p = gimple_alloc (GIMPLE_PREDICT, 0);
  /* Ensure all the predictors fit into the lower bits of the subcode.  */
  gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN);
  gimple_predict_set_predictor (p, predictor);
  gimple_predict_set_outcome (p, outcome);
  return p;
}

#if defined ENABLE_GIMPLE_CHECKING
/* Complain of a gimple type mismatch and die.  */

void
gimple_check_failed (const_gimple gs, const char *file, int line,
		     const char *function, enum gimple_code code,
		     enum tree_code subcode)
{
  internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
		  gimple_code_name[code],
		  tree_code_name[subcode],
		  gimple_code_name[gimple_code (gs)],
		  gs->gsbase.subcode > 0
		    ? tree_code_name[gs->gsbase.subcode]
		    : "",
		  function, trim_filename (file), line);
}
#endif /* ENABLE_GIMPLE_CHECKING */


/* Allocate a new GIMPLE sequence in GC memory and return it.  If
   there are free sequences in GIMPLE_SEQ_CACHE return one of those
   instead.  */

gimple_seq
gimple_seq_alloc (void)
{
  gimple_seq seq = gimple_seq_cache;
  if (seq)
    {
      gimple_seq_cache = gimple_seq_cache->next_free;
      gcc_assert (gimple_seq_cache != seq);
      memset (seq, 0, sizeof (*seq));
    }
  else
    {
      seq = ggc_alloc_cleared_gimple_seq_d ();
#ifdef GATHER_STATISTICS
      gimple_alloc_counts[(int) gimple_alloc_kind_seq]++;
      gimple_alloc_sizes[(int) gimple_alloc_kind_seq] += sizeof (*seq);
#endif
    }

  return seq;
}

/* Return SEQ to the free pool of GIMPLE sequences.  */

void
gimple_seq_free (gimple_seq seq)
{
  if (seq == NULL)
    return;

  gcc_assert (gimple_seq_first (seq) == NULL);
  gcc_assert (gimple_seq_last (seq) == NULL);

  /* If this triggers, it's a sign that the same list is being freed
     twice.  */
  gcc_assert (seq != gimple_seq_cache || gimple_seq_cache == NULL);

  /* Add SEQ to the pool of free sequences.  */
  seq->next_free = gimple_seq_cache;
  gimple_seq_cache = seq;
}


/* Link gimple statement GS to the end of the sequence *SEQ_P.  If
   *SEQ_P is NULL, a new sequence is allocated.  */

void
gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
{
  gimple_stmt_iterator si;

  if (gs == NULL)
    return;

  if (*seq_p == NULL)
    *seq_p = gimple_seq_alloc ();

  si = gsi_last (*seq_p);
  gsi_insert_after (&si, gs, GSI_NEW_STMT);
}

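/* A minimal accumulation sketch (hypothetical, not part of GCC):

     gimple_seq seq = NULL;
     gimple_seq_add_stmt (&seq, gimple_build_label (label1));
     gimple_seq_add_stmt (&seq, gimple_build_nop ());

   The first call allocates the sequence; later calls append at the
   tail via gsi_insert_after.  LABEL1 is assumed to be a LABEL_DECL
   supplied by the caller.  */
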

/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  */

void
gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  if (*dst_p == NULL)
    *dst_p = gimple_seq_alloc ();

  si = gsi_last (*dst_p);
  gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
}


/* Helper function of empty_body_p.  Return true if STMT is an empty
   statement.  */

static bool
empty_stmt_p (gimple stmt)
{
  if (gimple_code (stmt) == GIMPLE_NOP)
    return true;
  if (gimple_code (stmt) == GIMPLE_BIND)
    return empty_body_p (gimple_bind_body (stmt));
  return false;
}


/* Return true if BODY contains nothing but empty statements.  */

bool
empty_body_p (gimple_seq body)
{
  gimple_stmt_iterator i;

  if (gimple_seq_empty_p (body))
    return true;
  for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
    if (!empty_stmt_p (gsi_stmt (i))
	&& !is_gimple_debug (gsi_stmt (i)))
      return false;

  return true;
}


/* Perform a deep copy of sequence SRC and return the result.  */

gimple_seq
gimple_seq_copy (gimple_seq src)
{
  gimple_stmt_iterator gsi;
  gimple_seq new_seq = gimple_seq_alloc ();
  gimple stmt;

  for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gimple_copy (gsi_stmt (gsi));
      gimple_seq_add_stmt (&new_seq, stmt);
    }

  return new_seq;
}


/* Walk all the statements in the sequence SEQ calling walk_gimple_stmt
   on each one.  WI is as in walk_gimple_stmt.

   If walk_gimple_stmt returns non-NULL, the walk is stopped, and the
   value is stored in WI->CALLBACK_RESULT.  Also, the statement that
   produced the value is returned if this statement has not been
   removed by a callback (wi->removed_stmt).  If the statement has
   been removed, NULL is returned.

   Otherwise, all the statements are walked and NULL returned.  */

gimple
walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
		 walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); )
    {
      tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi);
      if (ret)
	{
	  /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
	     to hold it.  */
	  gcc_assert (wi);
	  wi->callback_result = ret;

	  return wi->removed_stmt ? NULL : gsi_stmt (gsi);
	}

      if (!wi->removed_stmt)
	gsi_next (&gsi);
    }

  if (wi)
    wi->callback_result = NULL_TREE;

  return NULL;
}


/* Helper function for walk_gimple_stmt.  Walk operands of a GIMPLE_ASM.  */

static tree
walk_gimple_asm (gimple stmt, walk_tree_fn callback_op,
		 struct walk_stmt_info *wi)
{
  tree ret, op;
  unsigned noutputs;
  const char **oconstraints;
  unsigned i, n;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  if (wi)
    wi->is_lhs = true;

  for (i = 0; i < noutputs; i++)
    {
      op = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem, &allows_reg,
			       &is_inout);
      if (wi)
	wi->val_only = (allows_reg || !allows_mem);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
	return ret;
    }

  n = gimple_asm_ninputs (stmt);
  for (i = 0; i < n; i++)
    {
      op = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);
      if (wi)
	{
	  wi->val_only = (allows_reg || !allows_mem);
	  /* Although input "m" is not really an LHS, we need an lvalue.  */
	  wi->is_lhs = !wi->val_only;
	}
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
	return ret;
    }

  if (wi)
    {
      wi->is_lhs = false;
      wi->val_only = true;
    }

  n = gimple_asm_nlabels (stmt);
  for (i = 0; i < n; i++)
    {
      op = gimple_asm_label_op (stmt, i);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
	return ret;
    }

  return NULL_TREE;
}


/* Helper function of WALK_GIMPLE_STMT.  Walk every tree operand in
   STMT.  CALLBACK_OP and WI are as in WALK_GIMPLE_STMT.

   CALLBACK_OP is called on each operand of STMT via walk_tree.
   Additional parameters to walk_tree must be stored in WI.  For each operand
   OP, walk_tree is called as:

	walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)

   If CALLBACK_OP returns non-NULL for an operand, the remaining
   operands are not scanned.

   The return value is that returned by the last call to walk_tree, or
   NULL_TREE if no CALLBACK_OP is specified.  */

tree
walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
		struct walk_stmt_info *wi)
{
  struct pointer_set_t *pset = (wi) ? wi->pset : NULL;
  unsigned i;
  tree ret = NULL_TREE;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* Walk the RHS operands.  If the LHS is of a non-renamable type or
         is a register variable, we may use a COMPONENT_REF on the RHS.  */
      if (wi)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  wi->val_only
	    = (is_gimple_reg_type (TREE_TYPE (lhs)) && !is_gimple_reg (lhs))
	      || gimple_assign_rhs_class (stmt) != GIMPLE_SINGLE_RHS;
	}

      for (i = 1; i < gimple_num_ops (stmt); i++)
	{
	  ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
			   pset);
	  if (ret)
	    return ret;
	}

      /* Walk the LHS.  If the RHS is appropriate for a memory, we
	 may use a COMPONENT_REF on the LHS.  */
      if (wi)
	{
	  /* If the RHS is of a non-renamable type or is a register variable,
	     we may use a COMPONENT_REF on the LHS.  */
	  tree rhs1 = gimple_assign_rhs1 (stmt);
	  wi->val_only
	    = (is_gimple_reg_type (TREE_TYPE (rhs1)) && !is_gimple_reg (rhs1))
	      || gimple_assign_rhs_class (stmt) != GIMPLE_SINGLE_RHS;
	  wi->is_lhs = true;
	}

      ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset);
      if (ret)
	return ret;

      if (wi)
	{
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;

    case GIMPLE_CALL:
      if (wi)
	{
	  wi->is_lhs = false;
	  wi->val_only = true;
	}

      ret = walk_tree (gimple_call_chain_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_call_fn_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      for (i = 0; i < gimple_call_num_args (stmt); i++)
	{
	  if (wi)
	    wi->val_only
	      = is_gimple_reg_type (TREE_TYPE (gimple_call_arg (stmt, i)));
	  ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
			   pset);
	  if (ret)
	    return ret;
	}

      if (gimple_call_lhs (stmt))
	{
	  if (wi)
	    {
	      wi->is_lhs = true;
	      wi->val_only
		= is_gimple_reg_type (TREE_TYPE (gimple_call_lhs (stmt)));
	    }

	  ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset);
	  if (ret)
	    return ret;
	}

      if (wi)
	{
	  wi->is_lhs = false;
	  wi->val_only = true;
	}
      break;

    case GIMPLE_CATCH:
      ret = walk_tree (gimple_catch_types_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_ASM:
      ret = walk_gimple_asm (stmt, callback_op, wi);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_CONTINUE:
      ret = walk_tree (gimple_omp_continue_control_def_ptr (stmt),
		       callback_op, wi, pset);
      if (ret)
	return ret;

      ret = walk_tree (gimple_omp_continue_control_use_ptr (stmt),
		       callback_op, wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_CRITICAL:
      ret = walk_tree (gimple_omp_critical_name_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	{
	  ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
			   wi, pset);
	}
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_PARALLEL:
      ret = walk_tree (gimple_omp_parallel_clauses_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_parallel_child_fn_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_parallel_data_arg_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_TASK:
      ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_SECTIONS:
      ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;

      ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;

      break;

    case GIMPLE_OMP_SINGLE:
      ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_ATOMIC_LOAD:
      ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;

      ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_ATOMIC_STORE:
      ret = walk_tree (gimple_omp_atomic_store_val_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_TRANSACTION:
      ret = walk_tree (gimple_transaction_label_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

      /* Tuples that do not have operands.  */
    case GIMPLE_NOP:
    case GIMPLE_RESX:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_PREDICT:
      break;

    default:
      {
	enum gimple_statement_structure_enum gss;
	gss = gimple_statement_structure (stmt);
	if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
	  for (i = 0; i < gimple_num_ops (stmt); i++)
	    {
	      ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
	      if (ret)
		return ret;
	    }
      }
      break;
    }

  return NULL_TREE;
}


/* Walk the current statement in GSI (optionally using traversal state
   stored in WI).  If WI is NULL, no state is kept during traversal.
   The callback CALLBACK_STMT is called.  If CALLBACK_STMT indicates
   that it has handled all the operands of the statement, its return
   value is returned.  Otherwise, the return value from CALLBACK_STMT
   is discarded and its operands are scanned.

   If CALLBACK_STMT is NULL or it didn't handle the operands,
   CALLBACK_OP is called on each operand of the statement via
   walk_gimple_op.  If walk_gimple_op returns non-NULL for any
   operand, the remaining operands are not scanned.  In this case, the
   return value from CALLBACK_OP is returned.

   In any other case, NULL_TREE is returned.  */

tree
walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
		  walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple ret;
  tree tree_ret;
  gimple stmt = gsi_stmt (*gsi);

  if (wi)
    {
      wi->gsi = *gsi;
      wi->removed_stmt = false;

      if (wi->want_locations && gimple_has_location (stmt))
	input_location = gimple_location (stmt);
    }

  ret = NULL;

  /* Invoke the statement callback.  Return if the callback handled
     all of STMT operands by itself.  */
  if (callback_stmt)
    {
      bool handled_ops = false;
      tree_ret = callback_stmt (gsi, &handled_ops, wi);
      if (handled_ops)
	return tree_ret;

      /* If CALLBACK_STMT did not handle operands, it should not have
	 a value to return.  */
      gcc_assert (tree_ret == NULL);

      if (wi && wi->removed_stmt)
	return NULL;

      /* Re-read stmt in case the callback changed it.  */
      stmt = gsi_stmt (*gsi);
    }

  /* If CALLBACK_OP is defined, invoke it on every operand of STMT.  */
  if (callback_op)
    {
      tree_ret = walk_gimple_op (stmt, callback_op, wi);
      if (tree_ret)
	return tree_ret;
    }

  /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      ret = walk_gimple_seq (gimple_bind_body (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_CATCH:
      ret = walk_gimple_seq (gimple_catch_handler (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_gimple_seq (gimple_eh_filter_failure (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_EH_ELSE:
      ret = walk_gimple_seq (gimple_eh_else_n_body (stmt),
			     callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      ret = walk_gimple_seq (gimple_eh_else_e_body (stmt),
			     callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_TRY:
      ret = walk_gimple_seq (gimple_try_eval (stmt), callback_stmt, callback_op,
			     wi);
      if (ret)
	return wi->callback_result;

      ret = walk_gimple_seq (gimple_try_cleanup (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_gimple_seq (gimple_omp_for_pre_body (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;

      /* FALL THROUGH.  */
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
      ret = walk_gimple_seq (gimple_omp_body (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_WITH_CLEANUP_EXPR:
      ret = walk_gimple_seq (gimple_wce_cleanup (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_TRANSACTION:
      ret = walk_gimple_seq (gimple_transaction_body (stmt),
			     callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    default:
      gcc_assert (!gimple_has_substatements (stmt));
      break;
    }

  return NULL;
}

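/* A hypothetical walker sketch (not part of GCC): counting GIMPLE_CALL
   statements in a sequence with a statement callback.  The callback
   signature matches walk_stmt_fn; COUNT_CALLS_CB and N_CALLS are
   invented names.

     static tree
     count_calls_cb (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		     struct walk_stmt_info *wi)
     {
       int *n_calls = (int *) wi->info;
       if (is_gimple_call (gsi_stmt (*gsi)))
	 (*n_calls)++;
       *handled_ops_p = true;	/* Skip operand scanning.  */
       return NULL_TREE;
     }

     ...
     int n_calls = 0;
     struct walk_stmt_info wi;
     memset (&wi, 0, sizeof (wi));
     wi.info = &n_calls;
     walk_gimple_seq (seq, count_calls_cb, NULL, &wi);  */
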
/* Set sequence SEQ to be the GIMPLE body for function FNDECL.  */

void
gimple_set_body (tree fndecl, gimple_seq seq)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  if (fn == NULL)
    {
      /* If FNDECL still does not have a function structure associated
	 with it, then it does not make sense for it to receive a
	 GIMPLE body.  */
      gcc_assert (seq == NULL);
    }
  else
    fn->gimple_body = seq;
}

/* Return the body of GIMPLE statements for function FNDECL.  After the
   CFG pass, the function body doesn't exist anymore because it has
   been split up into basic blocks.  In this case, it returns
   NULL.  */

gimple_seq
gimple_body (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return fn ? fn->gimple_body : NULL;
}

1928/* Return true when FNDECL has a GIMPLE body, either in unlowered
1929 or CFG form. */
1930bool
1931gimple_has_body_p (tree fndecl)
1932{
1933 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
1934 return (gimple_body (fndecl) || (fn && fn->cfg));
1935}
1936
1937/* Return true if calls C1 and C2 are known to go to the same function. */
1938
1939bool
1940gimple_call_same_target_p (const_gimple c1, const_gimple c2)
1941{
1942 if (gimple_call_internal_p (c1))
1943 return (gimple_call_internal_p (c2)
1944 && gimple_call_internal_fn (c1) == gimple_call_internal_fn (c2));
1945 else
1946 return (gimple_call_fn (c1) == gimple_call_fn (c2)
1947 || (gimple_call_fndecl (c1)
1948 && gimple_call_fndecl (c1) == gimple_call_fndecl (c2)));
1949}
1950
1951/* Detect flags from a GIMPLE_CALL. This is just like
1952 call_expr_flags, but for gimple tuples. */
1953
1954int
1955gimple_call_flags (const_gimple stmt)
1956{
1957 int flags;
1958 tree decl = gimple_call_fndecl (stmt);
1959
1960 if (decl)
1961 flags = flags_from_decl_or_type (decl);
1962 else if (gimple_call_internal_p (stmt))
1963 flags = internal_fn_flags (gimple_call_internal_fn (stmt));
1964 else
1965 flags = flags_from_decl_or_type (gimple_call_fntype (stmt));
1966
1967 if (stmt->gsbase.subcode & GF_CALL_NOTHROW)
1968 flags |= ECF_NOTHROW;
1969
1970 return flags;
1971}
1972
1973/* Return the "fn spec" string for call STMT. */
1974
1975static tree
1976gimple_call_fnspec (const_gimple stmt)
1977{
1978 tree type, attr;
1979
1980 type = gimple_call_fntype (stmt);
1981 if (!type)
1982 return NULL_TREE;
1983
1984 attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
1985 if (!attr)
1986 return NULL_TREE;
1987
1988 return TREE_VALUE (TREE_VALUE (attr));
1989}
1990
1991/* Detects argument flags for argument number ARG on call STMT. */
1992
1993int
1994gimple_call_arg_flags (const_gimple stmt, unsigned arg)
1995{
1996 tree attr = gimple_call_fnspec (stmt);
1997
1998 if (!attr || 1 + arg >= (unsigned) TREE_STRING_LENGTH (attr))
1999 return 0;
2000
2001 switch (TREE_STRING_POINTER (attr)[1 + arg])
2002 {
2003 case 'x':
2004 case 'X':
2005 return EAF_UNUSED;
2006
2007 case 'R':
2008 return EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE;
2009
2010 case 'r':
2011 return EAF_NOCLOBBER | EAF_NOESCAPE;
2012
2013 case 'W':
2014 return EAF_DIRECT | EAF_NOESCAPE;
2015
2016 case 'w':
2017 return EAF_NOESCAPE;
2018
2019 case '.':
2020 default:
2021 return 0;
2022 }
2023}
2024
2025/* Detects return flags for the call STMT. */
2026
2027int
2028gimple_call_return_flags (const_gimple stmt)
2029{
2030 tree attr;
2031
2032 if (gimple_call_flags (stmt) & ECF_MALLOC)
2033 return ERF_NOALIAS;
2034
2035 attr = gimple_call_fnspec (stmt);
2036 if (!attr || TREE_STRING_LENGTH (attr) < 1)
2037 return 0;
2038
2039 switch (TREE_STRING_POINTER (attr)[0])
2040 {
2041 case '1':
2042 case '2':
2043 case '3':
2044 case '4':
2045 return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');
2046
2047 case 'm':
2048 return ERF_NOALIAS;
2049
2050 case '.':
2051 default:
2052 return 0;
2053 }
2054}
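/* Editorial illustration, not part of the original source: decoding a
   hypothetical "fn spec" string "1w" with the two functions above.
   Character 0 describes the return value and character 1 + I describes
   argument I, as implied by the switch statements. */

/* Assuming CALL is a gimple call whose fntype carries fnspec "1w":

     gimple_call_return_flags (call)
       == ERF_RETURNS_ARG | 0    -- '1': the call returns argument 0.

     gimple_call_arg_flags (call, 0)
       == EAF_NOESCAPE           -- 'w': argument 0 does not escape.  */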
2055
2056
2057/* Return true if GS is a copy assignment. */
2058
2059bool
2060gimple_assign_copy_p (gimple gs)
2061{
2062 return (gimple_assign_single_p (gs)
2063 && is_gimple_val (gimple_op (gs, 1)));
2064}
2065
2066
2067/* Return true if GS is an SSA_NAME copy assignment. */
2068
2069bool
2070gimple_assign_ssa_name_copy_p (gimple gs)
2071{
2072 return (gimple_assign_single_p (gs)
2073 && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
2074 && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
2075}
2076
2077
2078/* Return true if GS is an assignment with a unary RHS, but the
2079 operator has no effect on the assigned value. The logic is adapted
2080 from STRIP_NOPS. This predicate is intended to be used in tuplifying
2081 instances in which STRIP_NOPS was previously applied to the RHS of
2082 an assignment.
2083
2084 NOTE: In the use cases that led to the creation of this function
2085 and of gimple_assign_single_p, it is typical to test for either
2086 condition and to proceed in the same manner. In each case, the
2087 assigned value is represented by the single RHS operand of the
2088 assignment. I suspect there may be cases where gimple_assign_copy_p,
2089 gimple_assign_single_p, or equivalent logic is used where a similar
2090 treatment of unary NOPs is appropriate. */
2091
2092bool
2093gimple_assign_unary_nop_p (gimple gs)
2094{
2095 return (is_gimple_assign (gs)
2096 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
2097 || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
2098 && gimple_assign_rhs1 (gs) != error_mark_node
2099 && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
2100 == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
2101}
2102
2103/* Set BB to be the basic block holding G. */
2104
2105void
2106gimple_set_bb (gimple stmt, basic_block bb)
2107{
2108 stmt->gsbase.bb = bb;
2109
2110 /* If the statement is a label, add the label to block-to-labels map
2111 so that we can speed up edge creation for GIMPLE_GOTOs. */
2112 if (cfun->cfg && gimple_code (stmt) == GIMPLE_LABEL)
2113 {
2114 tree t;
2115 int uid;
2116
2117 t = gimple_label_label (stmt);
2118 uid = LABEL_DECL_UID (t);
2119 if (uid == -1)
2120 {
2121 unsigned old_len = VEC_length (basic_block, label_to_block_map);
2122 LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
2123 if (old_len <= (unsigned) uid)
2124 {
2125 unsigned new_len = 3 * uid / 2 + 1;
2126
2127 VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
2128 new_len);
2129 }
2130 }
2131
2132 VEC_replace (basic_block, label_to_block_map, uid, bb);
2133 }
2134}
2135
2136
2137/* Modify the RHS of the assignment pointed-to by GSI using the
2138 operands in the expression tree EXPR.
2139
2140 NOTE: The statement pointed-to by GSI may be reallocated if it
2141 did not have enough operand slots.
2142
2143 This function is useful to convert an existing tree expression into
2144 the flat representation used for the RHS of a GIMPLE assignment.
2145 It will reallocate memory as needed to expand or shrink the number
2146 of operand slots needed to represent EXPR.
2147
2148 NOTE: If you find yourself building a tree and then calling this
2149 function, you are most certainly doing it the slow way. It is much
2150 better to build a new assignment or to use the function
2151 gimple_assign_set_rhs_with_ops, which does not require an
2152 expression tree to be built. */
2153
2154void
2155gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
2156{
2157 enum tree_code subcode;
2158 tree op1, op2, op3;
2159
2160 extract_ops_from_tree_1 (expr, &subcode, &op1, &op2, &op3);
2161 gimple_assign_set_rhs_with_ops_1 (gsi, subcode, op1, op2, op3);
2162}
2163
2164
2165/* Set the RHS of assignment statement pointed-to by GSI to CODE with
2166 operands OP1, OP2 and OP3.
2167
2168 NOTE: The statement pointed-to by GSI may be reallocated if it
2169 did not have enough operand slots. */
2170
2171void
2172gimple_assign_set_rhs_with_ops_1 (gimple_stmt_iterator *gsi, enum tree_code code,
2173 tree op1, tree op2, tree op3)
2174{
2175 unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
2176 gimple stmt = gsi_stmt (*gsi);
2177
2178 /* If the new CODE needs more operands, allocate a new statement. */
2179 if (gimple_num_ops (stmt) < new_rhs_ops + 1)
2180 {
2181 tree lhs = gimple_assign_lhs (stmt);
2182 gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
2183 memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
2184 gsi_replace (gsi, new_stmt, true);
2185 stmt = new_stmt;
2186
2187 /* The LHS needs to be reset as this also changes the SSA name
2188 on the LHS. */
2189 gimple_assign_set_lhs (stmt, lhs);
2190 }
2191
2192 gimple_set_num_ops (stmt, new_rhs_ops + 1);
2193 gimple_set_subcode (stmt, code);
2194 gimple_assign_set_rhs1 (stmt, op1);
2195 if (new_rhs_ops > 1)
2196 gimple_assign_set_rhs2 (stmt, op2);
2197 if (new_rhs_ops > 2)
2198 gimple_assign_set_rhs3 (stmt, op3);
2199}
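/* Usage sketch (editorial, with hypothetical SSA names A and B): turn
   whatever assignment GSI points at into LHS = A + B, then recompute
   its operand caches.  The statement may be reallocated, so refetch it
   through gsi_stmt instead of keeping an old pointer:

     gimple_assign_set_rhs_with_ops_1 (&gsi, PLUS_EXPR, a, b, NULL_TREE);
     update_stmt (gsi_stmt (gsi));  */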
2200
2201
2202/* Return the LHS of a statement that performs an assignment,
2203 either a GIMPLE_ASSIGN or a GIMPLE_CALL. Returns NULL_TREE
2204 for a call to a function that returns no value, or for a
2205 statement other than an assignment or a call. */
2206
2207tree
2208gimple_get_lhs (const_gimple stmt)
2209{
2210 enum gimple_code code = gimple_code (stmt);
2211
2212 if (code == GIMPLE_ASSIGN)
2213 return gimple_assign_lhs (stmt);
2214 else if (code == GIMPLE_CALL)
2215 return gimple_call_lhs (stmt);
2216 else
2217 return NULL_TREE;
2218}
2219
2220
2221/* Set the LHS of a statement that performs an assignment,
2222 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
2223
2224void
2225gimple_set_lhs (gimple stmt, tree lhs)
2226{
2227 enum gimple_code code = gimple_code (stmt);
2228
2229 if (code == GIMPLE_ASSIGN)
2230 gimple_assign_set_lhs (stmt, lhs);
2231 else if (code == GIMPLE_CALL)
2232 gimple_call_set_lhs (stmt, lhs);
2233 else
2234 gcc_unreachable();
2235}
2236
2237/* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
2238 GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
2239 expression with a different value.
2240
2241 This will update any annotations (say debug bind stmts) referring
2242 to the original LHS, so that they use the RHS instead. This is
2243 done even if NLHS and LHS are the same, for it is understood that
2244 the RHS will be modified afterwards, and NLHS will not be assigned
2245 an equivalent value.
2246
2247 Adjusting any non-annotation uses of the LHS, if needed, is a
2248 responsibility of the caller.
2249
2250 The effect of this call should be pretty much the same as that of
2251 inserting a copy of STMT before STMT, and then removing the
2252 original stmt, at which time gsi_remove() would have updated
2253 annotations, but using this function saves all the inserting,
2254 copying and removing. */
2255
2256void
2257gimple_replace_lhs (gimple stmt, tree nlhs)
2258{
2259 if (MAY_HAVE_DEBUG_STMTS)
2260 {
2261 tree lhs = gimple_get_lhs (stmt);
2262
2263 gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);
2264
2265 insert_debug_temp_for_var_def (NULL, lhs);
2266 }
2267
2268 gimple_set_lhs (stmt, nlhs);
2269}
2270
2271/* Return a deep copy of statement STMT. All the operands from STMT
2272 are reallocated and copied using unshare_expr. The DEF, USE, VDEF
2273 and VUSE operand arrays are set to empty in the new copy. */
2274
2275gimple
2276gimple_copy (gimple stmt)
2277{
2278 enum gimple_code code = gimple_code (stmt);
2279 unsigned num_ops = gimple_num_ops (stmt);
2280 gimple copy = gimple_alloc (code, num_ops);
2281 unsigned i;
2282
2283 /* Shallow copy all the fields from STMT. */
2284 memcpy (copy, stmt, gimple_size (code));
2285
2286 /* If STMT has sub-statements, deep-copy them as well. */
2287 if (gimple_has_substatements (stmt))
2288 {
2289 gimple_seq new_seq;
2290 tree t;
2291
2292 switch (gimple_code (stmt))
2293 {
2294 case GIMPLE_BIND:
2295 new_seq = gimple_seq_copy (gimple_bind_body (stmt));
2296 gimple_bind_set_body (copy, new_seq);
2297 gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt)));
2298 gimple_bind_set_block (copy, gimple_bind_block (stmt));
2299 break;
2300
2301 case GIMPLE_CATCH:
2302 new_seq = gimple_seq_copy (gimple_catch_handler (stmt));
2303 gimple_catch_set_handler (copy, new_seq);
2304 t = unshare_expr (gimple_catch_types (stmt));
2305 gimple_catch_set_types (copy, t);
2306 break;
2307
2308 case GIMPLE_EH_FILTER:
2309 new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt));
2310 gimple_eh_filter_set_failure (copy, new_seq);
2311 t = unshare_expr (gimple_eh_filter_types (stmt));
2312 gimple_eh_filter_set_types (copy, t);
2313 break;
2314
2315 case GIMPLE_EH_ELSE:
2316 new_seq = gimple_seq_copy (gimple_eh_else_n_body (stmt));
2317 gimple_eh_else_set_n_body (copy, new_seq);
2318 new_seq = gimple_seq_copy (gimple_eh_else_e_body (stmt));
2319 gimple_eh_else_set_e_body (copy, new_seq);
2320 break;
2321
2322 case GIMPLE_TRY:
2323 new_seq = gimple_seq_copy (gimple_try_eval (stmt));
2324 gimple_try_set_eval (copy, new_seq);
2325 new_seq = gimple_seq_copy (gimple_try_cleanup (stmt));
2326 gimple_try_set_cleanup (copy, new_seq);
2327 break;
2328
2329 case GIMPLE_OMP_FOR:
2330 new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
2331 gimple_omp_for_set_pre_body (copy, new_seq);
2332 t = unshare_expr (gimple_omp_for_clauses (stmt));
2333 gimple_omp_for_set_clauses (copy, t);
2334 copy->gimple_omp_for.iter
2335 = ggc_alloc_vec_gimple_omp_for_iter
2336 (gimple_omp_for_collapse (stmt));
2337 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2338 {
2339 gimple_omp_for_set_cond (copy, i,
2340 gimple_omp_for_cond (stmt, i));
2341 gimple_omp_for_set_index (copy, i,
2342 gimple_omp_for_index (stmt, i));
2343 t = unshare_expr (gimple_omp_for_initial (stmt, i));
2344 gimple_omp_for_set_initial (copy, i, t);
2345 t = unshare_expr (gimple_omp_for_final (stmt, i));
2346 gimple_omp_for_set_final (copy, i, t);
2347 t = unshare_expr (gimple_omp_for_incr (stmt, i));
2348 gimple_omp_for_set_incr (copy, i, t);
2349 }
2350 goto copy_omp_body;
2351
2352 case GIMPLE_OMP_PARALLEL:
2353 t = unshare_expr (gimple_omp_parallel_clauses (stmt));
2354 gimple_omp_parallel_set_clauses (copy, t);
2355 t = unshare_expr (gimple_omp_parallel_child_fn (stmt));
2356 gimple_omp_parallel_set_child_fn (copy, t);
2357 t = unshare_expr (gimple_omp_parallel_data_arg (stmt));
2358 gimple_omp_parallel_set_data_arg (copy, t);
2359 goto copy_omp_body;
2360
2361 case GIMPLE_OMP_TASK:
2362 t = unshare_expr (gimple_omp_task_clauses (stmt));
2363 gimple_omp_task_set_clauses (copy, t);
2364 t = unshare_expr (gimple_omp_task_child_fn (stmt));
2365 gimple_omp_task_set_child_fn (copy, t);
2366 t = unshare_expr (gimple_omp_task_data_arg (stmt));
2367 gimple_omp_task_set_data_arg (copy, t);
2368 t = unshare_expr (gimple_omp_task_copy_fn (stmt));
2369 gimple_omp_task_set_copy_fn (copy, t);
2370 t = unshare_expr (gimple_omp_task_arg_size (stmt));
2371 gimple_omp_task_set_arg_size (copy, t);
2372 t = unshare_expr (gimple_omp_task_arg_align (stmt));
2373 gimple_omp_task_set_arg_align (copy, t);
2374 goto copy_omp_body;
2375
2376 case GIMPLE_OMP_CRITICAL:
2377 t = unshare_expr (gimple_omp_critical_name (stmt));
2378 gimple_omp_critical_set_name (copy, t);
2379 goto copy_omp_body;
2380
2381 case GIMPLE_OMP_SECTIONS:
2382 t = unshare_expr (gimple_omp_sections_clauses (stmt));
2383 gimple_omp_sections_set_clauses (copy, t);
2384 t = unshare_expr (gimple_omp_sections_control (stmt));
2385 gimple_omp_sections_set_control (copy, t);
2386 /* FALLTHRU */
2387
2388 case GIMPLE_OMP_SINGLE:
2389 case GIMPLE_OMP_SECTION:
2390 case GIMPLE_OMP_MASTER:
2391 case GIMPLE_OMP_ORDERED:
2392 copy_omp_body:
2393 new_seq = gimple_seq_copy (gimple_omp_body (stmt));
2394 gimple_omp_set_body (copy, new_seq);
2395 break;
2396
2397 case GIMPLE_TRANSACTION:
2398 new_seq = gimple_seq_copy (gimple_transaction_body (stmt));
2399 gimple_transaction_set_body (copy, new_seq);
2400 break;
2401
2402 case GIMPLE_WITH_CLEANUP_EXPR:
2403 new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
2404 gimple_wce_set_cleanup (copy, new_seq);
2405 break;
2406
2407 default:
2408 gcc_unreachable ();
2409 }
2410 }
2411
2412 /* Make copy of operands. */
2413 if (num_ops > 0)
2414 {
2415 for (i = 0; i < num_ops; i++)
2416 gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));
2417
2418 /* Clear out SSA operand vectors on COPY. */
2419 if (gimple_has_ops (stmt))
2420 {
2421 gimple_set_def_ops (copy, NULL);
2422 gimple_set_use_ops (copy, NULL);
2423 }
2424
2425 if (gimple_has_mem_ops (stmt))
2426 {
2427 gimple_set_vdef (copy, gimple_vdef (stmt));
2428 gimple_set_vuse (copy, gimple_vuse (stmt));
2429 }
2430
2431 /* SSA operands need to be updated. */
2432 gimple_set_modified (copy, true);
2433 }
2434
2435 return copy;
2436}
2437
2438
2439/* Set the MODIFIED flag to MODIFIEDP, iff the gimple statement G has
2440 a MODIFIED field. */
2441
2442void
2443gimple_set_modified (gimple s, bool modifiedp)
2444{
2445 if (gimple_has_ops (s))
2446 s->gsbase.modified = (unsigned) modifiedp;
2447}
2448
2449
2450/* Return true if statement S has side-effects. We consider a
2451 statement to have side effects if:
2452
2453 - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
2454 - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS. */
2455
2456bool
2457gimple_has_side_effects (const_gimple s)
2458{
2459 if (is_gimple_debug (s))
2460 return false;
2461
2462 /* We don't have to scan the arguments to check for
2463 volatile arguments, though, at present, we still
2464 do a scan to check for TREE_SIDE_EFFECTS. */
2465 if (gimple_has_volatile_ops (s))
2466 return true;
2467
2468 if (gimple_code (s) == GIMPLE_ASM
2469 && gimple_asm_volatile_p (s))
2470 return true;
2471
2472 if (is_gimple_call (s))
2473 {
2474 int flags = gimple_call_flags (s);
2475
2476 /* An infinite loop is considered a side effect. */
2477 if (!(flags & (ECF_CONST | ECF_PURE))
2478 || (flags & ECF_LOOPING_CONST_OR_PURE))
2479 return true;
2480
2481 return false;
2482 }
2483
2484 return false;
2485}
2486
2487/* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
2488 Return true if S can trap. When INCLUDE_MEM is true, check whether
2489 the memory operations could trap. When INCLUDE_STORES is true and
2490 S is a GIMPLE_ASSIGN, the LHS of the assignment is also checked. */
2491
2492bool
2493gimple_could_trap_p_1 (gimple s, bool include_mem, bool include_stores)
2494{
2495 tree t, div = NULL_TREE;
2496 enum tree_code op;
2497
2498 if (include_mem)
2499 {
2500 unsigned i, start = (is_gimple_assign (s) && !include_stores) ? 1 : 0;
2501
2502 for (i = start; i < gimple_num_ops (s); i++)
2503 if (tree_could_trap_p (gimple_op (s, i)))
2504 return true;
2505 }
2506
2507 switch (gimple_code (s))
2508 {
2509 case GIMPLE_ASM:
2510 return gimple_asm_volatile_p (s);
2511
2512 case GIMPLE_CALL:
2513 t = gimple_call_fndecl (s);
2514 /* Assume that calls to weak functions may trap. */
2515 if (!t || !DECL_P (t) || DECL_WEAK (t))
2516 return true;
2517 return false;
2518
2519 case GIMPLE_ASSIGN:
2520 t = gimple_expr_type (s);
2521 op = gimple_assign_rhs_code (s);
2522 if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
2523 div = gimple_assign_rhs2 (s);
2524 return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
2525 (INTEGRAL_TYPE_P (t)
2526 && TYPE_OVERFLOW_TRAPS (t)),
2527 div));
2528
2529 default:
2530 break;
2531 }
2532
2533 return false;
2534}
2535
2536/* Return true if statement S can trap. */
2537
2538bool
2539gimple_could_trap_p (gimple s)
2540{
2541 return gimple_could_trap_p_1 (s, true, true);
2542}
2543
2544/* Return true if RHS of a GIMPLE_ASSIGN S can trap. */
2545
2546bool
2547gimple_assign_rhs_could_trap_p (gimple s)
2548{
2549 gcc_assert (is_gimple_assign (s));
2550 return gimple_could_trap_p_1 (s, true, false);
2551}
2552
2553
2554/* Print debugging information for gimple stmts generated. */
2555
2556void
2557dump_gimple_statistics (void)
2558{
2559#ifdef GATHER_STATISTICS
2560 int i, total_tuples = 0, total_bytes = 0;
2561
2562 fprintf (stderr, "\nGIMPLE statements\n");
2563 fprintf (stderr, "Kind Stmts Bytes\n");
2564 fprintf (stderr, "---------------------------------------\n");
2565 for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
2566 {
2567 fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i],
2568 gimple_alloc_counts[i], gimple_alloc_sizes[i]);
2569 total_tuples += gimple_alloc_counts[i];
2570 total_bytes += gimple_alloc_sizes[i];
2571 }
2572 fprintf (stderr, "---------------------------------------\n");
2573 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes);
2574 fprintf (stderr, "---------------------------------------\n");
2575#else
2576 fprintf (stderr, "No gimple statistics\n");
2577#endif
2578}
2579
2580
2581/* Return the number of operands needed on the RHS of a GIMPLE
2582 assignment for an expression with tree code CODE. */
2583
2584unsigned
2585get_gimple_rhs_num_ops (enum tree_code code)
2586{
2587 enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
2588
2589 if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
2590 return 1;
2591 else if (rhs_class == GIMPLE_BINARY_RHS)
2592 return 2;
2593 else if (rhs_class == GIMPLE_TERNARY_RHS)
2594 return 3;
2595 else
2596 gcc_unreachable ();
2597}
2598
2599#define DEFTREECODE(SYM, STRING, TYPE, NARGS) \
2600 (unsigned char) \
2601 ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS \
2602 : ((TYPE) == tcc_binary \
2603 || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS \
2604 : ((TYPE) == tcc_constant \
2605 || (TYPE) == tcc_declaration \
2606 || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS \
2607 : ((SYM) == TRUTH_AND_EXPR \
2608 || (SYM) == TRUTH_OR_EXPR \
2609 || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS \
2610 : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS \
2611 : ((SYM) == COND_EXPR \
2612 || (SYM) == WIDEN_MULT_PLUS_EXPR \
2613 || (SYM) == WIDEN_MULT_MINUS_EXPR \
2614 || (SYM) == DOT_PROD_EXPR \
2615 || (SYM) == REALIGN_LOAD_EXPR \
2616 || (SYM) == VEC_COND_EXPR \
2617 || (SYM) == VEC_PERM_EXPR \
2618 || (SYM) == FMA_EXPR) ? GIMPLE_TERNARY_RHS \
2619 : ((SYM) == CONSTRUCTOR \
2620 || (SYM) == OBJ_TYPE_REF \
2621 || (SYM) == ASSERT_EXPR \
2622 || (SYM) == ADDR_EXPR \
2623 || (SYM) == WITH_SIZE_EXPR \
2624 || (SYM) == SSA_NAME) ? GIMPLE_SINGLE_RHS \
2625 : GIMPLE_INVALID_RHS),
2626#define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,
2627
2628const unsigned char gimple_rhs_class_table[] = {
2629#include "all-tree.def"
2630};
2631
2632#undef DEFTREECODE
2633#undef END_OF_BASE_TREE_CODES
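/* Worked example (editorial): PLUS_EXPR has class tcc_binary, so the
   table above maps it to GIMPLE_BINARY_RHS and

     get_gimple_rhs_num_ops (PLUS_EXPR) == 2

   while ADDR_EXPR is listed explicitly as GIMPLE_SINGLE_RHS, giving

     get_gimple_rhs_num_ops (ADDR_EXPR) == 1.  */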
2634
2635/* For the definitive definition of GIMPLE, see doc/tree-ssa.texi. */
2636
2637/* Validation of GIMPLE expressions. */
2638
2639/* Return true if T is a valid LHS for a GIMPLE assignment expression. */
2640
2641bool
2642is_gimple_lvalue (tree t)
2643{
2644 return (is_gimple_addressable (t)
2645 || TREE_CODE (t) == WITH_SIZE_EXPR
2646 /* These are complex lvalues, but don't have addresses, so they
2647 go here. */
2648 || TREE_CODE (t) == BIT_FIELD_REF);
2649}
2650
2651/* Return true if T is a GIMPLE condition. */
2652
2653bool
2654is_gimple_condexpr (tree t)
2655{
2656 return (is_gimple_val (t) || (COMPARISON_CLASS_P (t)
2657 && !tree_could_throw_p (t)
2658 && is_gimple_val (TREE_OPERAND (t, 0))
2659 && is_gimple_val (TREE_OPERAND (t, 1))));
2660}
2661
2662/* Return true if T is something whose address can be taken. */
2663
2664bool
2665is_gimple_addressable (tree t)
2666{
2667 return (is_gimple_id (t) || handled_component_p (t)
2668 || TREE_CODE (t) == MEM_REF);
2669}
2670
2671/* Return true if T is a valid gimple constant. */
2672
2673bool
2674is_gimple_constant (const_tree t)
2675{
2676 switch (TREE_CODE (t))
2677 {
2678 case INTEGER_CST:
2679 case REAL_CST:
2680 case FIXED_CST:
2681 case STRING_CST:
2682 case COMPLEX_CST:
2683 case VECTOR_CST:
2684 return true;
2685
2686 /* Vector constant constructors are gimple invariant. */
2687 case CONSTRUCTOR:
2688 if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2689 return TREE_CONSTANT (t);
2690 else
2691 return false;
2692
2693 default:
2694 return false;
2695 }
2696}
2697
2698/* Return true if T is a gimple address. */
2699
2700bool
2701is_gimple_address (const_tree t)
2702{
2703 tree op;
2704
2705 if (TREE_CODE (t) != ADDR_EXPR)
2706 return false;
2707
2708 op = TREE_OPERAND (t, 0);
2709 while (handled_component_p (op))
2710 {
2711 if ((TREE_CODE (op) == ARRAY_REF
2712 || TREE_CODE (op) == ARRAY_RANGE_REF)
2713 && !is_gimple_val (TREE_OPERAND (op, 1)))
2714 return false;
2715
2716 op = TREE_OPERAND (op, 0);
2717 }
2718
2719 if (CONSTANT_CLASS_P (op) || TREE_CODE (op) == MEM_REF)
2720 return true;
2721
2722 switch (TREE_CODE (op))
2723 {
2724 case PARM_DECL:
2725 case RESULT_DECL:
2726 case LABEL_DECL:
2727 case FUNCTION_DECL:
2728 case VAR_DECL:
2729 case CONST_DECL:
2730 return true;
2731
2732 default:
2733 return false;
2734 }
2735}
2736
2737/* Return true if T is a gimple invariant address. */
2738
2739bool
2740is_gimple_invariant_address (const_tree t)
2741{
2742 const_tree op;
2743
2744 if (TREE_CODE (t) != ADDR_EXPR)
2745 return false;
2746
2747 op = strip_invariant_refs (TREE_OPERAND (t, 0));
2748 if (!op)
2749 return false;
2750
2751 if (TREE_CODE (op) == MEM_REF)
2752 {
2753 const_tree op0 = TREE_OPERAND (op, 0);
2754 return (TREE_CODE (op0) == ADDR_EXPR
2755 && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
2756 || decl_address_invariant_p (TREE_OPERAND (op0, 0))));
2757 }
2758
2759 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
2760}
2761
2762/* Return true if T is a gimple invariant address at IPA level
2763 (so addresses of variables on stack are not allowed). */
2764
2765bool
2766is_gimple_ip_invariant_address (const_tree t)
2767{
2768 const_tree op;
2769
2770 if (TREE_CODE (t) != ADDR_EXPR)
2771 return false;
2772
2773 op = strip_invariant_refs (TREE_OPERAND (t, 0));
2774 if (!op)
2775 return false;
2776
2777 if (TREE_CODE (op) == MEM_REF)
2778 {
2779 const_tree op0 = TREE_OPERAND (op, 0);
2780 return (TREE_CODE (op0) == ADDR_EXPR
2781 && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
2782 || decl_address_ip_invariant_p (TREE_OPERAND (op0, 0))));
2783 }
2784
2785 return CONSTANT_CLASS_P (op) || decl_address_ip_invariant_p (op);
2786}
2787
2788/* Return true if T is a GIMPLE minimal invariant. It's a restricted
2789 form of function invariant. */
2790
2791bool
2792is_gimple_min_invariant (const_tree t)
2793{
2794 if (TREE_CODE (t) == ADDR_EXPR)
2795 return is_gimple_invariant_address (t);
2796
2797 return is_gimple_constant (t);
2798}
2799
2800/* Return true if T is a GIMPLE interprocedural invariant. It's a restricted
2801 form of gimple minimal invariant. */
2802
2803bool
2804is_gimple_ip_invariant (const_tree t)
2805{
2806 if (TREE_CODE (t) == ADDR_EXPR)
2807 return is_gimple_ip_invariant_address (t);
2808
2809 return is_gimple_constant (t);
2810}
2811
2812/* Return true if T is a variable. */
2813
2814bool
2815is_gimple_variable (tree t)
2816{
2817 return (TREE_CODE (t) == VAR_DECL
2818 || TREE_CODE (t) == PARM_DECL
2819 || TREE_CODE (t) == RESULT_DECL
2820 || TREE_CODE (t) == SSA_NAME);
2821}
2822
2823/* Return true if T is a GIMPLE identifier (something with an address). */
2824
2825bool
2826is_gimple_id (tree t)
2827{
2828 return (is_gimple_variable (t)
2829 || TREE_CODE (t) == FUNCTION_DECL
2830 || TREE_CODE (t) == LABEL_DECL
2831 || TREE_CODE (t) == CONST_DECL
2832 /* Allow string constants, since they are addressable. */
2833 || TREE_CODE (t) == STRING_CST);
2834}
2835
2836/* Return true if T is a non-aggregate register variable. */
2837
2838bool
2839is_gimple_reg (tree t)
2840{
2841 if (TREE_CODE (t) == SSA_NAME)
2842 t = SSA_NAME_VAR (t);
2843
2844 if (!is_gimple_variable (t))
2845 return false;
2846
2847 if (!is_gimple_reg_type (TREE_TYPE (t)))
2848 return false;
2849
2850 /* A volatile decl is not acceptable because we can't reuse it as
2851 needed. We need to copy it into a temp first. */
2852 if (TREE_THIS_VOLATILE (t))
2853 return false;
2854
2855 /* We define "registers" as things that can be renamed as needed,
2856 which with our infrastructure does not apply to memory. */
2857 if (needs_to_live_in_memory (t))
2858 return false;
2859
2860 /* Hard register variables are an interesting case. For those that
2861 are call-clobbered, we don't know where all the calls are, since
2862 we don't (want to) take into account which operations will turn
2863 into libcalls at the rtl level. For those that are call-saved,
2864 we don't currently model the fact that calls may in fact change
2865 global hard registers, nor do we examine ASM_CLOBBERS at the tree
2866 level, and so miss variable changes that might imply. All around,
2867 it seems safest to not do too much optimization with these at the
2868 tree level at all. We'll have to rely on the rtl optimizers to
2869 clean this up, as there we've got all the appropriate bits exposed. */
2870 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
2871 return false;
2872
2873 /* Complex and vector values must have been put into SSA-like form.
2874 That is, no assignments to the individual components. */
2875 if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
2876 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2877 return DECL_GIMPLE_REG_P (t);
2878
2879 return true;
2880}
2881
2882
2883/* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant. */
2884
2885bool
2886is_gimple_val (tree t)
2887{
2888 /* Make loads from volatiles and memory vars explicit. */
2889 if (is_gimple_variable (t)
2890 && is_gimple_reg_type (TREE_TYPE (t))
2891 && !is_gimple_reg (t))
2892 return false;
2893
2894 return (is_gimple_variable (t) || is_gimple_min_invariant (t));
2895}
2896
2897/* Similarly, but accept hard registers as inputs to asm statements. */
2898
2899bool
2900is_gimple_asm_val (tree t)
2901{
2902 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
2903 return true;
2904
2905 return is_gimple_val (t);
2906}
2907
2908/* Return true if T is a GIMPLE minimal lvalue. */
2909
2910bool
2911is_gimple_min_lval (tree t)
2912{
2913 if (!(t = CONST_CAST_TREE (strip_invariant_refs (t))))
2914 return false;
2915 return (is_gimple_id (t) || TREE_CODE (t) == MEM_REF);
2916}
2917
2918/* Return true if T is a valid function operand of a CALL_EXPR. */
2919
2920bool
2921is_gimple_call_addr (tree t)
2922{
2923 return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t));
2924}
2925
2926/* Return true if T is a valid address operand of a MEM_REF. */
2927
2928bool
2929is_gimple_mem_ref_addr (tree t)
2930{
2931 return (is_gimple_reg (t)
2932 || TREE_CODE (t) == INTEGER_CST
2933 || (TREE_CODE (t) == ADDR_EXPR
2934 && (CONSTANT_CLASS_P (TREE_OPERAND (t, 0))
2935 || decl_address_invariant_p (TREE_OPERAND (t, 0)))));
2936}
2937
2938
2939/* Given a memory reference expression T, return its base address.
2940 The base address of a memory reference expression is the main
2941 object being referenced. For instance, the base address for
2942 'array[i].fld[j]' is 'array'. You can think of this as stripping
2943 away the offset part from a memory address.
2944
2945 This function calls handled_component_p to strip away all the inner
2946 parts of the memory reference until it reaches the base object. */
2947
2948tree
2949get_base_address (tree t)
2950{
2951 while (handled_component_p (t))
2952 t = TREE_OPERAND (t, 0);
2953
2954 if ((TREE_CODE (t) == MEM_REF
2955 || TREE_CODE (t) == TARGET_MEM_REF)
2956 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
2957 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
2958
2959 if (TREE_CODE (t) == SSA_NAME
2960 || DECL_P (t)
2961 || TREE_CODE (t) == STRING_CST
2962 || TREE_CODE (t) == CONSTRUCTOR
2963 || INDIRECT_REF_P (t)
2964 || TREE_CODE (t) == MEM_REF
2965 || TREE_CODE (t) == TARGET_MEM_REF)
2966 return t;
2967 else
2968 return NULL_TREE;
2969}
2970
2971void
2972recalculate_side_effects (tree t)
2973{
2974 enum tree_code code = TREE_CODE (t);
2975 int len = TREE_OPERAND_LENGTH (t);
2976 int i;
2977
2978 switch (TREE_CODE_CLASS (code))
2979 {
2980 case tcc_expression:
2981 switch (code)
2982 {
2983 case INIT_EXPR:
2984 case MODIFY_EXPR:
2985 case VA_ARG_EXPR:
2986 case PREDECREMENT_EXPR:
2987 case PREINCREMENT_EXPR:
2988 case POSTDECREMENT_EXPR:
2989 case POSTINCREMENT_EXPR:
2990 /* All of these have side-effects, no matter what their
2991 operands are. */
2992 return;
2993
2994 default:
2995 break;
2996 }
2997 /* Fall through. */
2998
2999 case tcc_comparison: /* a comparison expression */
3000 case tcc_unary: /* a unary arithmetic expression */
3001 case tcc_binary: /* a binary arithmetic expression */
3002 case tcc_reference: /* a reference */
3003 case tcc_vl_exp: /* a function call */
3004 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
3005 for (i = 0; i < len; ++i)
3006 {
3007 tree op = TREE_OPERAND (t, i);
3008 if (op && TREE_SIDE_EFFECTS (op))
3009 TREE_SIDE_EFFECTS (t) = 1;
3010 }
3011 break;
3012
3013 case tcc_constant:
3014 /* No side-effects. */
3015 return;
3016
3017 default:
3018 gcc_unreachable ();
3019 }
3020}
3021
3022/* Canonicalize a tree T for use in a COND_EXPR as conditional. Returns
3023 a canonicalized tree that is valid for a COND_EXPR or NULL_TREE, if
3024 we failed to create one. */
3025
3026tree
3027canonicalize_cond_expr_cond (tree t)
3028{
3029 /* Strip conversions around boolean operations. */
3030 if (CONVERT_EXPR_P (t)
3031 && (truth_value_p (TREE_CODE (TREE_OPERAND (t, 0)))
3032 || TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0)))
3033 == BOOLEAN_TYPE))
3034 t = TREE_OPERAND (t, 0);
3035
3036 /* For !x use x == 0. */
3037 if (TREE_CODE (t) == TRUTH_NOT_EXPR)
3038 {
3039 tree top0 = TREE_OPERAND (t, 0);
3040 t = build2 (EQ_EXPR, TREE_TYPE (t),
3041 top0, build_int_cst (TREE_TYPE (top0), 0));
3042 }
3043 /* For cmp ? 1 : 0 use cmp. */
3044 else if (TREE_CODE (t) == COND_EXPR
3045 && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
3046 && integer_onep (TREE_OPERAND (t, 1))
3047 && integer_zerop (TREE_OPERAND (t, 2)))
3048 {
3049 tree top0 = TREE_OPERAND (t, 0);
3050 t = build2 (TREE_CODE (top0), TREE_TYPE (t),
3051 TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
3052 }
3053
3054 if (is_gimple_condexpr (t))
3055 return t;
3056
3057 return NULL_TREE;
3058}
3059
3060/* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
3061 the positions marked by the set ARGS_TO_SKIP. */
3062
3063 gimple
3064 gimple_call_copy_skip_args (gimple stmt, bitmap args_to_skip)
3065{
3066 int i;
3067 int nargs = gimple_call_num_args (stmt);
3068 VEC(tree, heap) *vargs = VEC_alloc (tree, heap, nargs);
3069 gimple new_stmt;
3070
3071 for (i = 0; i < nargs; i++)
3072 if (!bitmap_bit_p (args_to_skip, i))
3073 VEC_quick_push (tree, vargs, gimple_call_arg (stmt, i));
3074
3075 if (gimple_call_internal_p (stmt))
3076 new_stmt = gimple_build_call_internal_vec (gimple_call_internal_fn (stmt),
3077 vargs);
3078 else
3079 new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);
3080 VEC_free (tree, heap, vargs);
3081 if (gimple_call_lhs (stmt))
3082 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3083
3084 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3085 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3086
3087 gimple_set_block (new_stmt, gimple_block (stmt));
3088 if (gimple_has_location (stmt))
3089 gimple_set_location (new_stmt, gimple_location (stmt));
3090 gimple_call_copy_flags (new_stmt, stmt);
3091 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3092
3093 gimple_set_modified (new_stmt, true);
3094
3095 return new_stmt;
3096}
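/* Usage sketch (editorial, hypothetical bitmap): drop argument 1 of
   CALL while keeping flags, chain, location and virtual operands.

     bitmap skip = BITMAP_ALLOC (NULL);
     bitmap_set_bit (skip, 1);
     new_call = gimple_call_copy_skip_args (call, skip);
     gsi_replace (&gsi, new_call, false);
     BITMAP_FREE (skip);  */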
3097
3098
3099enum gtc_mode { GTC_MERGE = 0, GTC_DIAG = 1 };
3100
3101 static hashval_t gimple_type_hash (const void *);
3102
3103/* Structure used to maintain a cache of some type pairs compared by
3104 gimple_types_compatible_p when comparing aggregate types. There are
3105 three possible values for SAME_P:
3106
3107 -2: The pair (T1, T2) has just been inserted in the table.
3108 0: T1 and T2 are different types.
3109 1: T1 and T2 are the same type.
3110
3111 The two elements in the SAME_P array are indexed by the comparison
3112 mode gtc_mode. */
3113
3114struct type_pair_d
3115{
3116 unsigned int uid1;
3117 unsigned int uid2;
3118 signed char same_p[2];
3119};
3120typedef struct type_pair_d *type_pair_t;
3121DEF_VEC_P(type_pair_t);
3122DEF_VEC_ALLOC_P(type_pair_t,heap);
3123
3124#define GIMPLE_TYPE_PAIR_SIZE 16381
3125struct type_pair_d *type_pair_cache;
3126
3127
3128/* Lookup the pair of types T1 and T2 in *VISITED_P. Insert a new
3129 entry if none existed. */
3130
3131static inline type_pair_t
3132lookup_type_pair (tree t1, tree t2)
3133{
3134 unsigned int index;
3135 unsigned int uid1, uid2;
3136
3137 if (type_pair_cache == NULL)
3138 type_pair_cache = XCNEWVEC (struct type_pair_d, GIMPLE_TYPE_PAIR_SIZE);
3139
3140 if (TYPE_UID (t1) < TYPE_UID (t2))
3141 {
3142 uid1 = TYPE_UID (t1);
3143 uid2 = TYPE_UID (t2);
3144 }
3145 else
3146 {
3147 uid1 = TYPE_UID (t2);
3148 uid2 = TYPE_UID (t1);
3149 }
3150 gcc_checking_assert (uid1 != uid2);
3151
3152 /* iterative_hash_hashval_t implies function calls.
3153 We know that UIDs are in a limited range. */
3154 index = ((((unsigned HOST_WIDE_INT)uid1 << HOST_BITS_PER_WIDE_INT / 2) + uid2)
3155 % GIMPLE_TYPE_PAIR_SIZE);
3156 if (type_pair_cache [index].uid1 == uid1
3157 && type_pair_cache [index].uid2 == uid2)
3158 return &type_pair_cache[index];
3159
3160 type_pair_cache [index].uid1 = uid1;
3161 type_pair_cache [index].uid2 = uid2;
3162 type_pair_cache [index].same_p[0] = -2;
3163 type_pair_cache [index].same_p[1] = -2;
3164
3165 return &type_pair_cache[index];
3166}
3167
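/* Worked example (editorial): with a 64-bit HOST_WIDE_INT the index
   above is ((uid1 << 32) + uid2) % 16381, i.e. a direct-mapped slot.
   A colliding pair simply evicts the previous entry and restarts with
   both same_p values at -2 ("just inserted").  */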
3168/* Per pointer state for the SCC finding. The on_sccstack flag
3169 is not strictly required; it is true when there is no hash value
3170 recorded for the type and false otherwise. But querying that
3171 is slower. */
3172
3173struct sccs
3174{
3175 unsigned int dfsnum;
3176 unsigned int low;
3177 bool on_sccstack;
3178 union {
3179 hashval_t hash;
3180 signed char same_p;
3181 } u;
3182};
3183
3184static unsigned int next_dfs_num;
3185static unsigned int gtc_next_dfs_num;
3186
3187
3188/* GIMPLE type merging cache. A direct-mapped cache based on TYPE_UID. */
3189
3190typedef struct GTY(()) gimple_type_leader_entry_s {
3191 tree type;
3192 tree leader;
3193} gimple_type_leader_entry;
3194
3195#define GIMPLE_TYPE_LEADER_SIZE 16381
3196static GTY((deletable, length("GIMPLE_TYPE_LEADER_SIZE")))
3197 gimple_type_leader_entry *gimple_type_leader;
3198
3199/* Lookup an existing leader for T and return it or NULL_TREE, if
3200 there is none in the cache. */
3201
3202 static inline tree
3203gimple_lookup_type_leader (tree t)
3204{
3205 gimple_type_leader_entry *leader;
3206
3207 if (!gimple_type_leader)
3208 return NULL_TREE;
3209
3210 leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
3211 if (leader->type != t)
3212 return NULL_TREE;
3213
3214 return leader->leader;
3215}
3216
3217/* Return true if T1 and T2 have the same name. If FOR_COMPLETION_P is
3218 true, return false if either type has no name; otherwise return
3219 true if both types have no name. */
3220
3221static bool
3222 compare_type_names_p (tree t1, tree t2)
3223{
3224 tree name1 = TYPE_NAME (t1);
3225 tree name2 = TYPE_NAME (t2);
3226
3227 if ((name1 != NULL_TREE) != (name2 != NULL_TREE))
3228 return false;
3229
3230 if (name1 == NULL_TREE)
3231 return true;
3232
3233 /* Either both should be a TYPE_DECL or both an IDENTIFIER_NODE. */
3234 if (TREE_CODE (name1) != TREE_CODE (name2))
3235 return false;
3236
3237 if (TREE_CODE (name1) == TYPE_DECL)
3238 name1 = DECL_NAME (name1);
3239 gcc_checking_assert (!name1 || TREE_CODE (name1) == IDENTIFIER_NODE);
3240
3241 if (TREE_CODE (name2) == TYPE_DECL)
3242 name2 = DECL_NAME (name2);
3243 gcc_checking_assert (!name2 || TREE_CODE (name2) == IDENTIFIER_NODE);
3244
3245 /* Identifiers can be compared with pointer equality rather
3246 than a string comparison. */
3247 if (name1 == name2)
3248 return true;
3249
3250 return false;
3251}
3252
3253/* Return true if the field decls F1 and F2 are at the same offset.
3254
3255 This is intended to be used on GIMPLE types only. */
3256
3257 bool
3258 gimple_compare_field_offset (tree f1, tree f2)
3259{
3260 if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2))
3261 {
3262 tree offset1 = DECL_FIELD_OFFSET (f1);
3263 tree offset2 = DECL_FIELD_OFFSET (f2);
3264 return ((offset1 == offset2
3265 /* Once gimplification is done, self-referential offsets are
3266 instantiated as operand #2 of the COMPONENT_REF built for
3267 each access and reset. Therefore, they are not relevant
3268 anymore and fields are interchangeable provided that they
3269 represent the same access. */
3270 || (TREE_CODE (offset1) == PLACEHOLDER_EXPR
3271 && TREE_CODE (offset2) == PLACEHOLDER_EXPR
3272 && (DECL_SIZE (f1) == DECL_SIZE (f2)
3273 || (TREE_CODE (DECL_SIZE (f1)) == PLACEHOLDER_EXPR
3274 && TREE_CODE (DECL_SIZE (f2)) == PLACEHOLDER_EXPR)
3275 || operand_equal_p (DECL_SIZE (f1), DECL_SIZE (f2), 0))
3276 && DECL_ALIGN (f1) == DECL_ALIGN (f2))
3277 || operand_equal_p (offset1, offset2, 0))
3278 && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1),
3279 DECL_FIELD_BIT_OFFSET (f2)));
3280 }
3281
3282 /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
3283 should be, so handle differing ones specially by decomposing
3284 the offset into a byte and bit offset manually. */
3285 if (host_integerp (DECL_FIELD_OFFSET (f1), 0)
3286 && host_integerp (DECL_FIELD_OFFSET (f2), 0))
3287 {
3288 unsigned HOST_WIDE_INT byte_offset1, byte_offset2;
3289 unsigned HOST_WIDE_INT bit_offset1, bit_offset2;
3290 bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1));
3291 byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1))
3292 + bit_offset1 / BITS_PER_UNIT);
3293 bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2));
3294 byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2))
3295 + bit_offset2 / BITS_PER_UNIT);
3296 if (byte_offset1 != byte_offset2)
3297 return false;
3298 return bit_offset1 % BITS_PER_UNIT == bit_offset2 % BITS_PER_UNIT;
3299 }
3300
3301 return false;
3302}
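/* Worked example (editorial): with BITS_PER_UNIT == 8, a field with
   DECL_FIELD_OFFSET == 4 and DECL_FIELD_BIT_OFFSET == 12 decomposes to

     byte_offset = 4 + 12 / 8 = 5, bit remainder = 12 % 8 = 4

   and thus compares equal to a field recorded as DECL_FIELD_OFFSET == 5
   with DECL_FIELD_BIT_OFFSET == 4, whatever DECL_OFFSET_ALIGN says.  */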
3303
3304 static bool
3305 gimple_types_compatible_p_1 (tree, tree, type_pair_t,
3306 VEC(type_pair_t, heap) **,
3307 struct pointer_map_t *, struct obstack *);
3308
3309/* DFS visit the edge from the caller's type pair with state *STATE to
3310 the pair T1, T2 while operating in FOR_MERGING_P mode.
3311 Update the merging status if it is not part of the SCC containing the
3312 caller's pair and return it.
3313 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
3314
3315static bool
3316 gtc_visit (tree t1, tree t2,
3317 struct sccs *state,
3318 VEC(type_pair_t, heap) **sccstack,
3319 struct pointer_map_t *sccstate,
3320 struct obstack *sccstate_obstack)
3321{
3322 struct sccs *cstate = NULL;
3323 type_pair_t p;
3324 void **slot;
3325 tree leader1, leader2;
3326
3327 /* Check first for the obvious case of pointer identity. */
3328 if (t1 == t2)
3329 return true;
3330
3331 /* Check that we have two types to compare. */
3332 if (t1 == NULL_TREE || t2 == NULL_TREE)
3333 return false;
3334
3335 /* Can't be the same type if the types don't have the same code. */
3336 if (TREE_CODE (t1) != TREE_CODE (t2))
3337 return false;
3338
3339 /* Can't be the same type if they have different CV qualifiers. */
3340 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
3341 return false;
3342
3343 if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
3344 return false;
3345
3346 /* Void types and nullptr types are always the same. */
3347 if (TREE_CODE (t1) == VOID_TYPE
3348 || TREE_CODE (t1) == NULLPTR_TYPE)
3349 return true;
3350
3351 /* Can't be the same type if they have different alignment or mode. */
3352 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
3353 || TYPE_MODE (t1) != TYPE_MODE (t2))
3354 return false;
3355
3356 /* Do some simple checks before doing three hashtable queries. */
3357 if (INTEGRAL_TYPE_P (t1)
3358 || SCALAR_FLOAT_TYPE_P (t1)
3359 || FIXED_POINT_TYPE_P (t1)
3360 || TREE_CODE (t1) == VECTOR_TYPE
3361 || TREE_CODE (t1) == COMPLEX_TYPE
3362 || TREE_CODE (t1) == OFFSET_TYPE
3363 || POINTER_TYPE_P (t1))
3364 {
3365 /* Can't be the same type if they have different sign or precision. */
3366 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
3367 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
3368 return false;
3369
3370 if (TREE_CODE (t1) == INTEGER_TYPE
3371 && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
3372 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
3373 return false;
3374
3375 /* That's all we need to check for float and fixed-point types. */
3376 if (SCALAR_FLOAT_TYPE_P (t1)
3377 || FIXED_POINT_TYPE_P (t1))
3378 return true;
3379
3380 /* For other types fall thru to more complex checks. */
3381 }
3382
3383 /* If the types have been previously registered and found equal
3384 they still are. */
3385 leader1 = gimple_lookup_type_leader (t1);
3386 leader2 = gimple_lookup_type_leader (t2);
3387 if (leader1 == t2
3388 || t1 == leader2
3389 || (leader1 && leader1 == leader2))
3390 return true;
3391
3392 /* If the hash values of t1 and t2 are different the types can't
3393 possibly be the same. This helps keeping the type-pair hashtable
3394 small, only tracking comparisons for hash collisions. */
3395 if (gimple_type_hash (t1) != gimple_type_hash (t2))
3396 return false;
3397
3398 /* Allocate a new cache entry for this comparison. */
3399 p = lookup_type_pair (t1, t2);
3400 if (p->same_p[GTC_MERGE] == 0 || p->same_p[GTC_MERGE] == 1)
3401 {
3402 /* We have already decided whether T1 and T2 are the
3403 same, return the cached result. */
3404 return p->same_p[GTC_MERGE] == 1;
3405 }
3406
3407 if ((slot = pointer_map_contains (sccstate, p)) != NULL)
3408 cstate = (struct sccs *)*slot;
3409 /* Not yet visited. DFS recurse. */
3410 if (!cstate)
3411 {
3412 gimple_types_compatible_p_1 (t1, t2, p,
3413 sccstack, sccstate, sccstate_obstack);
3414 cstate = (struct sccs *)* pointer_map_contains (sccstate, p);
3415 state->low = MIN (state->low, cstate->low);
3416 }
3417 /* If the type is still on the SCC stack adjust the parent's low. */
3418 if (cstate->dfsnum < state->dfsnum
3419 && cstate->on_sccstack)
3420 state->low = MIN (cstate->dfsnum, state->low);
3421
3422 /* Return the current lattice value. We start with an equality
3423 assumption so types part of a SCC will be optimistically
3424 treated equal unless proven otherwise. */
3425 return cstate->u.same_p;
3426}
3427
3428/* Worker for gimple_types_compatible_p.
3429 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
3430
3431static bool
3432 gimple_types_compatible_p_1 (tree t1, tree t2, type_pair_t p,
3433 VEC(type_pair_t, heap) **sccstack,
3434 struct pointer_map_t *sccstate,
3435 struct obstack *sccstate_obstack)
3436{
3437 struct sccs *state;
3438
3439 gcc_assert (p->same_p[GTC_MERGE] == -2);
3440
3441 state = XOBNEW (sccstate_obstack, struct sccs);
3442 *pointer_map_insert (sccstate, p) = state;
3443
3444 VEC_safe_push (type_pair_t, heap, *sccstack, p);
3445 state->dfsnum = gtc_next_dfs_num++;
3446 state->low = state->dfsnum;
3447 state->on_sccstack = true;
3448 /* Start with an equality assumption. As we DFS recurse into child
3449 SCCs this assumption may get revisited. */
3450 state->u.same_p = 1;
3451
3452 /* The struct tags shall compare equal. */
3453 if (!compare_type_names_p (t1, t2))
3454 goto different_types;
3455
3456 /* We may not merge typedef types to the same type in different
3457 contexts. */
3458 if (TYPE_NAME (t1)
3459 && TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
3460 && DECL_CONTEXT (TYPE_NAME (t1))
3461 && TYPE_P (DECL_CONTEXT (TYPE_NAME (t1))))
3462 {
3463 if (!gtc_visit (DECL_CONTEXT (TYPE_NAME (t1)),
3464 DECL_CONTEXT (TYPE_NAME (t2)),
3465 state, sccstack, sccstate, sccstate_obstack))
3466 goto different_types;
3467 }
3468
3469 /* If their attributes are not the same they can't be the same type. */
3470 if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
3471 goto different_types;
3472
3473 /* Do type-specific comparisons. */
3474 switch (TREE_CODE (t1))
3475 {
3476 case VECTOR_TYPE:
3477 case COMPLEX_TYPE:
3478 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
3479 state, sccstack, sccstate, sccstate_obstack))
3480 goto different_types;
3481 goto same_types;
3482
3483 case ARRAY_TYPE:
3484 /* Array types are the same if the element types are the same and
3485 the number of elements are the same. */
3486 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
3487 state, sccstack, sccstate, sccstate_obstack)
3488 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
3489 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
3490 goto different_types;
3491 else
3492 {
3493 tree i1 = TYPE_DOMAIN (t1);
3494 tree i2 = TYPE_DOMAIN (t2);
3495
3496 /* For an incomplete external array, the type domain can be
3497 NULL_TREE. Check this condition also. */
3498 if (i1 == NULL_TREE && i2 == NULL_TREE)
3499 goto same_types;
3500 else if (i1 == NULL_TREE || i2 == NULL_TREE)
3501 goto different_types;
3502 /* If for a complete array type the possibly gimplified sizes
3503 are different the types are different. */
3504 else if (((TYPE_SIZE (i1) != NULL) ^ (TYPE_SIZE (i2) != NULL))
3505 || (TYPE_SIZE (i1)
3506 && TYPE_SIZE (i2)
3507 && !operand_equal_p (TYPE_SIZE (i1), TYPE_SIZE (i2), 0)))
3508 goto different_types;
3509 else
3510 {
3511 tree min1 = TYPE_MIN_VALUE (i1);
3512 tree min2 = TYPE_MIN_VALUE (i2);
3513 tree max1 = TYPE_MAX_VALUE (i1);
3514 tree max2 = TYPE_MAX_VALUE (i2);
3515
3516 /* The minimum/maximum values have to be the same. */
3517 if ((min1 == min2
3518 || (min1 && min2
3519 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
3520 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
3521 || operand_equal_p (min1, min2, 0))))
3522 && (max1 == max2
3523 || (max1 && max2
3524 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
3525 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
3526 || operand_equal_p (max1, max2, 0)))))
3527 goto same_types;
3528 else
3529 goto different_types;
3530 }
3531 }
3532
3533 case METHOD_TYPE:
3534 /* Method types should belong to the same class. */
3535 if (!gtc_visit (TYPE_METHOD_BASETYPE (t1), TYPE_METHOD_BASETYPE (t2),
3536 state, sccstack, sccstate, sccstate_obstack))
3537 goto different_types;
3538
3539 /* Fallthru */
3540
3541 case FUNCTION_TYPE:
3542 /* Function types are the same if the return type and arguments types
3543 are the same. */
3544 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
3545 state, sccstack, sccstate, sccstate_obstack))
3546 goto different_types;
3547
3548 if (!comp_type_attributes (t1, t2))
3549 goto different_types;
3550
3551 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
3552 goto same_types;
3553 else
3554 {
3555 tree parms1, parms2;
3556
3557 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
3558 parms1 && parms2;
3559 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
3560 {
3561 if (!gtc_visit (TREE_VALUE (parms1), TREE_VALUE (parms2),
3562 state, sccstack, sccstate, sccstate_obstack))
3563 goto different_types;
3564 }
3565
3566 if (parms1 || parms2)
3567 goto different_types;
3568
3569 goto same_types;
3570 }
3571
3572 case OFFSET_TYPE:
3573 {
3574 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
3575 state, sccstack, sccstate, sccstate_obstack)
3576 || !gtc_visit (TYPE_OFFSET_BASETYPE (t1),
3577 TYPE_OFFSET_BASETYPE (t2),
3578 state, sccstack, sccstate, sccstate_obstack))
3579 goto different_types;
3580
3581 goto same_types;
3582 }
3583
3584 case POINTER_TYPE:
3585 case REFERENCE_TYPE:
3586 {
3587 /* If the two pointers have different ref-all attributes,
3588 they can't be the same type. */
3589 if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
3590 goto different_types;
3591
3592 /* Otherwise, pointer and reference types are the same if the
3593 pointed-to types are the same. */
3594 if (gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
3595 state, sccstack, sccstate, sccstate_obstack))
3596 goto same_types;
3597
3598 goto different_types;
3599 }
3600
3601 case INTEGER_TYPE:
3602 case BOOLEAN_TYPE:
3603 {
3604 tree min1 = TYPE_MIN_VALUE (t1);
3605 tree max1 = TYPE_MAX_VALUE (t1);
3606 tree min2 = TYPE_MIN_VALUE (t2);
3607 tree max2 = TYPE_MAX_VALUE (t2);
3608 bool min_equal_p = false;
3609 bool max_equal_p = false;
3610
3611 /* If either type has a minimum value, the other type must
3612 have the same. */
3613 if (min1 == NULL_TREE && min2 == NULL_TREE)
3614 min_equal_p = true;
3615 else if (min1 && min2 && operand_equal_p (min1, min2, 0))
3616 min_equal_p = true;
3617
3618 /* Likewise, if either type has a maximum value, the other
3619 type must have the same. */
3620 if (max1 == NULL_TREE && max2 == NULL_TREE)
3621 max_equal_p = true;
3622 else if (max1 && max2 && operand_equal_p (max1, max2, 0))
3623 max_equal_p = true;
3624
3625 if (!min_equal_p || !max_equal_p)
3626 goto different_types;
3627
3628 goto same_types;
3629 }
3630
3631 case ENUMERAL_TYPE:
3632 {
3633 /* FIXME lto, we cannot check bounds on enumeral types because
3634 different front ends will produce different values.
3635 In C, enumeral types are integers, while in C++ each element
3636 will have its own symbolic value. We should decide how enums
3637 are to be represented in GIMPLE and have each front end lower
3638 to that. */
3639 tree v1, v2;
3640
3641 /* For enumeral types, all the values must be the same. */
3642 if (TYPE_VALUES (t1) == TYPE_VALUES (t2))
3643 goto same_types;
3644
3645 for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
3646 v1 && v2;
3647 v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
3648 {
3649 tree c1 = TREE_VALUE (v1);
3650 tree c2 = TREE_VALUE (v2);
3651
3652 if (TREE_CODE (c1) == CONST_DECL)
3653 c1 = DECL_INITIAL (c1);
3654
3655 if (TREE_CODE (c2) == CONST_DECL)
3656 c2 = DECL_INITIAL (c2);
3657
3658 if (tree_int_cst_equal (c1, c2) != 1)
3659 goto different_types;
3660
3661 if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
3662 goto different_types;
3663 }
3664
3665 /* If one enumeration has more values than the other, they
3666 are not the same. */
3667 if (v1 || v2)
3668 goto different_types;
d7f09764 3669
e575382e
RG
3670 goto same_types;
3671 }
d7f09764
DN
3672
3673 case RECORD_TYPE:
3674 case UNION_TYPE:
3675 case QUAL_UNION_TYPE:
e575382e
RG
3676 {
3677 tree f1, f2;
d7f09764 3678
e575382e
RG
3679 /* For aggregate types, all the fields must be the same. */
3680 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
3681 f1 && f2;
3682 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
3683 {
17e99cdb
RG
3684 /* Different field kinds are not compatible. */
3685 if (TREE_CODE (f1) != TREE_CODE (f2))
3686 goto different_types;
3687 /* Field decls must have the same name and offset. */
3688 if (TREE_CODE (f1) == FIELD_DECL
3689 && (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
3690 || !gimple_compare_field_offset (f1, f2)))
3691 goto different_types;
3692 /* All entities should have the same name and type. */
b5e04de5 3693 if (DECL_NAME (f1) != DECL_NAME (f2)
b5e04de5 3694 || !gtc_visit (TREE_TYPE (f1), TREE_TYPE (f2),
d4398a43 3695 state, sccstack, sccstate, sccstate_obstack))
e575382e
RG
3696 goto different_types;
3697 }
d7f09764 3698
e575382e
RG
3699 /* If one aggregate has more fields than the other, they
3700 are not the same. */
3701 if (f1 || f2)
3702 goto different_types;
d7f09764 3703
e575382e
RG
3704 goto same_types;
3705 }
d7f09764 3706
d7f09764 3707 default:
b0cc341f 3708 gcc_unreachable ();
d7f09764
DN
3709 }
3710
3711 /* Common exit path for types that are not compatible. */
3712different_types:
d4398a43
RG
3713 state->u.same_p = 0;
3714 goto pop;
d7f09764
DN
3715
3716 /* Common exit path for types that are compatible. */
3717same_types:
67701d1d 3718 gcc_assert (state->u.same_p == 1);
d7f09764 3719
d4398a43
RG
3720pop:
3721 if (state->low == state->dfsnum)
3722 {
3723 type_pair_t x;
d7f09764 3724
67701d1d
RG
3725 /* Pop off the SCC and set its cache values to the final
3726 comparison result. */
d4398a43
RG
3727 do
3728 {
3729 struct sccs *cstate;
3730 x = VEC_pop (type_pair_t, *sccstack);
3731 cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
3732 cstate->on_sccstack = false;
b5e04de5 3733 x->same_p[GTC_MERGE] = state->u.same_p;
d4398a43
RG
3734 }
3735 while (x != p);
3736 }
d7f09764 3737
d4398a43
RG
3738 return state->u.same_p;
3739}
d7f09764 3740
/* Return true iff T1 and T2 are structurally identical.  When comparing
   for merging, an incomplete type and a complete type are considered
   different; otherwise they are considered compatible.  */

static bool
gimple_types_compatible_p (tree t1, tree t2)
{
  VEC(type_pair_t, heap) *sccstack = NULL;
  struct pointer_map_t *sccstate;
  struct obstack sccstate_obstack;
  type_pair_t p = NULL;
  bool res;
  tree leader1, leader2;

  /* Before starting to set up the SCC machinery handle simple cases.  */

  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Check that we have two types to compare.  */
  if (t1 == NULL_TREE || t2 == NULL_TREE)
    return false;

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;

  /* Can't be the same type if they have different CV qualifiers.  */
  if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
    return false;

  if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
    return false;

  /* Void types and nullptr types are always the same.  */
  if (TREE_CODE (t1) == VOID_TYPE
      || TREE_CODE (t1) == NULLPTR_TYPE)
    return true;

  /* Can't be the same type if they have different alignment or mode.  */
  if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
      || TYPE_MODE (t1) != TYPE_MODE (t2))
    return false;

  /* Do some simple checks before doing three hashtable queries.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      /* Can't be the same type if they have different sign or precision.  */
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
	  || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
	return false;

      if (TREE_CODE (t1) == INTEGER_TYPE
	  && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
	      || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
	return false;

      /* That's all we need to check for float and fixed-point types.  */
      if (SCALAR_FLOAT_TYPE_P (t1)
	  || FIXED_POINT_TYPE_P (t1))
	return true;

      /* For other types fall thru to more complex checks.  */
    }

  /* If the types have been previously registered and found equal
     they still are.  */
  leader1 = gimple_lookup_type_leader (t1);
  leader2 = gimple_lookup_type_leader (t2);
  if (leader1 == t2
      || t1 == leader2
      || (leader1 && leader1 == leader2))
    return true;

  /* If the hash values of t1 and t2 are different the types can't
     possibly be the same.  This helps keep the type-pair hashtable
     small, only tracking comparisons for hash collisions.  */
  if (gimple_type_hash (t1) != gimple_type_hash (t2))
    return false;

  /* If we've visited this type pair before (in the case of aggregates
     with self-referential types), and we made a decision, return it.  */
  p = lookup_type_pair (t1, t2);
  if (p->same_p[GTC_MERGE] == 0 || p->same_p[GTC_MERGE] == 1)
    {
      /* We have already decided whether T1 and T2 are the
	 same, return the cached result.  */
      return p->same_p[GTC_MERGE] == 1;
    }

  /* Now set up the SCC machinery for the comparison.  */
  gtc_next_dfs_num = 1;
  sccstate = pointer_map_create ();
  gcc_obstack_init (&sccstate_obstack);
  res = gimple_types_compatible_p_1 (t1, t2, p,
				     &sccstack, sccstate, &sccstate_obstack);
  VEC_free (type_pair_t, heap, sccstack);
  pointer_map_destroy (sccstate);
  obstack_free (&sccstate_obstack, NULL);

  return res;
}

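/* Illustrative sketch (not part of GCC): how a caller on the LTO
   streamer-in side might use gimple_types_compatible_p.  T1 and T2
   stand for two struct types read from different translation units;
   distinct tree nodes for "struct S { int i; };" in two TUs compare
   equal here.  Guarded out, documentation only.  */
#if 0
static bool
example_types_mergeable_p (tree t1, tree t2)
{
  return gimple_types_compatible_p (t1, t2);
}
#endif
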
static hashval_t
iterative_hash_gimple_type (tree, hashval_t, VEC(tree, heap) **,
			    struct pointer_map_t *, struct obstack *);

/* DFS visit the edge from the caller's type with state *STATE to T.
   Update the caller's type hash V with the hash for T if it is not part
   of the SCC containing the caller's type and return it.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.  */

static hashval_t
visit (tree t, struct sccs *state, hashval_t v,
       VEC (tree, heap) **sccstack,
       struct pointer_map_t *sccstate,
       struct obstack *sccstate_obstack)
{
  struct sccs *cstate = NULL;
  struct tree_int_map m;
  void **slot;

  /* If there is a hash value recorded for this type then it can't
     possibly be part of our parent SCC.  Simply mix in its hash.  */
  m.base.from = t;
  if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
      && *slot)
    return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, v);

  if ((slot = pointer_map_contains (sccstate, t)) != NULL)
    cstate = (struct sccs *)*slot;
  if (!cstate)
    {
      hashval_t tem;
      /* Not yet visited.  DFS recurse.  */
      tem = iterative_hash_gimple_type (t, v,
					sccstack, sccstate, sccstate_obstack);
      if (!cstate)
	cstate = (struct sccs *)* pointer_map_contains (sccstate, t);
      state->low = MIN (state->low, cstate->low);
      /* If the type is no longer on the SCC stack and thus is not part
	 of the parent's SCC mix in its hash value.  Otherwise we will
	 ignore the type for hashing purposes and return the unaltered
	 hash value.  */
      if (!cstate->on_sccstack)
	return tem;
    }
  if (cstate->dfsnum < state->dfsnum
      && cstate->on_sccstack)
    state->low = MIN (cstate->dfsnum, state->low);

  /* We are part of our parent's SCC, skip this type during hashing
     and return the unaltered hash value.  */
  return v;
}

/* Hash NAME with the previous hash value V and return it.  */

static hashval_t
iterative_hash_name (tree name, hashval_t v)
{
  if (!name)
    return v;
  v = iterative_hash_hashval_t (TREE_CODE (name), v);
  if (TREE_CODE (name) == TYPE_DECL)
    name = DECL_NAME (name);
  if (!name)
    return v;
  gcc_assert (TREE_CODE (name) == IDENTIFIER_NODE);
  return iterative_hash_object (IDENTIFIER_HASH_VALUE (name), v);
}

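/* Illustrative sketch (not part of GCC): iterative_hash_name folds a
   type's name into a running hash.  For "struct foo" it mixes in the
   identifier hash of "foo"; anonymous types leave V unchanged, so
   they can still hash equal to each other.  Guarded out.  */
#if 0
static hashval_t
example_hash_tag (tree type, hashval_t v)
{
  return iterative_hash_name (TYPE_NAME (type), v);
}
#endif
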
/* A type, hashvalue pair for sorting SCC members.  */

struct type_hash_pair {
  tree type;
  hashval_t hash;
};

/* Compare two type, hashvalue pairs.  */

static int
type_hash_pair_compare (const void *p1_, const void *p2_)
{
  const struct type_hash_pair *p1 = (const struct type_hash_pair *) p1_;
  const struct type_hash_pair *p2 = (const struct type_hash_pair *) p2_;
  if (p1->hash < p2->hash)
    return -1;
  else if (p1->hash > p2->hash)
    return 1;
  return 0;
}

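/* Illustrative sketch (not part of GCC): type_hash_pair_compare is a
   qsort callback that orders SCC members by hash value alone, which
   is what makes the combined SCC hash independent of visit order.
   Guarded out.  */
#if 0
static void
example_sort_scc_members (struct type_hash_pair *pairs, unsigned size)
{
  qsort (pairs, size, sizeof (struct type_hash_pair),
	 type_hash_pair_compare);
}
#endif
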
/* Return a hash value for the gimple type TYPE combined with VAL.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.

   To hash a type we end up hashing in types that are reachable.
   Through pointers we can end up with cycles which messes up the
   required property that we need to compute the same hash value
   for structurally equivalent types.  To avoid this we have to
   hash all types in a cycle (the SCC) in a commutative way.  The
   easiest way is to not mix in the hashes of the SCC members at
   all.  To make this work we have to delay setting the hash
   values of the SCC until it is complete.  */

static hashval_t
iterative_hash_gimple_type (tree type, hashval_t val,
			    VEC(tree, heap) **sccstack,
			    struct pointer_map_t *sccstate,
			    struct obstack *sccstate_obstack)
{
  hashval_t v;
  void **slot;
  struct sccs *state;

  /* Not visited during this DFS walk.  */
  gcc_checking_assert (!pointer_map_contains (sccstate, type));
  state = XOBNEW (sccstate_obstack, struct sccs);
  *pointer_map_insert (sccstate, type) = state;

  VEC_safe_push (tree, heap, *sccstack, type);
  state->dfsnum = next_dfs_num++;
  state->low = state->dfsnum;
  state->on_sccstack = true;

  /* Combine a few common features of types so that types are grouped into
     smaller sets; when searching for existing matching types to merge,
     only existing types having the same features as the new type will be
     checked.  */
  v = iterative_hash_name (TYPE_NAME (type), 0);
  if (TYPE_NAME (type)
      && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
      && DECL_CONTEXT (TYPE_NAME (type))
      && TYPE_P (DECL_CONTEXT (TYPE_NAME (type))))
    v = visit (DECL_CONTEXT (TYPE_NAME (type)), state, v,
	       sccstack, sccstate, sccstate_obstack);
  v = iterative_hash_hashval_t (TREE_CODE (type), v);
  v = iterative_hash_hashval_t (TYPE_QUALS (type), v);
  v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);

  /* Do not hash the type's size as this will cause differences in
     hash values for the complete vs. the incomplete type variant.  */

  /* Incorporate common features of numerical types.  */
  if (INTEGRAL_TYPE_P (type)
      || SCALAR_FLOAT_TYPE_P (type)
      || FIXED_POINT_TYPE_P (type))
    {
      v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
      v = iterative_hash_hashval_t (TYPE_MODE (type), v);
      v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
    }

  /* For pointer and reference types, fold in information about the type
     pointed to.  */
  if (POINTER_TYPE_P (type))
    v = visit (TREE_TYPE (type), state, v,
	       sccstack, sccstate, sccstate_obstack);

  /* For integer types hash the type's min/max values and the string flag.  */
  if (TREE_CODE (type) == INTEGER_TYPE)
    {
      /* OMP lowering can introduce error_mark_node in place of
	 random local decls in types.  */
      if (TYPE_MIN_VALUE (type) != error_mark_node)
	v = iterative_hash_expr (TYPE_MIN_VALUE (type), v);
      if (TYPE_MAX_VALUE (type) != error_mark_node)
	v = iterative_hash_expr (TYPE_MAX_VALUE (type), v);
      v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
    }

  /* For array types hash their domain and the string flag.  */
  if (TREE_CODE (type) == ARRAY_TYPE
      && TYPE_DOMAIN (type))
    {
      v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
      v = visit (TYPE_DOMAIN (type), state, v,
		 sccstack, sccstate, sccstate_obstack);
    }

  /* Recurse for aggregates with a single element type.  */
  if (TREE_CODE (type) == ARRAY_TYPE
      || TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    v = visit (TREE_TYPE (type), state, v,
	       sccstack, sccstate, sccstate_obstack);

  /* Incorporate function return and argument types.  */
  if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
    {
      unsigned na;
      tree p;

      /* For method types also incorporate their parent class.  */
      if (TREE_CODE (type) == METHOD_TYPE)
	v = visit (TYPE_METHOD_BASETYPE (type), state, v,
		   sccstack, sccstate, sccstate_obstack);

      /* Check result and argument types.  */
      v = visit (TREE_TYPE (type), state, v,
		 sccstack, sccstate, sccstate_obstack);
      for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
	{
	  v = visit (TREE_VALUE (p), state, v,
		     sccstack, sccstate, sccstate_obstack);
	  na++;
	}

      v = iterative_hash_hashval_t (na, v);
    }

  if (RECORD_OR_UNION_TYPE_P (type))
    {
      unsigned nf;
      tree f;

      for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
	{
	  v = iterative_hash_name (DECL_NAME (f), v);
	  v = visit (TREE_TYPE (f), state, v,
		     sccstack, sccstate, sccstate_obstack);
	  nf++;
	}

      v = iterative_hash_hashval_t (nf, v);
    }

  /* Record hash for us.  */
  state->u.hash = v;

  /* See if we found an SCC.  */
  if (state->low == state->dfsnum)
    {
      tree x;
      struct tree_int_map *m;

      /* Pop off the SCC and set its hash values.  */
      x = VEC_pop (tree, *sccstack);
      /* Optimize SCC size one.  */
      if (x == type)
	{
	  state->on_sccstack = false;
	  m = ggc_alloc_cleared_tree_int_map ();
	  m->base.from = x;
	  m->to = v;
	  slot = htab_find_slot (type_hash_cache, m, INSERT);
	  gcc_assert (!*slot);
	  *slot = (void *) m;
	}
      else
	{
	  struct sccs *cstate;
	  unsigned first, i, size, j;
	  struct type_hash_pair *pairs;
	  /* Pop off the SCC and build an array of type, hash pairs.  */
	  first = VEC_length (tree, *sccstack) - 1;
	  while (VEC_index (tree, *sccstack, first) != type)
	    --first;
	  size = VEC_length (tree, *sccstack) - first + 1;
	  pairs = XALLOCAVEC (struct type_hash_pair, size);
	  i = 0;
	  cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
	  cstate->on_sccstack = false;
	  pairs[i].type = x;
	  pairs[i].hash = cstate->u.hash;
	  do
	    {
	      x = VEC_pop (tree, *sccstack);
	      cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
	      cstate->on_sccstack = false;
	      ++i;
	      pairs[i].type = x;
	      pairs[i].hash = cstate->u.hash;
	    }
	  while (x != type);
	  gcc_assert (i + 1 == size);
	  /* Sort the arrays of type, hash pairs so that when we mix in
	     all members of the SCC the hash value becomes independent of
	     the order we visited the SCC.  Disregard hashes equal to
	     the hash of the type we mix into because we cannot guarantee
	     a stable sort for those across different TUs.  */
	  qsort (pairs, size, sizeof (struct type_hash_pair),
		 type_hash_pair_compare);
	  for (i = 0; i < size; ++i)
	    {
	      hashval_t hash;
	      m = ggc_alloc_cleared_tree_int_map ();
	      m->base.from = pairs[i].type;
	      hash = pairs[i].hash;
	      /* Skip same hashes.  */
	      for (j = i + 1; j < size && pairs[j].hash == pairs[i].hash; ++j)
		;
	      for (; j < size; ++j)
		hash = iterative_hash_hashval_t (pairs[j].hash, hash);
	      for (j = 0; pairs[j].hash != pairs[i].hash; ++j)
		hash = iterative_hash_hashval_t (pairs[j].hash, hash);
	      m->to = hash;
	      if (pairs[i].type == type)
		v = hash;
	      slot = htab_find_slot (type_hash_cache, m, INSERT);
	      gcc_assert (!*slot);
	      *slot = (void *) m;
	    }
	}
    }

  return iterative_hash_hashval_t (v, val);
}

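/* Illustrative sketch (not part of GCC) of why SCC-aware hashing is
   needed.  For the mutually recursive pair below, a naive recursive
   hash would either not terminate or depend on the entry point;
   iterative_hash_gimple_type instead skips edges that stay inside
   the SCC and assigns all members their hashes only once the whole
   cycle is popped off the SCC stack.  Guarded out.  */
#if 0
struct example_a { struct example_b *pb; };
struct example_b { struct example_a *pa; };
#endif
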
/* Returns a hash value for P (assumed to be a type).  The hash value
   is computed using some distinguishing features of the type.  Note
   that we cannot use pointer hashing here as we may be dealing with
   two distinct instances of the same type.

   This function should produce the same hash value for two compatible
   types according to gimple_types_compatible_p.  */

static hashval_t
gimple_type_hash (const void *p)
{
  const_tree t = (const_tree) p;
  VEC(tree, heap) *sccstack = NULL;
  struct pointer_map_t *sccstate;
  struct obstack sccstate_obstack;
  hashval_t val;
  void **slot;
  struct tree_int_map m;

  if (type_hash_cache == NULL)
    type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
				       tree_int_map_eq, NULL);

  m.base.from = CONST_CAST_TREE (t);
  if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
      && *slot)
    return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, 0);

  /* Perform a DFS walk and pre-hash all reachable types.  */
  next_dfs_num = 1;
  sccstate = pointer_map_create ();
  gcc_obstack_init (&sccstate_obstack);
  val = iterative_hash_gimple_type (CONST_CAST_TREE (t), 0,
				    &sccstack, sccstate, &sccstate_obstack);
  VEC_free (tree, heap, sccstack);
  pointer_map_destroy (sccstate);
  obstack_free (&sccstate_obstack, NULL);

  return val;
}

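/* Illustrative sketch (not part of GCC): gimple_type_hash is the hash
   callback paired with gimple_type_eq (defined below) when the type
   merging hash table is created; see gimple_register_type.  Guarded
   out.  */
#if 0
static htab_t
example_create_merging_table (void)
{
  return htab_create_ggc (16381, gimple_type_hash, gimple_type_eq, 0);
}
#endif
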
/* Return a hash value for the gimple type TYPE combined with VAL.

   The hash value returned is equal for types considered compatible
   by gimple_canonical_types_compatible_p.  */

static hashval_t
iterative_hash_canonical_type (tree type, hashval_t val)
{
  hashval_t v;
  void **slot;
  struct tree_int_map *mp, m;

  m.base.from = type;
  if ((slot = htab_find_slot (canonical_type_hash_cache, &m, INSERT))
      && *slot)
    return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, val);

  /* Combine a few common features of types so that types are grouped into
     smaller sets; when searching for existing matching types to merge,
     only existing types having the same features as the new type will be
     checked.  */
  v = iterative_hash_hashval_t (TREE_CODE (type), 0);
  v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);
  v = iterative_hash_hashval_t (TYPE_ALIGN (type), v);
  v = iterative_hash_hashval_t (TYPE_MODE (type), v);

  /* Incorporate common features of numerical types.  */
  if (INTEGRAL_TYPE_P (type)
      || SCALAR_FLOAT_TYPE_P (type)
      || FIXED_POINT_TYPE_P (type)
      || TREE_CODE (type) == VECTOR_TYPE
      || TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == OFFSET_TYPE
      || POINTER_TYPE_P (type))
    {
      v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
      v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
    }

  /* For pointer and reference types, fold in information about the type
     pointed to but do not recurse to the pointed-to type.  */
  if (POINTER_TYPE_P (type))
    {
      v = iterative_hash_hashval_t (TYPE_REF_CAN_ALIAS_ALL (type), v);
      v = iterative_hash_hashval_t (TYPE_ADDR_SPACE (TREE_TYPE (type)), v);
      v = iterative_hash_hashval_t (TYPE_RESTRICT (type), v);
      v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
    }

  /* For integer types hash the sizetype and the string flag.  */
  if (TREE_CODE (type) == INTEGER_TYPE)
    {
      v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
      v = iterative_hash_hashval_t (TYPE_IS_SIZETYPE (type), v);
    }

  /* For array types hash their domain and the string flag.  */
  if (TREE_CODE (type) == ARRAY_TYPE
      && TYPE_DOMAIN (type))
    {
      v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
      v = iterative_hash_canonical_type (TYPE_DOMAIN (type), v);
    }

  /* Recurse for aggregates with a single element type.  */
  if (TREE_CODE (type) == ARRAY_TYPE
      || TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    v = iterative_hash_canonical_type (TREE_TYPE (type), v);

  /* Incorporate function return and argument types.  */
  if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
    {
      unsigned na;
      tree p;

      /* For method types also incorporate their parent class.  */
      if (TREE_CODE (type) == METHOD_TYPE)
	v = iterative_hash_canonical_type (TYPE_METHOD_BASETYPE (type), v);

      v = iterative_hash_canonical_type (TREE_TYPE (type), v);

      for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
	{
	  v = iterative_hash_canonical_type (TREE_VALUE (p), v);
	  na++;
	}

      v = iterative_hash_hashval_t (na, v);
    }

  if (RECORD_OR_UNION_TYPE_P (type))
    {
      unsigned nf;
      tree f;

      for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
	if (TREE_CODE (f) == FIELD_DECL)
	  {
	    v = iterative_hash_canonical_type (TREE_TYPE (f), v);
	    nf++;
	  }

      v = iterative_hash_hashval_t (nf, v);
    }

  /* Cache the just computed hash value.  */
  mp = ggc_alloc_cleared_tree_int_map ();
  mp->base.from = type;
  mp->to = v;
  *slot = (void *) mp;

  return iterative_hash_hashval_t (v, val);
}

/* Return a hash value for P (assumed to be a type) suitable for the
   canonical type table; see iterative_hash_canonical_type.  */

static hashval_t
gimple_canonical_type_hash (const void *p)
{
  if (canonical_type_hash_cache == NULL)
    canonical_type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
						 tree_int_map_eq, NULL);

  return iterative_hash_canonical_type (CONST_CAST_TREE ((const_tree) p), 0);
}

/* Returns nonzero if P1 and P2 are equal.  */

static int
gimple_type_eq (const void *p1, const void *p2)
{
  const_tree t1 = (const_tree) p1;
  const_tree t2 = (const_tree) p2;
  return gimple_types_compatible_p (CONST_CAST_TREE (t1),
				    CONST_CAST_TREE (t2));
}

/* Worker for gimple_register_type.
   Register type T in the global type table gimple_types.
   When REGISTERING_MV is false first recurse for the main variant of T.  */

static tree
gimple_register_type_1 (tree t, bool registering_mv)
{
  void **slot;
  gimple_type_leader_entry *leader;

  /* If we registered this type before return the cached result.  */
  leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
  if (leader->type == t)
    return leader->leader;

  /* Always register the main variant first.  This is important so we
     pick up the non-typedef variants as canonical, otherwise we'll end
     up taking typedef ids for structure tags during comparison.
     It also makes sure that main variants will be merged to main variants.
     As we are operating on a possibly partially fixed up type graph
     do not bother to recurse more than once, otherwise we may end up
     walking in circles.
     If we are registering a main variant it will either remain its
     own main variant or it will be merged to something else in which
     case we do not care for the main variant leader.  */
  if (!registering_mv
      && TYPE_MAIN_VARIANT (t) != t)
    gimple_register_type_1 (TYPE_MAIN_VARIANT (t), true);

  /* See if we already have an equivalent type registered.  */
  slot = htab_find_slot (gimple_types, t, INSERT);
  if (*slot
      && *(tree *)slot != t)
    {
      tree new_type = (tree) *((tree *) slot);
      leader->type = t;
      leader->leader = new_type;
      return new_type;
    }

  /* If not, insert it to the cache and the hash.  */
  leader->type = t;
  leader->leader = t;
  *slot = (void *) t;
  return t;
}

/* Register type T in the global type table gimple_types.
   If another type T', compatible with T, already existed in
   gimple_types then return T', otherwise return T.  This is used by
   LTO to merge identical types read from different TUs.  */

tree
gimple_register_type (tree t)
{
  gcc_assert (TYPE_P (t));

  if (!gimple_type_leader)
    gimple_type_leader = ggc_alloc_cleared_vec_gimple_type_leader_entry_s
			  (GIMPLE_TYPE_LEADER_SIZE);

  if (gimple_types == NULL)
    gimple_types = htab_create_ggc (16381, gimple_type_hash, gimple_type_eq, 0);

  return gimple_register_type_1 (t, false);
}

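/* Illustrative sketch (not part of GCC): a streamed-in type is mapped
   to its merged representative like so; the leader equals T iff T is
   the first registered member of its equivalence class.  Guarded out.  */
#if 0
static tree
example_merge_streamed_type (tree t)
{
  tree leader = gimple_register_type (t);
  return leader;
}
#endif
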
/* The TYPE_CANONICAL merging machinery.  It should closely resemble
   the middle-end types_compatible_p function.  It needs to avoid
   claiming types are different for types that should be treated
   the same with respect to TBAA.  Canonical types are also used
   for IL consistency checks via the useless_type_conversion_p
   predicate which does not handle all type kinds itself but falls
   back to pointer-comparison of TYPE_CANONICAL for aggregates
   for example.  */

/* Return true iff T1 and T2 are structurally identical for what
   TBAA is concerned.  */

static bool
gimple_canonical_types_compatible_p (tree t1, tree t2)
{
  /* Before starting to set up the SCC machinery handle simple cases.  */

  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Check that we have two types to compare.  */
  if (t1 == NULL_TREE || t2 == NULL_TREE)
    return false;

  /* If the types have been previously registered and found equal
     they still are.  */
  if (TYPE_CANONICAL (t1)
      && TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2))
    return true;

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;

  if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
    return false;

  /* Qualifiers do not matter for canonical type comparison purposes.  */

  /* Void types and nullptr types are always the same.  */
  if (TREE_CODE (t1) == VOID_TYPE
      || TREE_CODE (t1) == NULLPTR_TYPE)
    return true;

  /* Can't be the same type if they have different alignment or mode.  */
  if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
      || TYPE_MODE (t1) != TYPE_MODE (t2))
    return false;

  /* Non-aggregate types can be handled cheaply.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      /* Can't be the same type if they have different sign or precision.  */
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
	  || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
	return false;

      if (TREE_CODE (t1) == INTEGER_TYPE
	  && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
	      || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
	return false;

      /* For canonical type comparisons we do not want to build SCCs
	 so we cannot compare pointed-to types.  But we can, for now,
	 require the same pointed-to type kind and match what
	 useless_type_conversion_p would do.  */
      if (POINTER_TYPE_P (t1))
	{
	  /* If the two pointers have different ref-all attributes,
	     they can't be the same type.  */
	  if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
	    return false;

	  if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
	      != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
	    return false;

	  if (TYPE_RESTRICT (t1) != TYPE_RESTRICT (t2))
	    return false;

	  if (TREE_CODE (TREE_TYPE (t1)) != TREE_CODE (TREE_TYPE (t2)))
	    return false;
	}

      /* Tail-recurse to components.  */
      if (TREE_CODE (t1) == VECTOR_TYPE
	  || TREE_CODE (t1) == COMPLEX_TYPE)
	return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
						    TREE_TYPE (t2));

      return true;
    }

  /* If their attributes are not the same they can't be the same type.  */
  if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
    return false;

  /* Do type-specific comparisons.  */
  switch (TREE_CODE (t1))
    {
    case ARRAY_TYPE:
      /* Array types are the same if the element types are the same and
	 the number of elements are the same.  */
      if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))
	  || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
	  || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
	return false;
      else
	{
	  tree i1 = TYPE_DOMAIN (t1);
	  tree i2 = TYPE_DOMAIN (t2);

	  /* For an incomplete external array, the type domain can be
	     NULL_TREE.  Check this condition also.  */
	  if (i1 == NULL_TREE && i2 == NULL_TREE)
	    return true;
	  else if (i1 == NULL_TREE || i2 == NULL_TREE)
	    return false;
	  /* If for a complete array type the possibly gimplified sizes
	     are different the types are different.  */
	  else if (((TYPE_SIZE (i1) != NULL) ^ (TYPE_SIZE (i2) != NULL))
		   || (TYPE_SIZE (i1)
		       && TYPE_SIZE (i2)
		       && !operand_equal_p (TYPE_SIZE (i1), TYPE_SIZE (i2), 0)))
	    return false;
	  else
	    {
	      tree min1 = TYPE_MIN_VALUE (i1);
	      tree min2 = TYPE_MIN_VALUE (i2);
	      tree max1 = TYPE_MAX_VALUE (i1);
	      tree max2 = TYPE_MAX_VALUE (i2);

	      /* The minimum/maximum values have to be the same.  */
	      if ((min1 == min2
		   || (min1 && min2
		       && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
			    && TREE_CODE (min2) == PLACEHOLDER_EXPR)
			   || operand_equal_p (min1, min2, 0))))
		  && (max1 == max2
		      || (max1 && max2
			  && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
			       && TREE_CODE (max2) == PLACEHOLDER_EXPR)
			      || operand_equal_p (max1, max2, 0)))))
		return true;
	      else
		return false;
	    }
	}

    case METHOD_TYPE:
      /* Method types should belong to the same class.  */
      if (!gimple_canonical_types_compatible_p
	     (TYPE_METHOD_BASETYPE (t1), TYPE_METHOD_BASETYPE (t2)))
	return false;

      /* Fallthru  */

    case FUNCTION_TYPE:
      /* Function types are the same if the return type and arguments types
	 are the same.  */
      if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))
	return false;

      if (!comp_type_attributes (t1, t2))
	return false;

      if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
	return true;
      else
	{
	  tree parms1, parms2;

	  for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
	       parms1 && parms2;
	       parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
	    {
	      if (!gimple_canonical_types_compatible_p
		     (TREE_VALUE (parms1), TREE_VALUE (parms2)))
		return false;
	    }

	  if (parms1 || parms2)
	    return false;

	  return true;
	}

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f1, f2;

	/* For aggregate types, all the fields must be the same.  */
	for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
	     f1 || f2;
	     f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
	  {
	    /* Skip non-fields.  */
	    while (f1 && TREE_CODE (f1) != FIELD_DECL)
	      f1 = TREE_CHAIN (f1);
	    while (f2 && TREE_CODE (f2) != FIELD_DECL)
	      f2 = TREE_CHAIN (f2);
	    if (!f1 || !f2)
	      break;
	    /* The fields must have the same name, offset and type.  */
	    if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
		|| !gimple_compare_field_offset (f1, f2)
		|| !gimple_canonical_types_compatible_p
		     (TREE_TYPE (f1), TREE_TYPE (f2)))
	      return false;
	  }

	/* If one aggregate has more fields than the other, they
	   are not the same.  */
	if (f1 || f2)
	  return false;

	return true;
      }

    default:
      gcc_unreachable ();
    }
}

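/* Illustrative sketch (not part of GCC): canonical-type compatibility
   is deliberately coarser than gimple_types_compatible_p; qualifiers
   are ignored, for instance, so "int" and "const int" may share a
   TYPE_CANONICAL.  Guarded out.  */
#if 0
static bool
example_tbaa_equivalent_p (tree t1, tree t2)
{
  return gimple_canonical_types_compatible_p (t1, t2);
}
#endif
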
/* Returns nonzero if P1 and P2 are equal.  */

static int
gimple_canonical_type_eq (const void *p1, const void *p2)
{
  const_tree t1 = (const_tree) p1;
  const_tree t2 = (const_tree) p2;
  return gimple_canonical_types_compatible_p (CONST_CAST_TREE (t1),
					      CONST_CAST_TREE (t2));
}

/* Register type T in the global canonical type table
   gimple_canonical_types.  If another type T', compatible with T,
   already existed in gimple_canonical_types then return T', otherwise
   return T.  This is used by LTO to merge identical types read from
   different TUs.

   ??? This merging does not exactly match how the tree.c middle-end
   functions will assign TYPE_CANONICAL when new types are created
   during optimization (which at least happens for pointer and array
   types).  */

tree
gimple_register_canonical_type (tree t)
{
  void **slot;

  gcc_assert (TYPE_P (t));

  if (TYPE_CANONICAL (t))
    return TYPE_CANONICAL (t);

  if (gimple_canonical_types == NULL)
    gimple_canonical_types = htab_create_ggc (16381, gimple_canonical_type_hash,
					      gimple_canonical_type_eq, 0);

  slot = htab_find_slot (gimple_canonical_types, t, INSERT);
  if (*slot
      && *(tree *)slot != t)
    {
      tree new_type = (tree) *((tree *) slot);

      TYPE_CANONICAL (t) = new_type;
      t = new_type;
    }
  else
    {
      TYPE_CANONICAL (t) = t;
      *slot = (void *) t;
    }

  return t;
}

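/* Illustrative sketch (not part of GCC): after registration each type
   carries its canonical leader in TYPE_CANONICAL, which alias.c then
   compares by pointer for TBAA decisions on aggregates.  Guarded out.  */
#if 0
static void
example_canonicalize (tree t)
{
  tree canon = gimple_register_canonical_type (t);
  gcc_assert (TYPE_CANONICAL (t) == canon);
}
#endif
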
/* Show statistics on references to the global type table gimple_types.  */

void
print_gimple_types_stats (void)
{
  if (gimple_types)
    fprintf (stderr, "GIMPLE type table: size %ld, %ld elements, "
	     "%ld searches, %ld collisions (ratio: %f)\n",
	     (long) htab_size (gimple_types),
	     (long) htab_elements (gimple_types),
	     (long) gimple_types->searches,
	     (long) gimple_types->collisions,
	     htab_collisions (gimple_types));
  else
    fprintf (stderr, "GIMPLE type table is empty\n");
  if (type_hash_cache)
    fprintf (stderr, "GIMPLE type hash table: size %ld, %ld elements, "
	     "%ld searches, %ld collisions (ratio: %f)\n",
	     (long) htab_size (type_hash_cache),
	     (long) htab_elements (type_hash_cache),
	     (long) type_hash_cache->searches,
	     (long) type_hash_cache->collisions,
	     htab_collisions (type_hash_cache));
  else
    fprintf (stderr, "GIMPLE type hash table is empty\n");
  if (gimple_canonical_types)
    fprintf (stderr, "GIMPLE canonical type table: size %ld, %ld elements, "
	     "%ld searches, %ld collisions (ratio: %f)\n",
	     (long) htab_size (gimple_canonical_types),
	     (long) htab_elements (gimple_canonical_types),
	     (long) gimple_canonical_types->searches,
	     (long) gimple_canonical_types->collisions,
	     htab_collisions (gimple_canonical_types));
  else
    fprintf (stderr, "GIMPLE canonical type table is empty\n");
  if (canonical_type_hash_cache)
    fprintf (stderr, "GIMPLE canonical type hash table: size %ld, %ld elements, "
	     "%ld searches, %ld collisions (ratio: %f)\n",
	     (long) htab_size (canonical_type_hash_cache),
	     (long) htab_elements (canonical_type_hash_cache),
	     (long) canonical_type_hash_cache->searches,
	     (long) canonical_type_hash_cache->collisions,
	     htab_collisions (canonical_type_hash_cache));
  else
    fprintf (stderr, "GIMPLE canonical type hash table is empty\n");
}

/* Free the gimple type hashtables used for LTO type merging.  */

void
free_gimple_type_tables (void)
{
  /* Last chance to print stats for the tables.  */
  if (flag_lto_report)
    print_gimple_types_stats ();

  if (gimple_types)
    {
      htab_delete (gimple_types);
      gimple_types = NULL;
    }
  if (gimple_canonical_types)
    {
      htab_delete (gimple_canonical_types);
      gimple_canonical_types = NULL;
    }
  if (type_hash_cache)
    {
      htab_delete (type_hash_cache);
      type_hash_cache = NULL;
    }
  if (canonical_type_hash_cache)
    {
      htab_delete (canonical_type_hash_cache);
      canonical_type_hash_cache = NULL;
    }
  if (type_pair_cache)
    {
      free (type_pair_cache);
      type_pair_cache = NULL;
    }
  gimple_type_leader = NULL;
}

/* Return a type the same as TYPE except unsigned or
   signed according to UNSIGNEDP.  */

static tree
gimple_signed_or_unsigned_type (bool unsignedp, tree type)
{
  tree type1;

  type1 = TYPE_MAIN_VARIANT (type);
  if (type1 == signed_char_type_node
      || type1 == char_type_node
      || type1 == unsigned_char_type_node)
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (type1 == integer_type_node || type1 == unsigned_type_node)
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (type1 == short_integer_type_node || type1 == short_unsigned_type_node)
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (type1 == long_integer_type_node || type1 == long_unsigned_type_node)
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (type1 == long_long_integer_type_node
      || type1 == long_long_unsigned_type_node)
    return unsignedp
	   ? long_long_unsigned_type_node
	   : long_long_integer_type_node;
  if (int128_integer_type_node
      && (type1 == int128_integer_type_node
	  || type1 == int128_unsigned_type_node))
    return unsignedp
	   ? int128_unsigned_type_node
	   : int128_integer_type_node;
#if HOST_BITS_PER_WIDE_INT >= 64
  if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node)
    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
#endif
  if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node)
    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
  if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node)
    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
  if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node)
    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
  if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node)
    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;

#define GIMPLE_FIXED_TYPES(NAME) \
  if (type1 == short_ ## NAME ## _type_node \
      || type1 == unsigned_short_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_short_ ## NAME ## _type_node \
		     : short_ ## NAME ## _type_node; \
  if (type1 == NAME ## _type_node \
      || type1 == unsigned_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_ ## NAME ## _type_node \
		     : NAME ## _type_node; \
  if (type1 == long_ ## NAME ## _type_node \
      || type1 == unsigned_long_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_long_ ## NAME ## _type_node \
		     : long_ ## NAME ## _type_node; \
  if (type1 == long_long_ ## NAME ## _type_node \
      || type1 == unsigned_long_long_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
		     : long_long_ ## NAME ## _type_node;

#define GIMPLE_FIXED_MODE_TYPES(NAME) \
  if (type1 == NAME ## _type_node \
      || type1 == u ## NAME ## _type_node) \
    return unsignedp ? u ## NAME ## _type_node \
		     : NAME ## _type_node;

#define GIMPLE_FIXED_TYPES_SAT(NAME) \
  if (type1 == sat_ ## short_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
		     : sat_ ## short_ ## NAME ## _type_node; \
  if (type1 == sat_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
		     : sat_ ## NAME ## _type_node; \
  if (type1 == sat_ ## long_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
		     : sat_ ## long_ ## NAME ## _type_node; \
  if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
		     : sat_ ## long_long_ ## NAME ## _type_node;

#define GIMPLE_FIXED_MODE_TYPES_SAT(NAME) \
  if (type1 == sat_ ## NAME ## _type_node \
      || type1 == sat_ ## u ## NAME ## _type_node) \
    return unsignedp ? sat_ ## u ## NAME ## _type_node \
		     : sat_ ## NAME ## _type_node;

  GIMPLE_FIXED_TYPES (fract);
  GIMPLE_FIXED_TYPES_SAT (fract);
  GIMPLE_FIXED_TYPES (accum);
  GIMPLE_FIXED_TYPES_SAT (accum);

  GIMPLE_FIXED_MODE_TYPES (qq);
  GIMPLE_FIXED_MODE_TYPES (hq);
  GIMPLE_FIXED_MODE_TYPES (sq);
  GIMPLE_FIXED_MODE_TYPES (dq);
  GIMPLE_FIXED_MODE_TYPES (tq);
  GIMPLE_FIXED_MODE_TYPES_SAT (qq);
  GIMPLE_FIXED_MODE_TYPES_SAT (hq);
  GIMPLE_FIXED_MODE_TYPES_SAT (sq);
  GIMPLE_FIXED_MODE_TYPES_SAT (dq);
  GIMPLE_FIXED_MODE_TYPES_SAT (tq);
  GIMPLE_FIXED_MODE_TYPES (ha);
  GIMPLE_FIXED_MODE_TYPES (sa);
  GIMPLE_FIXED_MODE_TYPES (da);
  GIMPLE_FIXED_MODE_TYPES (ta);
  GIMPLE_FIXED_MODE_TYPES_SAT (ha);
  GIMPLE_FIXED_MODE_TYPES_SAT (sa);
  GIMPLE_FIXED_MODE_TYPES_SAT (da);
  GIMPLE_FIXED_MODE_TYPES_SAT (ta);

  /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not
     the precision; they have precision set to match their range, but
     may use a wider mode to match an ABI.  If we change modes, we may
     wind up with bad conversions.  For INTEGER_TYPEs in C, must check
     the precision as well, so as to yield correct results for
     bit-field types.  C++ does not have these separate bit-field
     types, and producing a signed or unsigned variant of an
     ENUMERAL_TYPE may cause other problems as well.  */
  if (!INTEGRAL_TYPE_P (type)
      || TYPE_UNSIGNED (type) == unsignedp)
    return type;

#define TYPE_OK(node) \
  (TYPE_MODE (type) == TYPE_MODE (node) \
   && TYPE_PRECISION (type) == TYPE_PRECISION (node))
  if (TYPE_OK (signed_char_type_node))
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (TYPE_OK (integer_type_node))
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (TYPE_OK (short_integer_type_node))
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (TYPE_OK (long_integer_type_node))
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (TYPE_OK (long_long_integer_type_node))
    return (unsignedp
	    ? long_long_unsigned_type_node
	    : long_long_integer_type_node);
  if (int128_integer_type_node && TYPE_OK (int128_integer_type_node))
    return (unsignedp
	    ? int128_unsigned_type_node
	    : int128_integer_type_node);

#if HOST_BITS_PER_WIDE_INT >= 64
  if (TYPE_OK (intTI_type_node))
    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
#endif
  if (TYPE_OK (intDI_type_node))
    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
  if (TYPE_OK (intSI_type_node))
    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
  if (TYPE_OK (intHI_type_node))
    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
  if (TYPE_OK (intQI_type_node))
    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;

#undef GIMPLE_FIXED_TYPES
#undef GIMPLE_FIXED_MODE_TYPES
#undef GIMPLE_FIXED_TYPES_SAT
#undef GIMPLE_FIXED_MODE_TYPES_SAT
#undef TYPE_OK

  return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
}

/* Return an unsigned type the same as TYPE in other respects.  */

tree
gimple_unsigned_type (tree type)
{
  return gimple_signed_or_unsigned_type (true, type);
}


/* Return a signed type the same as TYPE in other respects.  */

tree
gimple_signed_type (tree type)
{
  return gimple_signed_or_unsigned_type (false, type);
}

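/* Illustrative sketch (not part of GCC): flipping the sign of a
   standard type yields the matching standard node rather than a
   freshly built type.  Guarded out.  */
#if 0
static void
example_flip_sign (void)
{
  gcc_assert (gimple_unsigned_type (integer_type_node)
	      == unsigned_type_node);
  gcc_assert (gimple_signed_type (unsigned_char_type_node)
	      == signed_char_type_node);
}
#endif
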
/* Return the type-based alias set for T, which may be an expression
   or a type.  Return -1 if we don't do anything special.  */

alias_set_type
gimple_get_alias_set (tree t)
{
  tree u;

  /* Permit type-punning when accessing a union, provided the access
     is directly through the union.  For example, this code does not
     permit taking the address of a union member and then storing
     through it.  Even the type-punning allowed here is a GCC
     extension, albeit a common and useful one; the C standard says
     that such accesses have implementation-defined behavior.  */
  for (u = t;
       TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
       u = TREE_OPERAND (u, 0))
    if (TREE_CODE (u) == COMPONENT_REF
	&& TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
      return 0;

  /* That's all the expressions we handle specially.  */
  if (!TYPE_P (t))
    return -1;

  /* For convenience, follow the C standard when dealing with
     character types.  Any object may be accessed via an lvalue that
     has character type.  */
  if (t == char_type_node
      || t == signed_char_type_node
      || t == unsigned_char_type_node)
    return 0;

  /* Allow aliasing between signed and unsigned variants of the same
     type.  We treat the signed variant as canonical.  */
  if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t))
    {
      tree t1 = gimple_signed_type (t);

      /* t1 == t can happen for boolean nodes which are always unsigned.  */
      if (t1 != t)
	return get_alias_set (t1);
    }

  return -1;
}

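/* Illustrative sketch (not part of GCC): for a direct union member
   access like U.F, gimple_get_alias_set returns alias set 0, which
   conflicts with everything and therefore permits the type pun;
   taking &U.F and storing through the pointer is not covered.
   Guarded out.  */
#if 0
union example_u { int i; float f; };
#endif
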
/* Data structure used to count the number of dereferences to PTR
   inside an expression.  */
struct count_ptr_d
{
  tree ptr;
  unsigned num_stores;
  unsigned num_loads;
};

/* Helper for count_uses_and_derefs.  Called by walk_tree to look for
   (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA.  */

static tree
count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  struct count_ptr_d *count_p = (struct count_ptr_d *) wi_p->info;

  /* Do not walk inside ADDR_EXPR nodes.  In the expression &ptr->fld,
     pointer 'ptr' is *not* dereferenced, it is simply used to compute
     the address of 'fld' as 'ptr + offsetof(fld)'.  */
  if (TREE_CODE (*tp) == ADDR_EXPR)
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (*tp) == MEM_REF && TREE_OPERAND (*tp, 0) == count_p->ptr)
    {
      if (wi_p->is_lhs)
	count_p->num_stores++;
      else
	count_p->num_loads++;
    }

  return NULL_TREE;
}

/* Count the number of direct and indirect uses for pointer PTR in
   statement STMT.  The number of direct uses is stored in
   *NUM_USES_P.  Indirect references are counted separately depending
   on whether they are store or load operations.  The counts are
   stored in *NUM_STORES_P and *NUM_LOADS_P.  */

void
count_uses_and_derefs (tree ptr, gimple stmt, unsigned *num_uses_p,
		       unsigned *num_loads_p, unsigned *num_stores_p)
{
  ssa_op_iter i;
  tree use;

  *num_uses_p = 0;
  *num_loads_p = 0;
  *num_stores_p = 0;

  /* Find out the total number of uses of PTR in STMT.  */
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
    if (use == ptr)
      (*num_uses_p)++;

  /* Now count the number of indirect references to PTR.  This is
     truly awful, but we don't have much choice.  There are no parent
     pointers inside INDIRECT_REFs, so an expression like
     '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
     find all the indirect and direct uses of x_1 inside.  The only
     shortcut we can take is the fact that GIMPLE only allows
     INDIRECT_REFs inside the expressions below.  */
  if (is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_RETURN
      || gimple_code (stmt) == GIMPLE_ASM
      || is_gimple_call (stmt))
    {
      struct walk_stmt_info wi;
      struct count_ptr_d count;

      count.ptr = ptr;
      count.num_stores = 0;
      count.num_loads = 0;

      memset (&wi, 0, sizeof (wi));
      wi.info = &count;
      walk_gimple_op (stmt, count_ptr_derefs, &wi);

      *num_stores_p = count.num_stores;
      *num_loads_p = count.num_loads;
    }

  gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p);
}

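/* Illustrative sketch (not part of GCC): for the statement
   *x_1 = foo (x_1, *x_1) the counts come back as three uses, one
   load and one store of x_1.  Guarded out.  */
#if 0
static void
example_count_ptr (tree ptr, gimple stmt)
{
  unsigned uses, loads, stores;
  count_uses_and_derefs (ptr, stmt, &uses, &loads, &stores);
}
#endif
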
/* From a tree operand OP return the base of a load or store operation
   or NULL_TREE if OP is not a load or a store.  */

static tree
get_base_loadstore (tree op)
{
  while (handled_component_p (op))
    op = TREE_OPERAND (op, 0);
  if (DECL_P (op)
      || INDIRECT_REF_P (op)
      || TREE_CODE (op) == MEM_REF
      || TREE_CODE (op) == TARGET_MEM_REF)
    return op;
  return NULL_TREE;
}

5118/* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
5119 VISIT_ADDR if non-NULL on loads, store and address-taken operands
5120 passing the STMT, the base of the operand and DATA to it. The base
5121 will be either a decl, an indirect reference (including TARGET_MEM_REF)
5122 or the argument of an address expression.
5123 Returns the results of these callbacks or'ed. */
5124
5125bool
5126walk_stmt_load_store_addr_ops (gimple stmt, void *data,
5127 bool (*visit_load)(gimple, tree, void *),
5128 bool (*visit_store)(gimple, tree, void *),
5129 bool (*visit_addr)(gimple, tree, void *))
5130{
5131 bool ret = false;
5132 unsigned i;
5133 if (gimple_assign_single_p (stmt))
5134 {
5135 tree lhs, rhs;
5136 if (visit_store)
5137 {
5138 lhs = get_base_loadstore (gimple_assign_lhs (stmt));
5139 if (lhs)
5140 ret |= visit_store (stmt, lhs, data);
5141 }
5142 rhs = gimple_assign_rhs1 (stmt);
ad8a1ac0
RG
5143 while (handled_component_p (rhs))
5144 rhs = TREE_OPERAND (rhs, 0);
346ef3fa
RG
5145 if (visit_addr)
5146 {
5147 if (TREE_CODE (rhs) == ADDR_EXPR)
5148 ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
5149 else if (TREE_CODE (rhs) == TARGET_MEM_REF
5150 && TREE_CODE (TMR_BASE (rhs)) == ADDR_EXPR)
5151 ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (rhs), 0), data);
5152 else if (TREE_CODE (rhs) == OBJ_TYPE_REF
5153 && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
5154 ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
5155 0), data);
cb3d2e33
JJ
5156 else if (TREE_CODE (rhs) == CONSTRUCTOR)
5157 {
5158 unsigned int ix;
5159 tree val;
5160
5161 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), ix, val)
5162 if (TREE_CODE (val) == ADDR_EXPR)
5163 ret |= visit_addr (stmt, TREE_OPERAND (val, 0), data);
5164 else if (TREE_CODE (val) == OBJ_TYPE_REF
5165 && TREE_CODE (OBJ_TYPE_REF_OBJECT (val)) == ADDR_EXPR)
5166 ret |= visit_addr (stmt,
5167 TREE_OPERAND (OBJ_TYPE_REF_OBJECT (val),
5168 0), data);
5169 }
fff1894c
AB
5170 lhs = gimple_assign_lhs (stmt);
5171 if (TREE_CODE (lhs) == TARGET_MEM_REF
fff1894c
AB
5172 && TREE_CODE (TMR_BASE (lhs)) == ADDR_EXPR)
5173 ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (lhs), 0), data);
346ef3fa
RG
5174 }
5175 if (visit_load)
5176 {
5177 rhs = get_base_loadstore (rhs);
5178 if (rhs)
5179 ret |= visit_load (stmt, rhs, data);
5180 }
5181 }
5182 else if (visit_addr
5183 && (is_gimple_assign (stmt)
4d7a65ea 5184 || gimple_code (stmt) == GIMPLE_COND))
346ef3fa
RG
5185 {
5186 for (i = 0; i < gimple_num_ops (stmt); ++i)
9dd58aa4
JJ
5187 {
5188 tree op = gimple_op (stmt, i);
5189 if (op == NULL_TREE)
5190 ;
5191 else if (TREE_CODE (op) == ADDR_EXPR)
5192 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
5193 /* COND_EXPR and VCOND_EXPR rhs1 argument is a comparison
5194 tree with two operands. */
5195 else if (i == 1 && COMPARISON_CLASS_P (op))
5196 {
5197 if (TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
5198 ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 0),
5199 0), data);
5200 if (TREE_CODE (TREE_OPERAND (op, 1)) == ADDR_EXPR)
5201 ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 1),
5202 0), data);
5203 }
5204 }
346ef3fa
RG
5205 }
  else if (is_gimple_call (stmt))
    {
      if (visit_store)
        {
          tree lhs = gimple_call_lhs (stmt);
          if (lhs)
            {
              lhs = get_base_loadstore (lhs);
              if (lhs)
                ret |= visit_store (stmt, lhs, data);
            }
        }
      if (visit_load || visit_addr)
        for (i = 0; i < gimple_call_num_args (stmt); ++i)
          {
            tree rhs = gimple_call_arg (stmt, i);
            if (visit_addr
                && TREE_CODE (rhs) == ADDR_EXPR)
              ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
            else if (visit_load)
              {
                rhs = get_base_loadstore (rhs);
                if (rhs)
                  ret |= visit_load (stmt, rhs, data);
              }
          }
      if (visit_addr
          && gimple_call_chain (stmt)
          && TREE_CODE (gimple_call_chain (stmt)) == ADDR_EXPR)
        ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (stmt), 0),
                           data);
      if (visit_addr
          && gimple_call_return_slot_opt_p (stmt)
          && gimple_call_lhs (stmt) != NULL_TREE
          && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
        ret |= visit_addr (stmt, gimple_call_lhs (stmt), data);
    }
  else if (gimple_code (stmt) == GIMPLE_ASM)
    {
      unsigned noutputs;
      const char *constraint;
      const char **oconstraints;
      bool allows_mem, allows_reg, is_inout;
      noutputs = gimple_asm_noutputs (stmt);
      oconstraints = XALLOCAVEC (const char *, noutputs);
      if (visit_store || visit_addr)
        for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
          {
            tree link = gimple_asm_output_op (stmt, i);
            tree op = get_base_loadstore (TREE_VALUE (link));
            if (op && visit_store)
              ret |= visit_store (stmt, op, data);
            if (visit_addr)
              {
                constraint = TREE_STRING_POINTER
                    (TREE_VALUE (TREE_PURPOSE (link)));
                oconstraints[i] = constraint;
                parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
                                         &allows_reg, &is_inout);
                if (op && !allows_reg && allows_mem)
                  ret |= visit_addr (stmt, op, data);
              }
          }
      if (visit_load || visit_addr)
        for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
          {
            tree link = gimple_asm_input_op (stmt, i);
            tree op = TREE_VALUE (link);
            if (visit_addr
                && TREE_CODE (op) == ADDR_EXPR)
              ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
            else if (visit_load || visit_addr)
              {
                op = get_base_loadstore (op);
                if (op)
                  {
                    if (visit_load)
                      ret |= visit_load (stmt, op, data);
                    if (visit_addr)
                      {
                        constraint = TREE_STRING_POINTER
                            (TREE_VALUE (TREE_PURPOSE (link)));
                        parse_input_constraint (&constraint, 0, 0, noutputs,
                                                0, oconstraints,
                                                &allows_mem, &allows_reg);
                        if (!allows_reg && allows_mem)
                          ret |= visit_addr (stmt, op, data);
                      }
                  }
              }
          }
    }
  else if (gimple_code (stmt) == GIMPLE_RETURN)
    {
      tree op = gimple_return_retval (stmt);
      if (op)
        {
          if (visit_addr
              && TREE_CODE (op) == ADDR_EXPR)
            ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
          else if (visit_load)
            {
              op = get_base_loadstore (op);
              if (op)
                ret |= visit_load (stmt, op, data);
            }
        }
    }
  else if (visit_addr
           && gimple_code (stmt) == GIMPLE_PHI)
    {
      for (i = 0; i < gimple_phi_num_args (stmt); ++i)
        {
          tree op = PHI_ARG_DEF (stmt, i);
          if (TREE_CODE (op) == ADDR_EXPR)
            ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
        }
    }

  return ret;
}
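
/* Usage sketch (editor's illustration, not part of the original file):
   a visit_load callback that records the decls whose value is loaded.
   The walker hands the callback the base as found by
   get_base_loadstore, which may be a decl or a memory reference, so
   the DECL_P check below is needed.  The callback name and the bitmap
   passed through DATA are hypothetical.  */
#if 0
static bool
record_load_base (gimple stmt ATTRIBUTE_UNUSED, tree base, void *data)
{
  bitmap loaded = (bitmap) data;
  if (DECL_P (base))
    bitmap_set_bit (loaded, DECL_UID (base));
  /* A nonzero return is or'ed into the walker's overall result.  */
  return false;
}

/* ... then, for some statement STMT and bitmap LOADED_DECLS ...
   walk_stmt_load_store_addr_ops (stmt, loaded_decls,
                                  record_load_base, NULL, NULL);  */
#endif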

/* Like walk_stmt_load_store_addr_ops but with NULL visit_addr.  IPA-CP
   should make a faster clone for this case.  */

bool
walk_stmt_load_store_ops (gimple stmt, void *data,
                          bool (*visit_load)(gimple, tree, void *),
                          bool (*visit_store)(gimple, tree, void *))
{
  return walk_stmt_load_store_addr_ops (stmt, data,
                                        visit_load, visit_store, NULL);
}
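
/* Usage sketch (editor's illustration): counting loads and stores with
   the address-free variant.  The counter struct and callback names are
   hypothetical.  */
#if 0
struct ls_counts { unsigned loads, stores; };

static bool
count_load (gimple stmt ATTRIBUTE_UNUSED, tree op ATTRIBUTE_UNUSED,
            void *data)
{
  ((struct ls_counts *) data)->loads++;
  return false;
}

static bool
count_store (gimple stmt ATTRIBUTE_UNUSED, tree op ATTRIBUTE_UNUSED,
             void *data)
{
  ((struct ls_counts *) data)->stores++;
  return false;
}

/* struct ls_counts c = { 0, 0 };
   walk_stmt_load_store_ops (stmt, &c, count_load, count_store);  */
#endif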

/* Helper for gimple_ior_addresses_taken.  */

static bool
gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED,
                              tree addr, void *data)
{
  bitmap addresses_taken = (bitmap) data;
  addr = get_base_address (addr);
  if (addr
      && DECL_P (addr))
    {
      bitmap_set_bit (addresses_taken, DECL_UID (addr));
      return true;
    }
  return false;
}

/* Set the bit for the uid of all decls that have their address taken
   in STMT in the ADDRESSES_TAKEN bitmap.  Returns true if there
   were any in this stmt.  */

bool
gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
{
  return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
                                        gimple_ior_addresses_taken_1);
}
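
/* Usage sketch (editor's illustration): gathering the address-taken
   decls of a whole basic block BB into a caller-provided bitmap.  BB
   is assumed to be in scope.  */
#if 0
bitmap addresses = BITMAP_ALLOC (NULL);
gimple_stmt_iterator gsi;
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
  gimple_ior_addresses_taken (addresses, gsi_stmt (gsi));
#endif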

/* Return a printable name for symbol DECL.  */

const char *
gimple_decl_printable_name (tree decl, int verbosity)
{
  if (!DECL_NAME (decl))
    return NULL;

  if (DECL_ASSEMBLER_NAME_SET_P (decl))
    {
      const char *str, *mangled_str;
      int dmgl_opts = DMGL_NO_OPTS;

      if (verbosity >= 2)
        {
          dmgl_opts = DMGL_VERBOSE
                      | DMGL_ANSI
                      | DMGL_GNU_V3
                      | DMGL_RET_POSTFIX;
          if (TREE_CODE (decl) == FUNCTION_DECL)
            dmgl_opts |= DMGL_PARAMS;
        }

      mangled_str = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
      str = cplus_demangle_v3 (mangled_str, dmgl_opts);
      return (str) ? str : mangled_str;
    }

  return IDENTIFIER_POINTER (DECL_NAME (decl));
}
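
/* Usage sketch (editor's illustration): printing the demangled name of
   the current function into a dump file.  */
#if 0
const char *name = gimple_decl_printable_name (current_function_decl, 2);
if (dump_file && name)
  fprintf (dump_file, "processing %s\n", name);
#endif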

/* Return true when STMT is a builtin call to function CODE.  */

bool
gimple_call_builtin_p (gimple stmt, enum built_in_function code)
{
  tree fndecl;
  return (is_gimple_call (stmt)
          && (fndecl = gimple_call_fndecl (stmt)) != NULL
          && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
          && DECL_FUNCTION_CODE (fndecl) == code);
}
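
/* Usage sketch (editor's illustration): recognizing a memcpy call and
   picking up its arguments.  */
#if 0
if (gimple_call_builtin_p (stmt, BUILT_IN_MEMCPY))
  {
    tree dest = gimple_call_arg (stmt, 0);
    tree src = gimple_call_arg (stmt, 1);
    /* ... analyze or transform the copy ...  */
  }
#endif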

/* Return true if STMT clobbers memory.  STMT is required to be a
   GIMPLE_ASM.  */

bool
gimple_asm_clobbers_memory_p (const_gimple stmt)
{
  unsigned i;

  for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
    {
      tree op = gimple_asm_clobber_op (stmt, i);
      if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op)), "memory") == 0)
        return true;
    }

  return false;
}
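
/* Usage sketch (editor's illustration): treating a memory-clobbering
   asm as a barrier in a hypothetical local analysis.  The helper
   invalidate_cached_loads is made up for the example.  */
#if 0
if (gimple_code (stmt) == GIMPLE_ASM
    && gimple_asm_clobbers_memory_p (stmt))
  invalidate_cached_loads ();  /* Hypothetical helper.  */
#endif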

#include "gt-gimple.h"