/* Gimple IR support functions.

   Copyright 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
   Contributed by Aldy Hernandez <aldyh@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "target.h"
#include "tree.h"
#include "ggc.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "gimple.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "value-prof.h"
#include "flags.h"
#include "alias.h"
#include "demangle.h"
#include "langhooks.h"

/* Global type table.  FIXME lto, it should be possible to re-use some
   of the type hashing routines in tree.c (type_hash_canon, type_hash_lookup,
   etc), but those assume that types were built with the various
   build_*_type routines which is not the case with the streamer.  */
static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
  htab_t gimple_types;
static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
  htab_t gimple_canonical_types;
static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
  htab_t type_hash_cache;
static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
  htab_t canonical_type_hash_cache;

/* Global type comparison cache.  This is by TYPE_UID for space efficiency
   and thus cannot use and does not need GC.  */
static htab_t gtc_visited;
static struct obstack gtc_ob;

/* All the tuples have their operand vector (if present) at the very bottom
   of the structure.  Therefore, the offset required to find the
   operands vector is the size of the structure minus the size of the
   1-element tree array at the end (see gimple_ops).  */
#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
  (HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
EXPORTED_CONST size_t gimple_ops_offset_[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT

#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof (struct STRUCT),
static const size_t gsstruct_code_size[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT

#define DEFGSCODE(SYM, NAME, GSSCODE) NAME,
const char *const gimple_code_name[] = {
#include "gimple.def"
};
#undef DEFGSCODE

#define DEFGSCODE(SYM, NAME, GSSCODE) GSSCODE,
EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = {
#include "gimple.def"
};
#undef DEFGSCODE

#ifdef GATHER_STATISTICS
/* Gimple stats.  */

int gimple_alloc_counts[(int) gimple_alloc_kind_all];
int gimple_alloc_sizes[(int) gimple_alloc_kind_all];

/* Keep in sync with gimple.h:enum gimple_alloc_kind.  */
static const char * const gimple_alloc_kind_names[] = {
    "assignments",
    "phi nodes",
    "conditionals",
    "sequences",
    "everything else"
};

#endif /* GATHER_STATISTICS */

/* A cache of gimple_seq objects.  Sequences are created and destroyed
   fairly often during gimplification.  */
static GTY ((deletable)) struct gimple_seq_d *gimple_seq_cache;

/* Private API manipulation functions shared only with some
   other files.  */
extern void gimple_set_stored_syms (gimple, bitmap, bitmap_obstack *);
extern void gimple_set_loaded_syms (gimple, bitmap, bitmap_obstack *);

/* Gimple tuple constructors.
   Note: any constructor taking a ``gimple_seq'' as a parameter can
   be passed NULL to start with an empty sequence.  */

/* Set the code for statement G to CODE.  */

static inline void
gimple_set_code (gimple g, enum gimple_code code)
{
  g->gsbase.code = code;
}

/* Return the number of bytes needed to hold a GIMPLE statement with
   code CODE.  */

static inline size_t
gimple_size (enum gimple_code code)
{
  return gsstruct_code_size[gss_for_code (code)];
}

/* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
   operands.  */

gimple
gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
{
  size_t size;
  gimple stmt;

  size = gimple_size (code);
  if (num_ops > 0)
    size += sizeof (tree) * (num_ops - 1);

#ifdef GATHER_STATISTICS
  {
    enum gimple_alloc_kind kind = gimple_alloc_kind (code);
    gimple_alloc_counts[(int) kind]++;
    gimple_alloc_sizes[(int) kind] += size;
  }
#endif

  stmt = ggc_alloc_cleared_gimple_statement_d_stat (size PASS_MEM_STAT);
  gimple_set_code (stmt, code);
  gimple_set_num_ops (stmt, num_ops);

  /* Do not call gimple_set_modified here as it has other side
     effects and this tuple is still not completely built.  */
  stmt->gsbase.modified = 1;

  return stmt;
}

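/* A worked example of the size computation above (illustration only,
   not part of the API): for a CODE whose base structure already embeds
   a 1-element operand array, a request for NUM_OPS == 3 yields

     size = gimple_size (code)          <- base struct, ops[1] included
            + sizeof (tree) * (3 - 1);  <- two extra operand slots

   i.e. only NUM_OPS - 1 extra slots are allocated, matching the
   gimple_ops_offset_ layout described at the top of this file.  */
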
/* Set SUBCODE to be the code of the expression computed by statement G.  */

static inline void
gimple_set_subcode (gimple g, unsigned subcode)
{
  /* We only have 16 bits for the RHS code.  Assert that we are not
     overflowing it.  */
  gcc_assert (subcode < (1 << 16));
  g->gsbase.subcode = subcode;
}


/* Build a tuple with operands.  CODE is the statement to build (which
   must be one of the GIMPLE_WITH_OPS tuples).  SUBCODE is the sub-code
   for the new tuple.  NUM_OPS is the number of operands to allocate.  */

#define gimple_build_with_ops(c, s, n) \
  gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)

static gimple
gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
                            unsigned num_ops MEM_STAT_DECL)
{
  gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
  gimple_set_subcode (s, subcode);

  return s;
}


/* Build a GIMPLE_RETURN statement returning RETVAL.  */

gimple
gimple_build_return (tree retval)
{
  gimple s = gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK, 1);
  if (retval)
    gimple_return_set_retval (s, retval);
  return s;
}

/* Reset alias information on call S.  */

void
gimple_call_reset_alias_info (gimple s)
{
  if (gimple_call_flags (s) & ECF_CONST)
    memset (gimple_call_use_set (s), 0, sizeof (struct pt_solution));
  else
    pt_solution_reset (gimple_call_use_set (s));
  if (gimple_call_flags (s) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
    memset (gimple_call_clobber_set (s), 0, sizeof (struct pt_solution));
  else
    pt_solution_reset (gimple_call_clobber_set (s));
}

/* Helper for gimple_build_call, gimple_build_call_vec and
   gimple_build_call_from_tree.  Build the basic components of a
   GIMPLE_CALL statement to function FN with NARGS arguments.  */

static inline gimple
gimple_build_call_1 (tree fn, unsigned nargs)
{
  gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
  if (TREE_CODE (fn) == FUNCTION_DECL)
    fn = build_fold_addr_expr (fn);
  gimple_set_op (s, 1, fn);
  gimple_call_set_fntype (s, TREE_TYPE (TREE_TYPE (fn)));
  gimple_call_reset_alias_info (s);
  return s;
}


/* Build a GIMPLE_CALL statement to function FN with the arguments
   specified in vector ARGS.  */

gimple
gimple_build_call_vec (tree fn, VEC(tree, heap) *args)
{
  unsigned i;
  unsigned nargs = VEC_length (tree, args);
  gimple call = gimple_build_call_1 (fn, nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, VEC_index (tree, args, i));

  return call;
}


/* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
   arguments.  The ... are the arguments.  */

gimple
gimple_build_call (tree fn, unsigned nargs, ...)
{
  va_list ap;
  gimple call;
  unsigned i;

  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));

  call = gimple_build_call_1 (fn, nargs);

  va_start (ap, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));
  va_end (ap);

  return call;
}

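/* Usage sketch (hypothetical caller, not part of this file): build the
   call `x = foo (a, b)', where FNDECL is foo's FUNCTION_DECL and X, A
   and B are valid gimple values:

     gimple call = gimple_build_call (fndecl, 2, a, b);
     gimple_call_set_lhs (call, x);
     gimple_seq_add_stmt (&seq, call);

   gimple_call_set_lhs is the accessor declared in gimple.h;
   gimple_seq_add_stmt is defined later in this file.  */
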
/* Build a GIMPLE_CALL statement from CALL_EXPR T.  Note that T is
   assumed to be in GIMPLE form already.  Minimal checking is done of
   this fact.  */

gimple
gimple_build_call_from_tree (tree t)
{
  unsigned i, nargs;
  gimple call;
  tree fndecl = get_callee_fndecl (t);

  gcc_assert (TREE_CODE (t) == CALL_EXPR);

  nargs = call_expr_nargs (t);
  call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));

  gimple_set_block (call, TREE_BLOCK (t));

  /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL.  */
  gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
  gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
  gimple_call_set_cannot_inline (call, CALL_CANNOT_INLINE_P (t));
  gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA)
    gimple_call_set_alloca_for_var (call, CALL_ALLOCA_FOR_VAR_P (t));
  else
    gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
  gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
  gimple_call_set_nothrow (call, TREE_NOTHROW (t));
  gimple_set_no_warning (call, TREE_NO_WARNING (t));

  return call;
}


/* Extract the operands and code for expression EXPR into *SUBCODE_P,
   *OP1_P, *OP2_P and *OP3_P respectively.  */

void
extract_ops_from_tree_1 (tree expr, enum tree_code *subcode_p, tree *op1_p,
                         tree *op2_p, tree *op3_p)
{
  enum gimple_rhs_class grhs_class;

  *subcode_p = TREE_CODE (expr);
  grhs_class = get_gimple_rhs_class (*subcode_p);

  if (grhs_class == GIMPLE_TERNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
      *op3_p = TREE_OPERAND (expr, 2);
    }
  else if (grhs_class == GIMPLE_BINARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
      *op3_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_UNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = NULL_TREE;
      *op3_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      *op1_p = expr;
      *op2_p = NULL_TREE;
      *op3_p = NULL_TREE;
    }
  else
    gcc_unreachable ();
}

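/* Example of the classification performed above (illustration only):

     a + b   -> GIMPLE_BINARY_RHS   op1 = a, op2 = b, op3 = NULL
     (T) a   -> GIMPLE_UNARY_RHS    op1 = a, op2 = op3 = NULL
     a       -> GIMPLE_SINGLE_RHS   op1 is the expression itself
     ternary codes fill all three slots (GIMPLE_TERNARY_RHS)

   Note that for GIMPLE_SINGLE_RHS the whole EXPR, not a sub-operand,
   is stored in *OP1_P.  */
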
/* Build a GIMPLE_ASSIGN statement.

   LHS is the left-hand side of the assignment.
   RHS is the right-hand side; any tree code with a GIMPLE RHS class
   (single, unary, binary or ternary) is accepted.  */

gimple
gimple_build_assign_stat (tree lhs, tree rhs MEM_STAT_DECL)
{
  enum tree_code subcode;
  tree op1, op2, op3;

  extract_ops_from_tree_1 (rhs, &subcode, &op1, &op2, &op3);
  return gimple_build_assign_with_ops_stat (subcode, lhs, op1, op2, op3
                                            PASS_MEM_STAT);
}


/* Build a GIMPLE_ASSIGN statement with sub-code SUBCODE and operands
   OP1 and OP2.  If OP2 is NULL then SUBCODE must be of class
   GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS.  */

gimple
gimple_build_assign_with_ops_stat (enum tree_code subcode, tree lhs, tree op1,
                                   tree op2, tree op3 MEM_STAT_DECL)
{
  unsigned num_ops;
  gimple p;

  /* Need 1 operand for LHS and 1 or 2 for the RHS (depending on the
     code).  */
  num_ops = get_gimple_rhs_num_ops (subcode) + 1;

  p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
                                  PASS_MEM_STAT);
  gimple_assign_set_lhs (p, lhs);
  gimple_assign_set_rhs1 (p, op1);
  if (op2)
    {
      gcc_assert (num_ops > 2);
      gimple_assign_set_rhs2 (p, op2);
    }

  if (op3)
    {
      gcc_assert (num_ops > 3);
      gimple_assign_set_rhs3 (p, op3);
    }

  return p;
}


/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.

   DST/SRC are the destination and source respectively.  You can pass
   ungimplified trees in DST or SRC, in which case they will be
   converted to a gimple operand if necessary.

   This function returns the newly created GIMPLE_ASSIGN tuple.  */

gimple
gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
{
  tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
  gimplify_and_add (t, seq_p);
  ggc_free (t);
  return gimple_seq_last_stmt (*seq_p);
}

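/* Usage sketch (hypothetical caller): append `dst = src' to a sequence
   during gimplification, letting gimplify_and_add flatten SRC if it is
   not yet a valid gimple operand:

     gimple_seq seq = NULL;
     gimple stmt = gimplify_assign (dst, src, &seq);

   When both operands are already gimple values, building the statement
   directly with gimple_build_assign is cheaper, since no temporary
   MODIFY_EXPR is created and freed.  */
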
/* Build a GIMPLE_COND statement.

   PRED is the condition used to compare LHS and the RHS.
   T_LABEL is the label to jump to if the condition is true.
   F_LABEL is the label to jump to otherwise.  */

gimple
gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
                   tree t_label, tree f_label)
{
  gimple p;

  gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
  p = gimple_build_with_ops (GIMPLE_COND, pred_code, 4);
  gimple_cond_set_lhs (p, lhs);
  gimple_cond_set_rhs (p, rhs);
  gimple_cond_set_true_label (p, t_label);
  gimple_cond_set_false_label (p, f_label);
  return p;
}


/* Extract operands for a GIMPLE_COND statement out of COND_EXPR tree COND.  */

void
gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
                               tree *lhs_p, tree *rhs_p)
{
  gcc_assert (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison
              || TREE_CODE (cond) == TRUTH_NOT_EXPR
              || is_gimple_min_invariant (cond)
              || SSA_VAR_P (cond));

  extract_ops_from_tree (cond, code_p, lhs_p, rhs_p);

  /* Canonicalize conditionals of the form 'if (!VAL)'.  */
  if (*code_p == TRUTH_NOT_EXPR)
    {
      *code_p = EQ_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
  /* Canonicalize conditionals of the form 'if (VAL)'.  */
  else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
    {
      *code_p = NE_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
}

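/* Canonicalization example (illustration only): for a boolean or
   integer VAL,

     if (!VAL)  becomes  VAL == 0   (EQ_EXPR, rhs = build_zero_cst)
     if (VAL)   becomes  VAL != 0   (NE_EXPR, rhs = build_zero_cst)

   so every GIMPLE_COND ends up with a proper comparison code and two
   operands, which is what the rest of the middle end expects.  */
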
/* Build a GIMPLE_COND statement from the conditional expression tree
   COND.  T_LABEL and F_LABEL are as in gimple_build_cond.  */

gimple
gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  return gimple_build_cond (code, lhs, rhs, t_label, f_label);
}

/* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
   boolean expression tree COND.  */

void
gimple_cond_set_condition_from_tree (gimple stmt, tree cond)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  gimple_cond_set_condition (stmt, code, lhs, rhs);
}

/* Build a GIMPLE_LABEL statement for LABEL.  */

gimple
gimple_build_label (tree label)
{
  gimple p = gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1);
  gimple_label_set_label (p, label);
  return p;
}

/* Build a GIMPLE_GOTO statement to label DEST.  */

gimple
gimple_build_goto (tree dest)
{
  gimple p = gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1);
  gimple_goto_set_dest (p, dest);
  return p;
}


/* Build a GIMPLE_NOP statement.  */

gimple
gimple_build_nop (void)
{
  return gimple_alloc (GIMPLE_NOP, 0);
}


/* Build a GIMPLE_BIND statement.
   VARS are the variables in BODY.
   BLOCK is the containing block.  */

gimple
gimple_build_bind (tree vars, gimple_seq body, tree block)
{
  gimple p = gimple_alloc (GIMPLE_BIND, 0);
  gimple_bind_set_vars (p, vars);
  if (body)
    gimple_bind_set_body (p, body);
  if (block)
    gimple_bind_set_block (p, block);
  return p;
}

/* Helper function to set the simple fields of an asm stmt.

   STRING is a pointer to a string that is the asm block's assembly code.
   NINPUTS is the number of register inputs.
   NOUTPUTS is the number of register outputs.
   NCLOBBERS is the number of clobbered registers.
   NLABELS is the number of destination labels (for asm goto).  */

static inline gimple
gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
                    unsigned nclobbers, unsigned nlabels)
{
  gimple p;
  int size = strlen (string);

  /* ASMs with labels cannot have outputs.  This should have been
     enforced by the front end.  */
  gcc_assert (nlabels == 0 || noutputs == 0);

  p = gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
                             ninputs + noutputs + nclobbers + nlabels);

  p->gimple_asm.ni = ninputs;
  p->gimple_asm.no = noutputs;
  p->gimple_asm.nc = nclobbers;
  p->gimple_asm.nl = nlabels;
  p->gimple_asm.string = ggc_alloc_string (string, size);

#ifdef GATHER_STATISTICS
  gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;
#endif

  return p;
}

/* Build a GIMPLE_ASM statement.

   STRING is the assembly code.
   INPUTS is a vector of the input register parameters.
   OUTPUTS is a vector of the output register parameters.
   CLOBBERS is a vector of the clobbered register parameters.
   LABELS is a vector of destination labels.
   The corresponding counts are derived from the vector lengths.  */

gimple
gimple_build_asm_vec (const char *string, VEC(tree,gc)* inputs,
                      VEC(tree,gc)* outputs, VEC(tree,gc)* clobbers,
                      VEC(tree,gc)* labels)
{
  gimple p;
  unsigned i;

  p = gimple_build_asm_1 (string,
                          VEC_length (tree, inputs),
                          VEC_length (tree, outputs),
                          VEC_length (tree, clobbers),
                          VEC_length (tree, labels));

  for (i = 0; i < VEC_length (tree, inputs); i++)
    gimple_asm_set_input_op (p, i, VEC_index (tree, inputs, i));

  for (i = 0; i < VEC_length (tree, outputs); i++)
    gimple_asm_set_output_op (p, i, VEC_index (tree, outputs, i));

  for (i = 0; i < VEC_length (tree, clobbers); i++)
    gimple_asm_set_clobber_op (p, i, VEC_index (tree, clobbers, i));

  for (i = 0; i < VEC_length (tree, labels); i++)
    gimple_asm_set_label_op (p, i, VEC_index (tree, labels, i));

  return p;
}

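/* Usage sketch (hypothetical caller): build
   `__asm__ ("rep; nop" : : : "memory")' with no inputs, outputs or
   labels.  Empty vectors may simply be NULL, since VEC_length of a
   NULL vector is 0:

     gimple s = gimple_build_asm_vec ("rep; nop", NULL, NULL,
                                      clobbers, NULL);

   where CLOBBERS is assumed to hold one TREE_LIST entry wrapping the
   "memory" string constant.  */
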
/* Build a GIMPLE_CATCH statement.

   TYPES are the catch types.
   HANDLER is the exception handler.  */

gimple
gimple_build_catch (tree types, gimple_seq handler)
{
  gimple p = gimple_alloc (GIMPLE_CATCH, 0);
  gimple_catch_set_types (p, types);
  if (handler)
    gimple_catch_set_handler (p, handler);

  return p;
}

/* Build a GIMPLE_EH_FILTER statement.

   TYPES are the filter's types.
   FAILURE is the filter's failure action.  */

gimple
gimple_build_eh_filter (tree types, gimple_seq failure)
{
  gimple p = gimple_alloc (GIMPLE_EH_FILTER, 0);
  gimple_eh_filter_set_types (p, types);
  if (failure)
    gimple_eh_filter_set_failure (p, failure);

  return p;
}

/* Build a GIMPLE_EH_MUST_NOT_THROW statement.  */

gimple
gimple_build_eh_must_not_throw (tree decl)
{
  gimple p = gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 0);

  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
  gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
  gimple_eh_must_not_throw_set_fndecl (p, decl);

  return p;
}

/* Build a GIMPLE_TRY statement.

   EVAL is the expression to evaluate.
   CLEANUP is the cleanup expression.
   KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
   whether this is a try/catch or a try/finally respectively.  */

gimple
gimple_build_try (gimple_seq eval, gimple_seq cleanup,
                  enum gimple_try_flags kind)
{
  gimple p;

  gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
  p = gimple_alloc (GIMPLE_TRY, 0);
  gimple_set_subcode (p, kind);
  if (eval)
    gimple_try_set_eval (p, eval);
  if (cleanup)
    gimple_try_set_cleanup (p, cleanup);

  return p;
}

/* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.

   CLEANUP is the cleanup expression.  */

gimple
gimple_build_wce (gimple_seq cleanup)
{
  gimple p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
  if (cleanup)
    gimple_wce_set_cleanup (p, cleanup);

  return p;
}


/* Build a GIMPLE_RESX statement.  */

gimple
gimple_build_resx (int region)
{
  gimple p = gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0);
  p->gimple_eh_ctrl.region = region;
  return p;
}


/* The helper for constructing a gimple switch statement.
   INDEX is the switch's index.
   NLABELS is the number of labels in the switch excluding the default.
   DEFAULT_LABEL is the default label for the switch statement.  */

gimple
gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
{
  /* nlabels + 1 default label + 1 index.  */
  gimple p = gimple_build_with_ops (GIMPLE_SWITCH, ERROR_MARK,
                                    1 + (default_label != NULL) + nlabels);
  gimple_switch_set_index (p, index);
  if (default_label)
    gimple_switch_set_default_label (p, default_label);
  return p;
}

/* Build a GIMPLE_SWITCH statement.

   INDEX is the switch's index.
   NLABELS is the number of labels in the switch excluding the DEFAULT_LABEL.
   ... are the labels excluding the default.  */

gimple
gimple_build_switch (unsigned nlabels, tree index, tree default_label, ...)
{
  va_list al;
  unsigned i, offset;
  gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);

  /* Store the rest of the labels.  */
  va_start (al, default_label);
  offset = (default_label != NULL);
  for (i = 0; i < nlabels; i++)
    gimple_switch_set_label (p, i + offset, va_arg (al, tree));
  va_end (al);

  return p;
}

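/* Usage sketch (hypothetical caller): a two-case switch on INDEX with
   a default, where DEF, L1 and L2 are CASE_LABEL_EXPRs:

     gimple s = gimple_build_switch (2, index, def, l1, l2);

   With a non-NULL DEFAULT_LABEL, the variadic labels fill label slots
   1..NLABELS; label slot 0 is reserved for the default (see the OFFSET
   computation above).  */
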
/* Build a GIMPLE_SWITCH statement.

   INDEX is the switch's index.
   DEFAULT_LABEL is the default label.
   ARGS is a vector of labels excluding the default.  */

gimple
gimple_build_switch_vec (tree index, tree default_label, VEC(tree, heap) *args)
{
  unsigned i, offset, nlabels = VEC_length (tree, args);
  gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);

  /* Copy the labels from the vector to the switch statement.  */
  offset = (default_label != NULL);
  for (i = 0; i < nlabels; i++)
    gimple_switch_set_label (p, i + offset, VEC_index (tree, args, i));

  return p;
}

/* Build a GIMPLE_EH_DISPATCH statement.  */

gimple
gimple_build_eh_dispatch (int region)
{
  gimple p = gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0);
  p->gimple_eh_ctrl.region = region;
  return p;
}

/* Build a new GIMPLE_DEBUG_BIND statement.

   VAR is bound to VALUE; block and location are taken from STMT.  */

gimple
gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
{
  gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
                                         (unsigned)GIMPLE_DEBUG_BIND, 2
                                         PASS_MEM_STAT);

  gimple_debug_bind_set_var (p, var);
  gimple_debug_bind_set_value (p, value);
  if (stmt)
    {
      gimple_set_block (p, gimple_block (stmt));
      gimple_set_location (p, gimple_location (stmt));
    }

  return p;
}


/* Build a GIMPLE_OMP_CRITICAL statement.

   BODY is the sequence of statements for which only one thread can execute.
   NAME is an optional identifier for this critical block.  */

gimple
gimple_build_omp_critical (gimple_seq body, tree name)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0);
  gimple_omp_critical_set_name (p, name);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}

/* Build a GIMPLE_OMP_FOR statement.

   BODY is the sequence of statements inside the for loop.
   CLAUSES are any of the OMP loop construct's clauses: private, firstprivate,
   lastprivate, reductions, ordered, schedule, and nowait.
   COLLAPSE is the collapse count.
   PRE_BODY is the sequence of statements that are loop invariant.  */

gimple
gimple_build_omp_for (gimple_seq body, tree clauses, size_t collapse,
                      gimple_seq pre_body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_FOR, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_for_set_clauses (p, clauses);
  p->gimple_omp_for.collapse = collapse;
  p->gimple_omp_for.iter
    = ggc_alloc_cleared_vec_gimple_omp_for_iter (collapse);
  if (pre_body)
    gimple_omp_for_set_pre_body (p, pre_body);

  return p;
}


/* Build a GIMPLE_OMP_PARALLEL statement.

   BODY is the sequence of statements which are executed in parallel.
   CLAUSES are the OMP parallel construct's clauses.
   CHILD_FN is the function created for the parallel threads to execute.
   DATA_ARG are the shared data argument(s).  */

gimple
gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
                           tree data_arg)
{
  gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_parallel_set_clauses (p, clauses);
  gimple_omp_parallel_set_child_fn (p, child_fn);
  gimple_omp_parallel_set_data_arg (p, data_arg);

  return p;
}


/* Build a GIMPLE_OMP_TASK statement.

   BODY is the sequence of statements which are executed by the explicit task.
   CLAUSES are the OMP task construct's clauses.
   CHILD_FN is the function created for the parallel threads to execute.
   DATA_ARG are the shared data argument(s).
   COPY_FN is the optional function for firstprivate initialization.
   ARG_SIZE and ARG_ALIGN are size and alignment of the data block.  */

gimple
gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
                       tree data_arg, tree copy_fn, tree arg_size,
                       tree arg_align)
{
  gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_task_set_clauses (p, clauses);
  gimple_omp_task_set_child_fn (p, child_fn);
  gimple_omp_task_set_data_arg (p, data_arg);
  gimple_omp_task_set_copy_fn (p, copy_fn);
  gimple_omp_task_set_arg_size (p, arg_size);
  gimple_omp_task_set_arg_align (p, arg_align);

  return p;
}


/* Build a GIMPLE_OMP_SECTION statement for a sections statement.

   BODY is the sequence of statements in the section.  */

gimple
gimple_build_omp_section (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_MASTER statement.

   BODY is the sequence of statements to be executed by just the master.  */

gimple
gimple_build_omp_master (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_CONTINUE statement.

   CONTROL_DEF is the definition of the control variable.
   CONTROL_USE is the use of the control variable.  */

gimple
gimple_build_omp_continue (tree control_def, tree control_use)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0);
  gimple_omp_continue_set_control_def (p, control_def);
  gimple_omp_continue_set_control_use (p, control_use);
  return p;
}

/* Build a GIMPLE_OMP_ORDERED statement.

   BODY is the sequence of statements inside a loop that will be
   executed in sequence.  */

gimple
gimple_build_omp_ordered (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_RETURN statement.
   WAIT_P is true if this is a non-waiting return.  */

gimple
gimple_build_omp_return (bool wait_p)
{
  gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
  if (wait_p)
    gimple_omp_return_set_nowait (p);

  return p;
}


/* Build a GIMPLE_OMP_SECTIONS statement.

   BODY is a sequence of section statements.
   CLAUSES are any of the OMP sections construct's clauses: private,
   firstprivate, lastprivate, reduction, and nowait.  */

gimple
gimple_build_omp_sections (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_sections_set_clauses (p, clauses);

  return p;
}


/* Build a GIMPLE_OMP_SECTIONS_SWITCH.  */

gimple
gimple_build_omp_sections_switch (void)
{
  return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
}


/* Build a GIMPLE_OMP_SINGLE statement.

   BODY is the sequence of statements that will be executed once.
   CLAUSES are any of the OMP single construct's clauses: private, firstprivate,
   copyprivate, nowait.  */

gimple
gimple_build_omp_single (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_single_set_clauses (p, clauses);

  return p;
}


/* Build a GIMPLE_OMP_ATOMIC_LOAD statement.  */

gimple
gimple_build_omp_atomic_load (tree lhs, tree rhs)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0);
  gimple_omp_atomic_load_set_lhs (p, lhs);
  gimple_omp_atomic_load_set_rhs (p, rhs);
  return p;
}

/* Build a GIMPLE_OMP_ATOMIC_STORE statement.

   VAL is the value we are storing.  */

gimple
gimple_build_omp_atomic_store (tree val)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0);
  gimple_omp_atomic_store_set_val (p, val);
  return p;
}

/* Build a GIMPLE_PREDICT statement.  PREDICT is one of the predictors from
   predict.def, OUTCOME is NOT_TAKEN or TAKEN.  */

gimple
gimple_build_predict (enum br_predictor predictor, enum prediction outcome)
{
  gimple p = gimple_alloc (GIMPLE_PREDICT, 0);
  /* Ensure all the predictors fit into the lower bits of the subcode.  */
  gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN);
  gimple_predict_set_predictor (p, predictor);
  gimple_predict_set_outcome (p, outcome);
  return p;
}

#if defined ENABLE_GIMPLE_CHECKING
/* Complain of a gimple type mismatch and die.  */

void
gimple_check_failed (const_gimple gs, const char *file, int line,
                     const char *function, enum gimple_code code,
                     enum tree_code subcode)
{
  internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
                  gimple_code_name[code],
                  tree_code_name[subcode],
                  gimple_code_name[gimple_code (gs)],
                  gs->gsbase.subcode > 0
                  ? tree_code_name[gs->gsbase.subcode]
                  : "",
                  function, trim_filename (file), line);
}
#endif /* ENABLE_GIMPLE_CHECKING */


/* Allocate a new GIMPLE sequence in GC memory and return it.  If
   there are free sequences in GIMPLE_SEQ_CACHE return one of those
   instead.  */

gimple_seq
gimple_seq_alloc (void)
{
  gimple_seq seq = gimple_seq_cache;
  if (seq)
    {
      gimple_seq_cache = gimple_seq_cache->next_free;
      gcc_assert (gimple_seq_cache != seq);
      memset (seq, 0, sizeof (*seq));
    }
  else
    {
      seq = ggc_alloc_cleared_gimple_seq_d ();
#ifdef GATHER_STATISTICS
      gimple_alloc_counts[(int) gimple_alloc_kind_seq]++;
      gimple_alloc_sizes[(int) gimple_alloc_kind_seq] += sizeof (*seq);
#endif
    }

  return seq;
}

/* Return SEQ to the free pool of GIMPLE sequences.  */

void
gimple_seq_free (gimple_seq seq)
{
  if (seq == NULL)
    return;

  gcc_assert (gimple_seq_first (seq) == NULL);
  gcc_assert (gimple_seq_last (seq) == NULL);

  /* If this triggers, it's a sign that the same list is being freed
     twice.  */
  gcc_assert (seq != gimple_seq_cache || gimple_seq_cache == NULL);

  /* Add SEQ to the pool of free sequences.  */
  seq->next_free = gimple_seq_cache;
  gimple_seq_cache = seq;
}


/* Link gimple statement GS to the end of the sequence *SEQ_P.  If
   *SEQ_P is NULL, a new sequence is allocated.  */

void
gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
{
  gimple_stmt_iterator si;

  if (gs == NULL)
    return;

  if (*seq_p == NULL)
    *seq_p = gimple_seq_alloc ();

  si = gsi_last (*seq_p);
  gsi_insert_after (&si, gs, GSI_NEW_STMT);
}

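/* Usage sketch (hypothetical caller): collect statements into a new
   sequence without pre-allocating it; the NULL-in, allocate-on-demand
   convention above keeps callers short:

     gimple_seq seq = NULL;
     gimple_seq_add_stmt (&seq, stmt1);   <- seq allocated on first add
     gimple_seq_add_stmt (&seq, stmt2);  */
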
/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  */

void
gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  if (*dst_p == NULL)
    *dst_p = gimple_seq_alloc ();

  si = gsi_last (*dst_p);
  gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
}


/* Helper function of empty_body_p.  Return true if STMT is an empty
   statement.  */

static bool
empty_stmt_p (gimple stmt)
{
  if (gimple_code (stmt) == GIMPLE_NOP)
    return true;
  if (gimple_code (stmt) == GIMPLE_BIND)
    return empty_body_p (gimple_bind_body (stmt));
  return false;
}


/* Return true if BODY contains nothing but empty statements.  */

bool
empty_body_p (gimple_seq body)
{
  gimple_stmt_iterator i;

  if (gimple_seq_empty_p (body))
    return true;
  for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
    if (!empty_stmt_p (gsi_stmt (i))
        && !is_gimple_debug (gsi_stmt (i)))
      return false;

  return true;
}


/* Perform a deep copy of sequence SRC and return the result.  */

gimple_seq
gimple_seq_copy (gimple_seq src)
{
  gimple_stmt_iterator gsi;
  gimple_seq new_seq = gimple_seq_alloc ();
  gimple stmt;

  for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gimple_copy (gsi_stmt (gsi));
      gimple_seq_add_stmt (&new_seq, stmt);
    }

  return new_seq;
}


/* Walk all the statements in the sequence SEQ calling walk_gimple_stmt
   on each one.  WI is as in walk_gimple_stmt.

   If walk_gimple_stmt returns non-NULL, the walk is stopped, the
   value is stored in WI->CALLBACK_RESULT and the statement that
   produced the value is returned.

   Otherwise, all the statements are walked and NULL returned.  */

gimple
walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
                 walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi);
      if (ret)
        {
          /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
             to hold it.  */
          gcc_assert (wi);
          wi->callback_result = ret;
          return gsi_stmt (gsi);
        }
    }

  if (wi)
    wi->callback_result = NULL_TREE;

  return NULL;
}

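/* Usage sketch (hypothetical caller): find the first statement in SEQ
   mentioning VAR.  FIND_VAR_OP is an assumed walk_tree_fn that returns
   *TP when it matches the variable stashed in WI->INFO and NULL_TREE
   otherwise:

     struct walk_stmt_info wi;
     memset (&wi, 0, sizeof (wi));
     wi.info = var;
     gimple bad = walk_gimple_seq (seq, NULL, find_var_op, &wi);

   A non-NULL BAD is the offending statement, and WI.CALLBACK_RESULT
   holds the operand that stopped the walk.  */
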
/* Helper function for walk_gimple_stmt.  Walk operands of a GIMPLE_ASM.  */

static tree
walk_gimple_asm (gimple stmt, walk_tree_fn callback_op,
                 struct walk_stmt_info *wi)
{
  tree ret, op;
  unsigned noutputs;
  const char **oconstraints;
  unsigned i, n;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  if (wi)
    wi->is_lhs = true;

  for (i = 0; i < noutputs; i++)
    {
      op = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem, &allows_reg,
                               &is_inout);
      if (wi)
        wi->val_only = (allows_reg || !allows_mem);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
        return ret;
    }

  n = gimple_asm_ninputs (stmt);
  for (i = 0; i < n; i++)
    {
      op = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                              oconstraints, &allows_mem, &allows_reg);
      if (wi)
        {
          wi->val_only = (allows_reg || !allows_mem);
          /* Although input "m" is not really an LHS, we need an lvalue.  */
          wi->is_lhs = !wi->val_only;
        }
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
        return ret;
    }

  if (wi)
    {
      wi->is_lhs = false;
      wi->val_only = true;
    }

  n = gimple_asm_nlabels (stmt);
  for (i = 0; i < n; i++)
    {
      op = gimple_asm_label_op (stmt, i);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
        return ret;
    }

  return NULL_TREE;
}

/* Helper function of WALK_GIMPLE_STMT.  Walk every tree operand in
   STMT.  CALLBACK_OP and WI are as in WALK_GIMPLE_STMT.

   CALLBACK_OP is called on each operand of STMT via walk_tree.
   Additional parameters to walk_tree must be stored in WI.  For each operand
   OP, walk_tree is called as:

        walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)

   If CALLBACK_OP returns non-NULL for an operand, the remaining
   operands are not scanned.

   The return value is that returned by the last call to walk_tree, or
   NULL_TREE if no CALLBACK_OP is specified.  */

tree
walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
                struct walk_stmt_info *wi)
{
  struct pointer_set_t *pset = (wi) ? wi->pset : NULL;
  unsigned i;
  tree ret = NULL_TREE;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* Walk the RHS operands.  If the LHS is of a non-renamable type or
         is a register variable, we may use a COMPONENT_REF on the RHS.  */
      if (wi)
        {
          tree lhs = gimple_assign_lhs (stmt);
          wi->val_only
            = (is_gimple_reg_type (TREE_TYPE (lhs)) && !is_gimple_reg (lhs))
              || !gimple_assign_single_p (stmt);
        }

      for (i = 1; i < gimple_num_ops (stmt); i++)
        {
          ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
                           pset);
          if (ret)
            return ret;
        }

      /* Walk the LHS.  If the RHS is appropriate for a memory, we
         may use a COMPONENT_REF on the LHS.  */
      if (wi)
        {
          /* If the RHS has more than 1 operand, it is not appropriate
             for the memory.  */
          wi->val_only = !is_gimple_mem_rhs (gimple_assign_rhs1 (stmt))
                         || !gimple_assign_single_p (stmt);
          wi->is_lhs = true;
        }

      ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset);
      if (ret)
        return ret;

      if (wi)
        {
          wi->val_only = true;
          wi->is_lhs = false;
        }
      break;

    case GIMPLE_CALL:
      if (wi)
        {
          wi->is_lhs = false;
          wi->val_only = true;
        }

      ret = walk_tree (gimple_call_chain_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_call_fn_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      for (i = 0; i < gimple_call_num_args (stmt); i++)
        {
          if (wi)
            wi->val_only = is_gimple_reg_type (gimple_call_arg (stmt, i));
          ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
                           pset);
          if (ret)
            return ret;
        }

      if (gimple_call_lhs (stmt))
        {
          if (wi)
            {
              wi->is_lhs = true;
              wi->val_only = is_gimple_reg_type (gimple_call_lhs (stmt));
            }

          ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset);
          if (ret)
            return ret;
        }

      if (wi)
        {
          wi->is_lhs = false;
          wi->val_only = true;
        }
      break;

    case GIMPLE_CATCH:
      ret = walk_tree (gimple_catch_types_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_ASM:
      ret = walk_gimple_asm (stmt, callback_op, wi);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_CONTINUE:
      ret = walk_tree (gimple_omp_continue_control_def_ptr (stmt),
                       callback_op, wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_omp_continue_control_use_ptr (stmt),
                       callback_op, wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_CRITICAL:
      ret = walk_tree (gimple_omp_critical_name_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
        {
          ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
                           wi, pset);
          if (ret)
            return ret;
          ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
                           wi, pset);
          if (ret)
            return ret;
          ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
                           wi, pset);
          if (ret)
            return ret;
          ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
                           wi, pset);
        }
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_PARALLEL:
      ret = walk_tree (gimple_omp_parallel_clauses_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_parallel_child_fn_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_parallel_data_arg_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_TASK:
      ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_SECTIONS:
      ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;

      break;

    case GIMPLE_OMP_SINGLE:
      ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_ATOMIC_LOAD:
      ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_ATOMIC_STORE:
      ret = walk_tree (gimple_omp_atomic_store_val_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      break;

      /* Tuples that do not have operands.  */
    case GIMPLE_NOP:
    case GIMPLE_RESX:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_PREDICT:
      break;

    default:
      {
        enum gimple_statement_structure_enum gss;
        gss = gimple_statement_structure (stmt);
        if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
          for (i = 0; i < gimple_num_ops (stmt); i++)
            {
              ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
              if (ret)
                return ret;
            }
      }
      break;
    }

  return NULL_TREE;
}

/* Walk the current statement in GSI (optionally using traversal state
   stored in WI).  If WI is NULL, no state is kept during traversal.
   The callback CALLBACK_STMT is called.  If CALLBACK_STMT indicates
   that it has handled all the operands of the statement, its return
   value is returned.  Otherwise, the return value from CALLBACK_STMT
   is discarded and its operands are scanned.

   If CALLBACK_STMT is NULL or it didn't handle the operands,
   CALLBACK_OP is called on each operand of the statement via
   walk_gimple_op.  If walk_gimple_op returns non-NULL for any
   operand, the remaining operands are not scanned.  In this case, the
   return value from CALLBACK_OP is returned.

   In any other case, NULL_TREE is returned.  */

tree
walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
                  walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple ret;
  tree tree_ret;
  gimple stmt = gsi_stmt (*gsi);

  if (wi)
    wi->gsi = *gsi;

  if (wi && wi->want_locations && gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  ret = NULL;

  /* Invoke the statement callback.  Return if the callback handled
     all of STMT operands by itself.  */
  if (callback_stmt)
    {
      bool handled_ops = false;
      tree_ret = callback_stmt (gsi, &handled_ops, wi);
      if (handled_ops)
        return tree_ret;

      /* If CALLBACK_STMT did not handle operands, it should not have
         a value to return.  */
      gcc_assert (tree_ret == NULL);

      /* Re-read stmt in case the callback changed it.  */
      stmt = gsi_stmt (*gsi);
    }

  /* If CALLBACK_OP is defined, invoke it on every operand of STMT.  */
  if (callback_op)
    {
      tree_ret = walk_gimple_op (stmt, callback_op, wi);
      if (tree_ret)
        return tree_ret;
    }

  /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      ret = walk_gimple_seq (gimple_bind_body (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_CATCH:
      ret = walk_gimple_seq (gimple_catch_handler (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_gimple_seq (gimple_eh_filter_failure (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_TRY:
      ret = walk_gimple_seq (gimple_try_eval (stmt), callback_stmt, callback_op,
                             wi);
      if (ret)
        return wi->callback_result;

      ret = walk_gimple_seq (gimple_try_cleanup (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_gimple_seq (gimple_omp_for_pre_body (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;

      /* FALL THROUGH.  */
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
      ret = walk_gimple_seq (gimple_omp_body (stmt), callback_stmt, callback_op,
                             wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_WITH_CLEANUP_EXPR:
      ret = walk_gimple_seq (gimple_wce_cleanup (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    default:
      gcc_assert (!gimple_has_substatements (stmt));
      break;
    }

  return NULL;
}

/* Set sequence SEQ to be the GIMPLE body for function FNDECL.  */

void
gimple_set_body (tree fndecl, gimple_seq seq)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  if (fn == NULL)
    {
      /* If FNDECL still does not have a function structure associated
         with it, then it does not make sense for it to receive a
         GIMPLE body.  */
      gcc_assert (seq == NULL);
    }
  else
    fn->gimple_body = seq;
}


/* Return the body of GIMPLE statements for function FNDECL.  After the
   CFG pass, the function body doesn't exist anymore because it has
   been split up into basic blocks.  In this case, it returns
   NULL.  */

gimple_seq
gimple_body (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return fn ? fn->gimple_body : NULL;
}

/* Return true when FNDECL has a GIMPLE body either in unlowered
   or CFG form.  */
bool
gimple_has_body_p (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return (gimple_body (fndecl) || (fn && fn->cfg));
}

/* Detect flags from a GIMPLE_CALL.  This is just like
   call_expr_flags, but for gimple tuples.  */

int
gimple_call_flags (const_gimple stmt)
{
  int flags;
  tree decl = gimple_call_fndecl (stmt);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else
    flags = flags_from_decl_or_type (gimple_call_fntype (stmt));

  if (stmt->gsbase.subcode & GF_CALL_NOTHROW)
    flags |= ECF_NOTHROW;

  return flags;
}

/* Detects argument flags for argument number ARG on call STMT.  */

int
gimple_call_arg_flags (const_gimple stmt, unsigned arg)
{
  tree type = gimple_call_fntype (stmt);
  tree attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
  if (!attr)
    return 0;

  attr = TREE_VALUE (TREE_VALUE (attr));
  if (1 + arg >= (unsigned) TREE_STRING_LENGTH (attr))
    return 0;

  switch (TREE_STRING_POINTER (attr)[1 + arg])
    {
    case 'x':
    case 'X':
      return EAF_UNUSED;

    case 'R':
      return EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE;

    case 'r':
      return EAF_NOCLOBBER | EAF_NOESCAPE;

    case 'W':
      return EAF_DIRECT | EAF_NOESCAPE;

    case 'w':
      return EAF_NOESCAPE;

    case '.':
    default:
      return 0;
    }
}

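/* Example of the "fn spec" string decoded above (illustration only):
   if a function type carries the internal "fn spec" attribute with
   the string ".rW", then

     byte 0 ('.')  -> no special return-value information
     byte 1 ('r')  -> argument 0 is only read:
                      EAF_NOCLOBBER | EAF_NOESCAPE
     byte 2 ('W')  -> argument 1 may be written but does not escape:
                      EAF_DIRECT | EAF_NOESCAPE

   gimple_call_return_flags below decodes byte 0 of the same string.  */
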
/* Detects return flags for the call STMT.  */

int
gimple_call_return_flags (const_gimple stmt)
{
  tree type;
  tree attr = NULL_TREE;

  if (gimple_call_flags (stmt) & ECF_MALLOC)
    return ERF_NOALIAS;

  type = gimple_call_fntype (stmt);
  attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
  if (!attr)
    return 0;

  attr = TREE_VALUE (TREE_VALUE (attr));
  if (TREE_STRING_LENGTH (attr) < 1)
    return 0;

  switch (TREE_STRING_POINTER (attr)[0])
    {
    case '1':
    case '2':
    case '3':
    case '4':
      return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');

    case 'm':
      return ERF_NOALIAS;

    case '.':
    default:
      return 0;
    }
}


/* Return true if GS is a copy assignment.  */

bool
gimple_assign_copy_p (gimple gs)
{
  return (gimple_assign_single_p (gs)
          && is_gimple_val (gimple_op (gs, 1)));
}


/* Return true if GS is an SSA_NAME copy assignment.  */

bool
gimple_assign_ssa_name_copy_p (gimple gs)
{
  return (gimple_assign_single_p (gs)
          && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
          && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
}


/* Return true if GS is an assignment with a unary RHS, but the
   operator has no effect on the assigned value.  The logic is adapted
   from STRIP_NOPS.  This predicate is intended to be used in tuplifying
   instances in which STRIP_NOPS was previously applied to the RHS of
   an assignment.

   NOTE: In the use cases that led to the creation of this function
   and of gimple_assign_single_p, it is typical to test for either
   condition and to proceed in the same manner.  In each case, the
   assigned value is represented by the single RHS operand of the
   assignment.  I suspect there may be cases where gimple_assign_copy_p,
   gimple_assign_single_p, or equivalent logic is used where a similar
   treatment of unary NOPs is appropriate.  */

bool
gimple_assign_unary_nop_p (gimple gs)
{
  return (is_gimple_assign (gs)
          && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
              || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
          && gimple_assign_rhs1 (gs) != error_mark_node
          && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
              == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
}

/* Set BB to be the basic block holding G.  */

void
gimple_set_bb (gimple stmt, basic_block bb)
{
  stmt->gsbase.bb = bb;

  /* If the statement is a label, add the label to block-to-labels map
     so that we can speed up edge creation for GIMPLE_GOTOs.  */
  if (cfun->cfg && gimple_code (stmt) == GIMPLE_LABEL)
    {
      tree t;
      int uid;

      t = gimple_label_label (stmt);
      uid = LABEL_DECL_UID (t);
      if (uid == -1)
        {
          unsigned old_len = VEC_length (basic_block, label_to_block_map);
          LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
          if (old_len <= (unsigned) uid)
            {
              unsigned new_len = 3 * uid / 2 + 1;

              VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
                                     new_len);
            }
        }

      VEC_replace (basic_block, label_to_block_map, uid, bb);
    }
}

726a989a
RB
1957/* Modify the RHS of the assignment pointed-to by GSI using the
1958 operands in the expression tree EXPR.
1959
1960 NOTE: The statement pointed-to by GSI may be reallocated if it
1961 did not have enough operand slots.
1962
1963 This function is useful to convert an existing tree expression into
1964 the flat representation used for the RHS of a GIMPLE assignment.
1965 It will reallocate memory as needed to expand or shrink the number
1966 of operand slots needed to represent EXPR.
1967
1968 NOTE: If you find yourself building a tree and then calling this
1969 function, you are most certainly doing it the slow way. It is much
1970 better to build a new assignment or to use the function
1971 gimple_assign_set_rhs_with_ops, which does not require an
1972 expression tree to be built. */
1973
1974void
1975gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
1976{
1977 enum tree_code subcode;
0354c0c7 1978 tree op1, op2, op3;
726a989a 1979
0354c0c7
BS
1980 extract_ops_from_tree_1 (expr, &subcode, &op1, &op2, &op3);
1981 gimple_assign_set_rhs_with_ops_1 (gsi, subcode, op1, op2, op3);
726a989a
RB
1982}
1983
1984
1985/* Set the RHS of assignment statement pointed-to by GSI to CODE with
0354c0c7 1986 operands OP1, OP2 and OP3.
726a989a
RB
1987
1988 NOTE: The statement pointed-to by GSI may be reallocated if it
1989 did not have enough operand slots. */
1990
1991void
0354c0c7
BS
1992gimple_assign_set_rhs_with_ops_1 (gimple_stmt_iterator *gsi, enum tree_code code,
1993 tree op1, tree op2, tree op3)
726a989a
RB
1994{
1995 unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
1996 gimple stmt = gsi_stmt (*gsi);
1997
1998 /* If the new CODE needs more operands, allocate a new statement. */
1999 if (gimple_num_ops (stmt) < new_rhs_ops + 1)
2000 {
2001 tree lhs = gimple_assign_lhs (stmt);
2002 gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
2003 memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
2004 gsi_replace (gsi, new_stmt, true);
2005 stmt = new_stmt;
2006
2007 /* The LHS needs to be reset as this also changes the SSA name
2008 on the LHS. */
2009 gimple_assign_set_lhs (stmt, lhs);
2010 }
2011
2012 gimple_set_num_ops (stmt, new_rhs_ops + 1);
2013 gimple_set_subcode (stmt, code);
2014 gimple_assign_set_rhs1 (stmt, op1);
2015 if (new_rhs_ops > 1)
2016 gimple_assign_set_rhs2 (stmt, op2);
0354c0c7
BS
2017 if (new_rhs_ops > 2)
2018 gimple_assign_set_rhs3 (stmt, op3);
726a989a
RB
2019}
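
/* A minimal usage sketch for the setter above.  The helper name is
   hypothetical, and it assumes the two-operand convenience wrapper
   gimple_assign_set_rhs_with_ops from gimple.h, which passes NULL_TREE
   for OP3.  It turns the assignment at GSI into "lhs = a + b",
   reallocating the statement if the old RHS had fewer operand slots.  */

static void ATTRIBUTE_UNUSED
example_rewrite_rhs_to_plus (gimple_stmt_iterator *gsi, tree a, tree b)
{
  /* A and B are assumed to be GIMPLE values of a type compatible with
     the LHS of the assignment at GSI.  */
  gimple_assign_set_rhs_with_ops (gsi, PLUS_EXPR, a, b);

  /* Recompute the operand cache after the in-place surgery.  */
  update_stmt (gsi_stmt (*gsi));
}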


/* Return the LHS of a statement that performs an assignment,
   either a GIMPLE_ASSIGN or a GIMPLE_CALL.  Returns NULL_TREE
   for a call to a function that returns no value, or for a
   statement other than an assignment or a call.  */

tree
gimple_get_lhs (const_gimple stmt)
{
  enum gimple_code code = gimple_code (stmt);

  if (code == GIMPLE_ASSIGN)
    return gimple_assign_lhs (stmt);
  else if (code == GIMPLE_CALL)
    return gimple_call_lhs (stmt);
  else
    return NULL_TREE;
}


/* Set the LHS of a statement that performs an assignment,
   either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */

void
gimple_set_lhs (gimple stmt, tree lhs)
{
  enum gimple_code code = gimple_code (stmt);

  if (code == GIMPLE_ASSIGN)
    gimple_assign_set_lhs (stmt, lhs);
  else if (code == GIMPLE_CALL)
    gimple_call_set_lhs (stmt, lhs);
  else
    gcc_unreachable ();
}

/* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
   GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
   expression with a different value.

   This will update any annotations (say debug bind stmts) referring
   to the original LHS, so that they use the RHS instead.  This is
   done even if NLHS and LHS are the same, for it is understood that
   the RHS will be modified afterwards, and NLHS will not be assigned
   an equivalent value.

   Adjusting any non-annotation uses of the LHS, if needed, is a
   responsibility of the caller.

   The effect of this call should be pretty much the same as that of
   inserting a copy of STMT before STMT, and then removing the
   original stmt, at which time gsi_remove() would have updated the
   annotations, but using this function saves all the inserting,
   copying and removing.  */

void
gimple_replace_lhs (gimple stmt, tree nlhs)
{
  if (MAY_HAVE_DEBUG_STMTS)
    {
      tree lhs = gimple_get_lhs (stmt);

      gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);

      insert_debug_temp_for_var_def (NULL, lhs);
    }

  gimple_set_lhs (stmt, nlhs);
}

/* Return a deep copy of statement STMT.  All the operands from STMT
   are reallocated and copied using unshare_expr.  The DEF, USE, VDEF
   and VUSE operand arrays are set to empty in the new copy.  */

gimple
gimple_copy (gimple stmt)
{
  enum gimple_code code = gimple_code (stmt);
  unsigned num_ops = gimple_num_ops (stmt);
  gimple copy = gimple_alloc (code, num_ops);
  unsigned i;

  /* Shallow copy all the fields from STMT.  */
  memcpy (copy, stmt, gimple_size (code));

  /* If STMT has sub-statements, deep-copy them as well.  */
  if (gimple_has_substatements (stmt))
    {
      gimple_seq new_seq;
      tree t;

      switch (gimple_code (stmt))
        {
        case GIMPLE_BIND:
          new_seq = gimple_seq_copy (gimple_bind_body (stmt));
          gimple_bind_set_body (copy, new_seq);
          gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt)));
          gimple_bind_set_block (copy, gimple_bind_block (stmt));
          break;

        case GIMPLE_CATCH:
          new_seq = gimple_seq_copy (gimple_catch_handler (stmt));
          gimple_catch_set_handler (copy, new_seq);
          t = unshare_expr (gimple_catch_types (stmt));
          gimple_catch_set_types (copy, t);
          break;

        case GIMPLE_EH_FILTER:
          new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt));
          gimple_eh_filter_set_failure (copy, new_seq);
          t = unshare_expr (gimple_eh_filter_types (stmt));
          gimple_eh_filter_set_types (copy, t);
          break;

        case GIMPLE_TRY:
          new_seq = gimple_seq_copy (gimple_try_eval (stmt));
          gimple_try_set_eval (copy, new_seq);
          new_seq = gimple_seq_copy (gimple_try_cleanup (stmt));
          gimple_try_set_cleanup (copy, new_seq);
          break;

        case GIMPLE_OMP_FOR:
          new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
          gimple_omp_for_set_pre_body (copy, new_seq);
          t = unshare_expr (gimple_omp_for_clauses (stmt));
          gimple_omp_for_set_clauses (copy, t);
          copy->gimple_omp_for.iter
            = ggc_alloc_vec_gimple_omp_for_iter
                (gimple_omp_for_collapse (stmt));
          for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
            {
              gimple_omp_for_set_cond (copy, i,
                                       gimple_omp_for_cond (stmt, i));
              gimple_omp_for_set_index (copy, i,
                                        gimple_omp_for_index (stmt, i));
              t = unshare_expr (gimple_omp_for_initial (stmt, i));
              gimple_omp_for_set_initial (copy, i, t);
              t = unshare_expr (gimple_omp_for_final (stmt, i));
              gimple_omp_for_set_final (copy, i, t);
              t = unshare_expr (gimple_omp_for_incr (stmt, i));
              gimple_omp_for_set_incr (copy, i, t);
            }
          goto copy_omp_body;

        case GIMPLE_OMP_PARALLEL:
          t = unshare_expr (gimple_omp_parallel_clauses (stmt));
          gimple_omp_parallel_set_clauses (copy, t);
          t = unshare_expr (gimple_omp_parallel_child_fn (stmt));
          gimple_omp_parallel_set_child_fn (copy, t);
          t = unshare_expr (gimple_omp_parallel_data_arg (stmt));
          gimple_omp_parallel_set_data_arg (copy, t);
          goto copy_omp_body;

        case GIMPLE_OMP_TASK:
          t = unshare_expr (gimple_omp_task_clauses (stmt));
          gimple_omp_task_set_clauses (copy, t);
          t = unshare_expr (gimple_omp_task_child_fn (stmt));
          gimple_omp_task_set_child_fn (copy, t);
          t = unshare_expr (gimple_omp_task_data_arg (stmt));
          gimple_omp_task_set_data_arg (copy, t);
          t = unshare_expr (gimple_omp_task_copy_fn (stmt));
          gimple_omp_task_set_copy_fn (copy, t);
          t = unshare_expr (gimple_omp_task_arg_size (stmt));
          gimple_omp_task_set_arg_size (copy, t);
          t = unshare_expr (gimple_omp_task_arg_align (stmt));
          gimple_omp_task_set_arg_align (copy, t);
          goto copy_omp_body;

        case GIMPLE_OMP_CRITICAL:
          t = unshare_expr (gimple_omp_critical_name (stmt));
          gimple_omp_critical_set_name (copy, t);
          goto copy_omp_body;

        case GIMPLE_OMP_SECTIONS:
          t = unshare_expr (gimple_omp_sections_clauses (stmt));
          gimple_omp_sections_set_clauses (copy, t);
          t = unshare_expr (gimple_omp_sections_control (stmt));
          gimple_omp_sections_set_control (copy, t);
          /* FALLTHRU  */

        case GIMPLE_OMP_SINGLE:
        case GIMPLE_OMP_SECTION:
        case GIMPLE_OMP_MASTER:
        case GIMPLE_OMP_ORDERED:
        copy_omp_body:
          new_seq = gimple_seq_copy (gimple_omp_body (stmt));
          gimple_omp_set_body (copy, new_seq);
          break;

        case GIMPLE_WITH_CLEANUP_EXPR:
          new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
          gimple_wce_set_cleanup (copy, new_seq);
          break;

        default:
          gcc_unreachable ();
        }
    }

  /* Make copy of operands.  */
  if (num_ops > 0)
    {
      for (i = 0; i < num_ops; i++)
        gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));

      /* Clear out SSA operand vectors on COPY.  */
      if (gimple_has_ops (stmt))
        {
          gimple_set_def_ops (copy, NULL);
          gimple_set_use_ops (copy, NULL);
        }

      if (gimple_has_mem_ops (stmt))
        {
          gimple_set_vdef (copy, gimple_vdef (stmt));
          gimple_set_vuse (copy, gimple_vuse (stmt));
        }

      /* SSA operands need to be updated.  */
      gimple_set_modified (copy, true);
    }

  return copy;
}
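
/* A minimal usage sketch for gimple_copy (the helper name is
   hypothetical): duplicate the statement at GSI and insert the
   duplicate before it.  Note that the copy shares the original's LHS;
   if that LHS is an SSA name the caller must give the copy a new
   definition before the IL is valid again.  */

static void ATTRIBUTE_UNUSED
example_duplicate_stmt (gimple_stmt_iterator *gsi)
{
  gimple copy = gimple_copy (gsi_stmt (*gsi));

  /* gimple_copy cleared the DEF/USE vectors and marked the copy
     modified, so the operand scanner will rebuild them.  */
  gsi_insert_before (gsi, copy, GSI_SAME_STMT);
}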


/* Set the MODIFIED flag to MODIFIEDP, iff the gimple statement S has
   a MODIFIED field.  */

void
gimple_set_modified (gimple s, bool modifiedp)
{
  if (gimple_has_ops (s))
    s->gsbase.modified = (unsigned) modifiedp;
}


/* Return true if statement S has side-effects.  We consider a
   statement to have side effects if:

   - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
   - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS.  */

bool
gimple_has_side_effects (const_gimple s)
{
  unsigned i;

  if (is_gimple_debug (s))
    return false;

  /* We don't have to scan the arguments to check for
     volatile arguments, though, at present, we still
     do a scan to check for TREE_SIDE_EFFECTS.  */
  if (gimple_has_volatile_ops (s))
    return true;

  if (is_gimple_call (s))
    {
      unsigned nargs = gimple_call_num_args (s);

      if (!(gimple_call_flags (s) & (ECF_CONST | ECF_PURE)))
        return true;
      else if (gimple_call_flags (s) & ECF_LOOPING_CONST_OR_PURE)
        /* An infinite loop is considered a side effect.  */
        return true;

      if (gimple_call_lhs (s)
          && TREE_SIDE_EFFECTS (gimple_call_lhs (s)))
        {
          gcc_assert (gimple_has_volatile_ops (s));
          return true;
        }

      if (TREE_SIDE_EFFECTS (gimple_call_fn (s)))
        return true;

      for (i = 0; i < nargs; i++)
        if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i)))
          {
            gcc_assert (gimple_has_volatile_ops (s));
            return true;
          }

      return false;
    }
  else
    {
      for (i = 0; i < gimple_num_ops (s); i++)
        if (TREE_SIDE_EFFECTS (gimple_op (s, i)))
          {
            gcc_assert (gimple_has_volatile_ops (s));
            return true;
          }
    }

  return false;
}

/* Return true if the RHS of statement S has side effects.
   We may use it to determine if it is admissible to replace
   an assignment or call with a copy of a previously-computed
   value.  In such cases, side-effects due to the LHS are
   preserved.  */

bool
gimple_rhs_has_side_effects (const_gimple s)
{
  unsigned i;

  if (is_gimple_call (s))
    {
      unsigned nargs = gimple_call_num_args (s);

      if (!(gimple_call_flags (s) & (ECF_CONST | ECF_PURE)))
        return true;

      /* We cannot use gimple_has_volatile_ops here,
         because we must ignore a volatile LHS.  */
      if (TREE_SIDE_EFFECTS (gimple_call_fn (s))
          || TREE_THIS_VOLATILE (gimple_call_fn (s)))
        {
          gcc_assert (gimple_has_volatile_ops (s));
          return true;
        }

      for (i = 0; i < nargs; i++)
        if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i))
            || TREE_THIS_VOLATILE (gimple_call_arg (s, i)))
          return true;

      return false;
    }
  else if (is_gimple_assign (s))
    {
      /* Skip the first operand, the LHS.  */
      for (i = 1; i < gimple_num_ops (s); i++)
        if (TREE_SIDE_EFFECTS (gimple_op (s, i))
            || TREE_THIS_VOLATILE (gimple_op (s, i)))
          {
            gcc_assert (gimple_has_volatile_ops (s));
            return true;
          }
    }
  else if (is_gimple_debug (s))
    return false;
  else
    {
      /* For statements without an LHS, examine all arguments.  */
      for (i = 0; i < gimple_num_ops (s); i++)
        if (TREE_SIDE_EFFECTS (gimple_op (s, i))
            || TREE_THIS_VOLATILE (gimple_op (s, i)))
          {
            gcc_assert (gimple_has_volatile_ops (s));
            return true;
          }
    }

  return false;
}

/* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
   Return true if S can trap.  When INCLUDE_MEM is true, check whether
   the memory operations could trap.  When INCLUDE_STORES is true and
   S is a GIMPLE_ASSIGN, the LHS of the assignment is also checked.  */

bool
gimple_could_trap_p_1 (gimple s, bool include_mem, bool include_stores)
{
  tree t, div = NULL_TREE;
  enum tree_code op;

  if (include_mem)
    {
      unsigned i, start = (is_gimple_assign (s) && !include_stores) ? 1 : 0;

      for (i = start; i < gimple_num_ops (s); i++)
        if (tree_could_trap_p (gimple_op (s, i)))
          return true;
    }

  switch (gimple_code (s))
    {
    case GIMPLE_ASM:
      return gimple_asm_volatile_p (s);

    case GIMPLE_CALL:
      t = gimple_call_fndecl (s);
      /* Assume that calls to weak functions may trap.  */
      if (!t || !DECL_P (t) || DECL_WEAK (t))
        return true;
      return false;

    case GIMPLE_ASSIGN:
      t = gimple_expr_type (s);
      op = gimple_assign_rhs_code (s);
      if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
        div = gimple_assign_rhs2 (s);
      return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
                                      (INTEGRAL_TYPE_P (t)
                                       && TYPE_OVERFLOW_TRAPS (t)),
                                      div));

    default:
      break;
    }

  return false;
}

/* Return true if statement S can trap.  */

bool
gimple_could_trap_p (gimple s)
{
  return gimple_could_trap_p_1 (s, true, true);
}

/* Return true if RHS of a GIMPLE_ASSIGN S can trap.  */

bool
gimple_assign_rhs_could_trap_p (gimple s)
{
  gcc_assert (is_gimple_assign (s));
  return gimple_could_trap_p_1 (s, true, false);
}
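
/* A worked example of the predicates above: for the integer division
   "x = a / b", DIV is the second RHS operand and operation_could_trap_p
   reports a possible trap (division by zero) unless the divisor is a
   nonzero constant, so gimple_assign_rhs_could_trap_p returns true.
   For "x = a + b" on a non-trapping integer type it returns false, and
   for a load "x = *p" only the INCLUDE_MEM scan can report a trap, via
   tree_could_trap_p on the memory operand.  */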


/* Print debugging information for gimple stmts generated.  */

void
dump_gimple_statistics (void)
{
#ifdef GATHER_STATISTICS
  int i, total_tuples = 0, total_bytes = 0;

  fprintf (stderr, "\nGIMPLE statements\n");
  fprintf (stderr, "Kind                   Stmts      Bytes\n");
  fprintf (stderr, "---------------------------------------\n");
  for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
    {
      fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i],
               gimple_alloc_counts[i], gimple_alloc_sizes[i]);
      total_tuples += gimple_alloc_counts[i];
      total_bytes += gimple_alloc_sizes[i];
    }
  fprintf (stderr, "---------------------------------------\n");
  fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes);
  fprintf (stderr, "---------------------------------------\n");
#else
  fprintf (stderr, "No gimple statistics\n");
#endif
}


/* Return the number of operands needed on the RHS of a GIMPLE
   assignment for an expression with tree code CODE.  */

unsigned
get_gimple_rhs_num_ops (enum tree_code code)
{
  enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);

  if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
    return 1;
  else if (rhs_class == GIMPLE_BINARY_RHS)
    return 2;
  else if (rhs_class == GIMPLE_TERNARY_RHS)
    return 3;
  else
    gcc_unreachable ();
}

#define DEFTREECODE(SYM, STRING, TYPE, NARGS)                           \
  (unsigned char)                                                       \
  ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS                               \
   : ((TYPE) == tcc_binary                                              \
      || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS                  \
   : ((TYPE) == tcc_constant                                            \
      || (TYPE) == tcc_declaration                                      \
      || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS                   \
   : ((SYM) == TRUTH_AND_EXPR                                           \
      || (SYM) == TRUTH_OR_EXPR                                         \
      || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS                   \
   : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS                         \
   : ((SYM) == WIDEN_MULT_PLUS_EXPR                                     \
      || (SYM) == WIDEN_MULT_MINUS_EXPR                                 \
      || (SYM) == DOT_PROD_EXPR                                         \
      || (SYM) == REALIGN_LOAD_EXPR                                     \
      || (SYM) == FMA_EXPR) ? GIMPLE_TERNARY_RHS                        \
   : ((SYM) == COND_EXPR                                                \
      || (SYM) == CONSTRUCTOR                                           \
      || (SYM) == OBJ_TYPE_REF                                          \
      || (SYM) == ASSERT_EXPR                                           \
      || (SYM) == ADDR_EXPR                                             \
      || (SYM) == WITH_SIZE_EXPR                                        \
      || (SYM) == SSA_NAME                                              \
      || (SYM) == VEC_COND_EXPR) ? GIMPLE_SINGLE_RHS                    \
   : GIMPLE_INVALID_RHS),
#define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,

const unsigned char gimple_rhs_class_table[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
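
/* Worked examples of the classification above:

     get_gimple_rhs_num_ops (NEGATE_EXPR) == 1  (tcc_unary),
     get_gimple_rhs_num_ops (PLUS_EXPR)   == 2  (tcc_binary),
     get_gimple_rhs_num_ops (FMA_EXPR)    == 3  (GIMPLE_TERNARY_RHS),
     get_gimple_rhs_num_ops (SSA_NAME)    == 1  (GIMPLE_SINGLE_RHS).

   Codes mapped to GIMPLE_INVALID_RHS may not appear as the RHS code
   of a GIMPLE_ASSIGN at all.  */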

/* For the definitive definition of GIMPLE, see doc/tree-ssa.texi.  */

/* Validation of GIMPLE expressions.  */

/* Returns true iff T is a valid RHS for an assignment to a renamed
   user -- or front-end generated artificial -- variable.  */

bool
is_gimple_reg_rhs (tree t)
{
  return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS;
}

/* Returns true iff T is a valid RHS for an assignment to an un-renamed
   LHS, or for a call argument.  */

bool
is_gimple_mem_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return is_gimple_val (t) || is_gimple_lvalue (t);
}

/* Return true if T is a valid LHS for a GIMPLE assignment expression.  */

bool
is_gimple_lvalue (tree t)
{
  return (is_gimple_addressable (t)
          || TREE_CODE (t) == WITH_SIZE_EXPR
          /* These are complex lvalues, but don't have addresses, so they
             go here.  */
          || TREE_CODE (t) == BIT_FIELD_REF);
}

/* Return true if T is a GIMPLE condition.  */

bool
is_gimple_condexpr (tree t)
{
  return (is_gimple_val (t) || (COMPARISON_CLASS_P (t)
                                && !tree_could_throw_p (t)
                                && is_gimple_val (TREE_OPERAND (t, 0))
                                && is_gimple_val (TREE_OPERAND (t, 1))));
}

/* Return true if T is something whose address can be taken.  */

bool
is_gimple_addressable (tree t)
{
  return (is_gimple_id (t) || handled_component_p (t)
          || TREE_CODE (t) == MEM_REF);
}

/* Return true if T is a valid gimple constant.  */

bool
is_gimple_constant (const_tree t)
{
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
    case REAL_CST:
    case FIXED_CST:
    case STRING_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
      return true;

    /* Vector constant constructors are gimple invariant.  */
    case CONSTRUCTOR:
      if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
        return TREE_CONSTANT (t);
      else
        return false;

    default:
      return false;
    }
}

/* Return true if T is a gimple address.  */

bool
is_gimple_address (const_tree t)
{
  tree op;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;

  op = TREE_OPERAND (t, 0);
  while (handled_component_p (op))
    {
      if ((TREE_CODE (op) == ARRAY_REF
           || TREE_CODE (op) == ARRAY_RANGE_REF)
          && !is_gimple_val (TREE_OPERAND (op, 1)))
        return false;

      op = TREE_OPERAND (op, 0);
    }

  if (CONSTANT_CLASS_P (op) || TREE_CODE (op) == MEM_REF)
    return true;

  switch (TREE_CODE (op))
    {
    case PARM_DECL:
    case RESULT_DECL:
    case LABEL_DECL:
    case FUNCTION_DECL:
    case VAR_DECL:
    case CONST_DECL:
      return true;

    default:
      return false;
    }
}

/* Strip out all handled components that produce invariant
   offsets.  */

static const_tree
strip_invariant_refs (const_tree op)
{
  while (handled_component_p (op))
    {
      switch (TREE_CODE (op))
        {
        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          if (!is_gimple_constant (TREE_OPERAND (op, 1))
              || TREE_OPERAND (op, 2) != NULL_TREE
              || TREE_OPERAND (op, 3) != NULL_TREE)
            return NULL;
          break;

        case COMPONENT_REF:
          if (TREE_OPERAND (op, 2) != NULL_TREE)
            return NULL;
          break;

        default:;
        }
      op = TREE_OPERAND (op, 0);
    }

  return op;
}

/* Return true if T is a gimple invariant address.  */

bool
is_gimple_invariant_address (const_tree t)
{
  const_tree op;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;

  op = strip_invariant_refs (TREE_OPERAND (t, 0));
  if (!op)
    return false;

  if (TREE_CODE (op) == MEM_REF)
    {
      const_tree op0 = TREE_OPERAND (op, 0);
      return (TREE_CODE (op0) == ADDR_EXPR
              && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
                  || decl_address_invariant_p (TREE_OPERAND (op0, 0))));
    }

  return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
}

/* Return true if T is a gimple invariant address at IPA level
   (so addresses of variables on stack are not allowed).  */

bool
is_gimple_ip_invariant_address (const_tree t)
{
  const_tree op;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;

  op = strip_invariant_refs (TREE_OPERAND (t, 0));

  return op && (CONSTANT_CLASS_P (op) || decl_address_ip_invariant_p (op));
}

/* Return true if T is a GIMPLE minimal invariant.  It's a restricted
   form of function invariant.  */

bool
is_gimple_min_invariant (const_tree t)
{
  if (TREE_CODE (t) == ADDR_EXPR)
    return is_gimple_invariant_address (t);

  return is_gimple_constant (t);
}

/* Return true if T is a GIMPLE interprocedural invariant.  It's a restricted
   form of gimple minimal invariant.  */

bool
is_gimple_ip_invariant (const_tree t)
{
  if (TREE_CODE (t) == ADDR_EXPR)
    return is_gimple_ip_invariant_address (t);

  return is_gimple_constant (t);
}

/* Return true if T looks like a valid GIMPLE statement.  */

bool
is_gimple_stmt (tree t)
{
  const enum tree_code code = TREE_CODE (t);

  switch (code)
    {
    case NOP_EXPR:
      /* The only valid NOP_EXPR is the empty statement.  */
      return IS_EMPTY_STMT (t);

    case BIND_EXPR:
    case COND_EXPR:
      /* These are only valid if they're void.  */
      return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));

    case SWITCH_EXPR:
    case GOTO_EXPR:
    case RETURN_EXPR:
    case LABEL_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
    case ASM_EXPR:
    case STATEMENT_LIST:
    case OMP_PARALLEL:
    case OMP_FOR:
    case OMP_SECTIONS:
    case OMP_SECTION:
    case OMP_SINGLE:
    case OMP_MASTER:
    case OMP_ORDERED:
    case OMP_CRITICAL:
    case OMP_TASK:
      /* These are always void.  */
      return true;

    case CALL_EXPR:
    case MODIFY_EXPR:
    case PREDICT_EXPR:
      /* These are valid regardless of their type.  */
      return true;

    default:
      return false;
    }
}

/* Return true if T is a variable.  */

bool
is_gimple_variable (tree t)
{
  return (TREE_CODE (t) == VAR_DECL
          || TREE_CODE (t) == PARM_DECL
          || TREE_CODE (t) == RESULT_DECL
          || TREE_CODE (t) == SSA_NAME);
}

/* Return true if T is a GIMPLE identifier (something with an address).  */

bool
is_gimple_id (tree t)
{
  return (is_gimple_variable (t)
          || TREE_CODE (t) == FUNCTION_DECL
          || TREE_CODE (t) == LABEL_DECL
          || TREE_CODE (t) == CONST_DECL
          /* Allow string constants, since they are addressable.  */
          || TREE_CODE (t) == STRING_CST);
}

/* Return true if TYPE is a suitable type for a scalar register variable.  */

bool
is_gimple_reg_type (tree type)
{
  return !AGGREGATE_TYPE_P (type);
}

/* Return true if T is a non-aggregate register variable.  */

bool
is_gimple_reg (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    t = SSA_NAME_VAR (t);

  if (!is_gimple_variable (t))
    return false;

  if (!is_gimple_reg_type (TREE_TYPE (t)))
    return false;

  /* A volatile decl is not acceptable because we can't reuse it as
     needed.  We need to copy it into a temp first.  */
  if (TREE_THIS_VOLATILE (t))
    return false;

  /* We define "registers" as things that can be renamed as needed,
     which with our infrastructure does not apply to memory.  */
  if (needs_to_live_in_memory (t))
    return false;

  /* Hard register variables are an interesting case.  For those that
     are call-clobbered, we don't know where all the calls are, since
     we don't (want to) take into account which operations will turn
     into libcalls at the rtl level.  For those that are call-saved,
     we don't currently model the fact that calls may in fact change
     global hard registers, nor do we examine ASM_CLOBBERS at the tree
     level, and so miss variable changes that such clobbers might
     imply.  All around, it seems safest to not do too much
     optimization with these at the tree level at all.  We'll have to
     rely on the rtl optimizers to clean this up, as there we've got
     all the appropriate bits exposed.  */
  if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
    return false;

  /* Complex and vector values must have been put into SSA-like form.
     That is, no assignments to the individual components.  */
  if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
    return DECL_GIMPLE_REG_P (t);

  return true;
}


/* Return true if T is a GIMPLE variable whose address is not needed.  */

bool
is_gimple_non_addressable (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    t = SSA_NAME_VAR (t);

  return (is_gimple_variable (t) && ! needs_to_live_in_memory (t));
}

/* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant.  */

bool
is_gimple_val (tree t)
{
  /* Make loads from volatiles and memory vars explicit.  */
  if (is_gimple_variable (t)
      && is_gimple_reg_type (TREE_TYPE (t))
      && !is_gimple_reg (t))
    return false;

  return (is_gimple_variable (t) || is_gimple_min_invariant (t));
}

/* Similarly, but accept hard registers as inputs to asm statements.  */

bool
is_gimple_asm_val (tree t)
{
  if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
    return true;

  return is_gimple_val (t);
}

/* Return true if T is a GIMPLE minimal lvalue.  */

bool
is_gimple_min_lval (tree t)
{
  if (!(t = CONST_CAST_TREE (strip_invariant_refs (t))))
    return false;
  return (is_gimple_id (t) || TREE_CODE (t) == MEM_REF);
}

/* Return true if T is a valid function operand of a CALL_EXPR.  */

bool
is_gimple_call_addr (tree t)
{
  return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t));
}

/* Return true if T is a valid address operand of a MEM_REF.  */

bool
is_gimple_mem_ref_addr (tree t)
{
  return (is_gimple_reg (t)
          || TREE_CODE (t) == INTEGER_CST
          || (TREE_CODE (t) == ADDR_EXPR
              && (CONSTANT_CLASS_P (TREE_OPERAND (t, 0))
                  || decl_address_invariant_p (TREE_OPERAND (t, 0)))));
}

/* If T makes a function call, return the corresponding CALL_EXPR operand.
   Otherwise, return NULL_TREE.  */

tree
get_call_expr_in (tree t)
{
  if (TREE_CODE (t) == MODIFY_EXPR)
    t = TREE_OPERAND (t, 1);
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);
  if (TREE_CODE (t) == CALL_EXPR)
    return t;
  return NULL_TREE;
}


/* Given a memory reference expression T, return its base address.
   The base address of a memory reference expression is the main
   object being referenced.  For instance, the base address for
   'array[i].fld[j]' is 'array'.  You can think of this as stripping
   away the offset part from a memory address.

   This function calls handled_component_p to strip away all the inner
   parts of the memory reference until it reaches the base object.  */

tree
get_base_address (tree t)
{
  while (handled_component_p (t))
    t = TREE_OPERAND (t, 0);

  if ((TREE_CODE (t) == MEM_REF
       || TREE_CODE (t) == TARGET_MEM_REF)
      && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
    t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);

  if (TREE_CODE (t) == SSA_NAME
      || DECL_P (t)
      || TREE_CODE (t) == STRING_CST
      || TREE_CODE (t) == CONSTRUCTOR
      || INDIRECT_REF_P (t)
      || TREE_CODE (t) == MEM_REF
      || TREE_CODE (t) == TARGET_MEM_REF)
    return t;
  else
    return NULL_TREE;
}
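
/* A worked example for get_base_address: given the reference
   "array[i].fld[j]", the handled_component_p loop strips the outer
   ARRAY_REF, the COMPONENT_REF and the inner ARRAY_REF and returns
   the VAR_DECL for "array".  A MEM_REF whose address operand is an
   ADDR_EXPR of a decl is looked through and the decl is returned,
   while a MEM_REF based on an SSA name pointer is returned as-is.  */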

void
recalculate_side_effects (tree t)
{
  enum tree_code code = TREE_CODE (t);
  int len = TREE_OPERAND_LENGTH (t);
  int i;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_expression:
      switch (code)
        {
        case INIT_EXPR:
        case MODIFY_EXPR:
        case VA_ARG_EXPR:
        case PREDECREMENT_EXPR:
        case PREINCREMENT_EXPR:
        case POSTDECREMENT_EXPR:
        case POSTINCREMENT_EXPR:
          /* All of these have side-effects, no matter what their
             operands are.  */
          return;

        default:
          break;
        }
      /* Fall through.  */

    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
    case tcc_reference:   /* a reference */
    case tcc_vl_exp:      /* a function call */
      TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
      for (i = 0; i < len; ++i)
        {
          tree op = TREE_OPERAND (t, i);
          if (op && TREE_SIDE_EFFECTS (op))
            TREE_SIDE_EFFECTS (t) = 1;
        }
      break;

    case tcc_constant:
      /* No side-effects.  */
      return;

    default:
      gcc_unreachable ();
    }
}

/* Canonicalize a tree T for use in a COND_EXPR as conditional.  Returns
   a canonicalized tree that is valid for a COND_EXPR, or NULL_TREE if
   we failed to create one.  */

tree
canonicalize_cond_expr_cond (tree t)
{
  /* Strip conversions around boolean operations.  */
  if (CONVERT_EXPR_P (t)
      && truth_value_p (TREE_CODE (TREE_OPERAND (t, 0))))
    t = TREE_OPERAND (t, 0);

  /* For (bool)x use x != 0.  */
  if (CONVERT_EXPR_P (t)
      && TREE_CODE (TREE_TYPE (t)) == BOOLEAN_TYPE)
    {
      tree top0 = TREE_OPERAND (t, 0);
      t = build2 (NE_EXPR, TREE_TYPE (t),
                  top0, build_int_cst (TREE_TYPE (top0), 0));
    }
  /* For !x use x == 0.  */
  else if (TREE_CODE (t) == TRUTH_NOT_EXPR)
    {
      tree top0 = TREE_OPERAND (t, 0);
      t = build2 (EQ_EXPR, TREE_TYPE (t),
                  top0, build_int_cst (TREE_TYPE (top0), 0));
    }
  /* For cmp ? 1 : 0 use cmp.  */
  else if (TREE_CODE (t) == COND_EXPR
           && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
           && integer_onep (TREE_OPERAND (t, 1))
           && integer_zerop (TREE_OPERAND (t, 2)))
    {
      tree top0 = TREE_OPERAND (t, 0);
      t = build2 (TREE_CODE (top0), TREE_TYPE (t),
                  TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
    }

  if (is_gimple_condexpr (t))
    return t;

  return NULL_TREE;
}
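
/* A minimal usage sketch (the helper name is hypothetical): build the
   tree for "!x", canonicalize it per the rules above ("!x" becomes
   "x == 0"), and install it as the predicate of COND_STMT, assumed to
   be a GIMPLE_COND.  X is assumed to be a GIMPLE value, e.g. an SSA
   name, so the result passes is_gimple_condexpr.  */

static void ATTRIBUTE_UNUSED
example_install_negated_cond (gimple cond_stmt, tree x)
{
  tree cond = build1 (TRUTH_NOT_EXPR, boolean_type_node, x);

  cond = canonicalize_cond_expr_cond (cond);
  if (cond != NULL_TREE)
    gimple_cond_set_condition_from_tree (cond_stmt, cond);
}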

/* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
   the positions marked by the set ARGS_TO_SKIP.  */

gimple
gimple_call_copy_skip_args (gimple stmt, bitmap args_to_skip)
{
  int i;
  tree fn = gimple_call_fn (stmt);
  int nargs = gimple_call_num_args (stmt);
  VEC(tree, heap) *vargs = VEC_alloc (tree, heap, nargs);
  gimple new_stmt;

  for (i = 0; i < nargs; i++)
    if (!bitmap_bit_p (args_to_skip, i))
      VEC_quick_push (tree, vargs, gimple_call_arg (stmt, i));

  new_stmt = gimple_build_call_vec (fn, vargs);
  VEC_free (tree, heap, vargs);
  if (gimple_call_lhs (stmt))
    gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));

  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
  gimple_set_vdef (new_stmt, gimple_vdef (stmt));

  gimple_set_block (new_stmt, gimple_block (stmt));
  if (gimple_has_location (stmt))
    gimple_set_location (new_stmt, gimple_location (stmt));
  gimple_call_copy_flags (new_stmt, stmt);
  gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));

  gimple_set_modified (new_stmt, true);

  return new_stmt;
}
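
/* A minimal usage sketch (the helper name is hypothetical): build a
   copy of CALL without its first argument.  ARGS_TO_SKIP is a bitmap
   of argument positions; bit I set means argument I is omitted from
   the new call.  */

static gimple ATTRIBUTE_UNUSED
example_drop_first_arg (gimple call)
{
  bitmap args_to_skip = BITMAP_ALLOC (NULL);
  gimple new_call;

  bitmap_set_bit (args_to_skip, 0);
  new_call = gimple_call_copy_skip_args (call, args_to_skip);
  BITMAP_FREE (args_to_skip);

  /* The caller still has to replace CALL with NEW_CALL in the IL,
     e.g. via gsi_replace.  */
  return new_call;
}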


static hashval_t gimple_type_hash_1 (const void *, enum gtc_mode);

/* Structure used to maintain a cache of some type pairs compared by
   gimple_types_compatible_p when comparing aggregate types.  There are
   three possible values for SAME_P:

   -2: The pair (T1, T2) has just been inserted in the table.
    0: T1 and T2 are different types.
    1: T1 and T2 are the same type.

   The two elements in the SAME_P array are indexed by the comparison
   mode gtc_mode.  */

struct type_pair_d
{
  unsigned int uid1;
  unsigned int uid2;
  signed char same_p[2];
};
typedef struct type_pair_d *type_pair_t;

DEF_VEC_P(type_pair_t);
DEF_VEC_ALLOC_P(type_pair_t,heap);

/* Return a hash value for the type pair pointed-to by P.  */

static hashval_t
type_pair_hash (const void *p)
{
  const struct type_pair_d *pair = (const struct type_pair_d *) p;
  hashval_t val1 = pair->uid1;
  hashval_t val2 = pair->uid2;
  return (iterative_hash_hashval_t (val2, val1)
          ^ iterative_hash_hashval_t (val1, val2));
}

/* Compare two type pairs pointed-to by P1 and P2.  */

static int
type_pair_eq (const void *p1, const void *p2)
{
  const struct type_pair_d *pair1 = (const struct type_pair_d *) p1;
  const struct type_pair_d *pair2 = (const struct type_pair_d *) p2;
  return ((pair1->uid1 == pair2->uid1 && pair1->uid2 == pair2->uid2)
          || (pair1->uid1 == pair2->uid2 && pair1->uid2 == pair2->uid1));
}
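
/* Note on the two functions above: the hash is deliberately symmetric,
   matching the order-insensitive type_pair_eq.  For UIDs a and b,

     type_pair_hash ({a, b}) == iterative_hash_hashval_t (b, a)
                                ^ iterative_hash_hashval_t (a, b)

   and XOR is commutative, so {a, b} and {b, a} hash to the same value
   and land in the same hashtable slot.  */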

/* Lookup the pair of types T1 and T2 in *VISITED_P.  Insert a new
   entry if none existed.  */

static type_pair_t
lookup_type_pair (tree t1, tree t2, htab_t *visited_p, struct obstack *ob_p)
{
  struct type_pair_d pair;
  type_pair_t p;
  void **slot;

  if (*visited_p == NULL)
    {
      *visited_p = htab_create (251, type_pair_hash, type_pair_eq, NULL);
      gcc_obstack_init (ob_p);
    }

  pair.uid1 = TYPE_UID (t1);
  pair.uid2 = TYPE_UID (t2);
  slot = htab_find_slot (*visited_p, &pair, INSERT);

  if (*slot)
    p = *((type_pair_t *) slot);
  else
    {
      p = XOBNEW (ob_p, struct type_pair_d);
      p->uid1 = TYPE_UID (t1);
      p->uid2 = TYPE_UID (t2);
      p->same_p[0] = -2;
      p->same_p[1] = -2;
      *slot = (void *) p;
    }

  return p;
}

/* Per pointer state for the SCC finding.  The on_sccstack flag
   is not strictly required, it is true when there is no hash value
   recorded for the type and false otherwise.  But querying that
   is slower.  */

struct sccs
{
  unsigned int dfsnum;
  unsigned int low;
  bool on_sccstack;
  union {
    hashval_t hash;
    signed char same_p;
  } u;
};

static unsigned int next_dfs_num;
static unsigned int gtc_next_dfs_num;


/* GIMPLE type merging cache.  A direct-mapped cache based on TYPE_UID.  */

typedef struct GTY(()) gimple_type_leader_entry_s {
  tree type;
  tree leader;
} gimple_type_leader_entry;

#define GIMPLE_TYPE_LEADER_SIZE 16381
static GTY((deletable, length("GIMPLE_TYPE_LEADER_SIZE")))
  gimple_type_leader_entry *gimple_type_leader;

/* Lookup an existing leader for T and return it or NULL_TREE, if
   there is none in the cache.  */

static tree
gimple_lookup_type_leader (tree t)
{
  gimple_type_leader_entry *leader;

  if (!gimple_type_leader)
    return NULL_TREE;

  leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
  if (leader->type != t)
    return NULL_TREE;

  return leader->leader;
}

/* Return true if T1 and T2 have the same name.  If FOR_COMPLETION_P is
   true, anonymous types are considered all distinct: return false if
   either type lacks a name.  Otherwise two unnamed types compare equal.  */

static bool
compare_type_names_p (tree t1, tree t2, bool for_completion_p)
{
  tree name1 = TYPE_NAME (t1);
  tree name2 = TYPE_NAME (t2);

  /* Consider anonymous types all unique for completion.  */
  if (for_completion_p
      && (!name1 || !name2))
    return false;

  if (name1 && TREE_CODE (name1) == TYPE_DECL)
    {
      name1 = DECL_NAME (name1);
      if (for_completion_p
          && !name1)
        return false;
    }
  gcc_assert (!name1 || TREE_CODE (name1) == IDENTIFIER_NODE);

  if (name2 && TREE_CODE (name2) == TYPE_DECL)
    {
      name2 = DECL_NAME (name2);
      if (for_completion_p
          && !name2)
        return false;
    }
  gcc_assert (!name2 || TREE_CODE (name2) == IDENTIFIER_NODE);

  /* Identifiers can be compared with pointer equality rather
     than a string comparison.  */
  if (name1 == name2)
    return true;

  return false;
}

/* Return true if the field decls F1 and F2 are at the same offset.

   This is intended to be used on GIMPLE types only.  */

bool
gimple_compare_field_offset (tree f1, tree f2)
{
  if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2))
    {
      tree offset1 = DECL_FIELD_OFFSET (f1);
      tree offset2 = DECL_FIELD_OFFSET (f2);
      return ((offset1 == offset2
               /* Once gimplification is done, self-referential offsets are
                  instantiated as operand #2 of the COMPONENT_REF built for
                  each access and reset.  Therefore, they are not relevant
                  anymore and fields are interchangeable provided that they
                  represent the same access.  */
               || (TREE_CODE (offset1) == PLACEHOLDER_EXPR
                   && TREE_CODE (offset2) == PLACEHOLDER_EXPR
                   && (DECL_SIZE (f1) == DECL_SIZE (f2)
                       || (TREE_CODE (DECL_SIZE (f1)) == PLACEHOLDER_EXPR
                           && TREE_CODE (DECL_SIZE (f2)) == PLACEHOLDER_EXPR)
                       || operand_equal_p (DECL_SIZE (f1), DECL_SIZE (f2), 0))
                   && DECL_ALIGN (f1) == DECL_ALIGN (f2))
               || operand_equal_p (offset1, offset2, 0))
              && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1),
                                     DECL_FIELD_BIT_OFFSET (f2)));
    }

  /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
     should be, so handle differing ones specially by decomposing
     the offset into a byte and bit offset manually.  */
  if (host_integerp (DECL_FIELD_OFFSET (f1), 0)
      && host_integerp (DECL_FIELD_OFFSET (f2), 0))
    {
      unsigned HOST_WIDE_INT byte_offset1, byte_offset2;
      unsigned HOST_WIDE_INT bit_offset1, bit_offset2;
      bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1));
      byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1))
                      + bit_offset1 / BITS_PER_UNIT);
      bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2));
      byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2))
                      + bit_offset2 / BITS_PER_UNIT);
      if (byte_offset1 != byte_offset2)
        return false;
      return bit_offset1 % BITS_PER_UNIT == bit_offset2 % BITS_PER_UNIT;
    }

  return false;
}
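
/* A worked example for the mixed-alignment path above, with
   BITS_PER_UNIT == 8: a field with DECL_FIELD_OFFSET 4 and
   DECL_FIELD_BIT_OFFSET 10 decomposes into byte offset 4 + 10/8 == 5
   and residual bit offset 10 % 8 == 2.  Two fields with differing
   DECL_OFFSET_ALIGN compare equal iff both of those numbers match.  */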

/* If the type T1 and the type T2 are a complete and an incomplete
   variant of the same type return true.  */

static bool
gimple_compatible_complete_and_incomplete_subtype_p (tree t1, tree t2)
{
  /* If one pointer points to an incomplete type variant of
     the other pointed-to type they are the same.  */
  if (TREE_CODE (t1) == TREE_CODE (t2)
      && RECORD_OR_UNION_TYPE_P (t1)
      && (!COMPLETE_TYPE_P (t1)
          || !COMPLETE_TYPE_P (t2))
      && TYPE_QUALS (t1) == TYPE_QUALS (t2)
      && compare_type_names_p (TYPE_MAIN_VARIANT (t1),
                               TYPE_MAIN_VARIANT (t2), true))
    return true;
  return false;
}

static bool
gimple_types_compatible_p_1 (tree, tree, enum gtc_mode, type_pair_t,
                             VEC(type_pair_t, heap) **,
                             struct pointer_map_t *, struct obstack *);

/* DFS visit the edge from the caller's type pair with state *STATE to
   the pair T1, T2 while operating in mode MODE.
   Update the merging status if it is not part of the SCC containing the
   caller's pair and return it.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.  */

static bool
gtc_visit (tree t1, tree t2, enum gtc_mode mode,
           struct sccs *state,
           VEC(type_pair_t, heap) **sccstack,
           struct pointer_map_t *sccstate,
           struct obstack *sccstate_obstack)
{
  struct sccs *cstate = NULL;
  type_pair_t p;
  void **slot;

  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Check that we have two types to compare.  */
  if (t1 == NULL_TREE || t2 == NULL_TREE)
    return false;

  /* If the types have been previously registered and found equal
     they still are.  */
  if (mode == GTC_MERGE)
    {
      tree leader1 = gimple_lookup_type_leader (t1);
      tree leader2 = gimple_lookup_type_leader (t2);
      if (leader1 == t2
          || t1 == leader2
          || (leader1 && leader1 == leader2))
        return true;
    }
  else if (mode == GTC_DIAG)
    {
      if (TYPE_CANONICAL (t1)
          && TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2))
        return true;
    }

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;

  /* Can't be the same type if they have different CV qualifiers.  */
  if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
    return false;

  /* Void types are always the same.  */
  if (TREE_CODE (t1) == VOID_TYPE)
    return true;

  /* Do some simple checks before doing three hashtable queries.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE)
    {
      /* Can't be the same type if they have different alignment,
         sign, precision or mode.  */
      if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
          || TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
          || TYPE_MODE (t1) != TYPE_MODE (t2)
          || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
        return false;

      if (TREE_CODE (t1) == INTEGER_TYPE
          && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
              || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
        return false;

      /* That's all we need to check for float and fixed-point types.  */
      if (SCALAR_FLOAT_TYPE_P (t1)
          || FIXED_POINT_TYPE_P (t1))
        return true;

      /* For integral types fall thru to more complex checks.  */
    }

  else if (AGGREGATE_TYPE_P (t1) || POINTER_TYPE_P (t1))
    {
      /* Can't be the same type if they have different alignment or mode.  */
      if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
          || TYPE_MODE (t1) != TYPE_MODE (t2))
        return false;
    }

  /* If the hash values of t1 and t2 are different the types can't
     possibly be the same.  This helps keeping the type-pair hashtable
     small, only tracking comparisons for hash collisions.  */
  if (gimple_type_hash_1 (t1, mode) != gimple_type_hash_1 (t2, mode))
    return false;

  /* Allocate a new cache entry for this comparison.  */
  p = lookup_type_pair (t1, t2, &gtc_visited, &gtc_ob);
  if (p->same_p[mode] == 0 || p->same_p[mode] == 1)
    {
      /* We have already decided whether T1 and T2 are the
         same, return the cached result.  */
      return p->same_p[mode] == 1;
    }

  if ((slot = pointer_map_contains (sccstate, p)) != NULL)
    cstate = (struct sccs *)*slot;
  /* Not yet visited.  DFS recurse.  */
  if (!cstate)
    {
      gimple_types_compatible_p_1 (t1, t2, mode, p,
                                   sccstack, sccstate, sccstate_obstack);
      cstate = (struct sccs *)* pointer_map_contains (sccstate, p);
      state->low = MIN (state->low, cstate->low);
    }
  /* If the type is still on the SCC stack adjust the parent's low.  */
  if (cstate->dfsnum < state->dfsnum
      && cstate->on_sccstack)
    state->low = MIN (cstate->dfsnum, state->low);

  /* Return the current lattice value.  We start with an equality
     assumption so types part of a SCC will be optimistically
     treated equal unless proven otherwise.  */
  return cstate->u.same_p;
}

/* Worker for gimple_types_compatible_p.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.  */

static bool
gimple_types_compatible_p_1 (tree t1, tree t2, enum gtc_mode mode,
                             type_pair_t p,
                             VEC(type_pair_t, heap) **sccstack,
                             struct pointer_map_t *sccstate,
                             struct obstack *sccstate_obstack)
{
  struct sccs *state;

  gcc_assert (p->same_p[mode] == -2);

  state = XOBNEW (sccstate_obstack, struct sccs);
  *pointer_map_insert (sccstate, p) = state;

  VEC_safe_push (type_pair_t, heap, *sccstack, p);
  state->dfsnum = gtc_next_dfs_num++;
  state->low = state->dfsnum;
  state->on_sccstack = true;
  /* Start with an equality assumption.  As we DFS recurse into child
     SCCs this assumption may get revisited.  */
  state->u.same_p = 1;

  /* If their attributes are not the same they can't be the same type.  */
  if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
    goto different_types;

  /* Do type-specific comparisons.  */
  switch (TREE_CODE (t1))
    {
    case VECTOR_TYPE:
    case COMPLEX_TYPE:
      if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2), mode,
                      state, sccstack, sccstate, sccstate_obstack))
        goto different_types;
      goto same_types;

    case ARRAY_TYPE:
      /* Array types are the same if the element types are the same and
         the number of elements are the same.  */
      if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2), mode,
                      state, sccstack, sccstate, sccstate_obstack)
          || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
          || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
        goto different_types;
      else
        {
          tree i1 = TYPE_DOMAIN (t1);
          tree i2 = TYPE_DOMAIN (t2);

          /* For an incomplete external array, the type domain can be
             NULL_TREE.  Check this condition also.  */
          if (i1 == NULL_TREE && i2 == NULL_TREE)
            goto same_types;
          else if (i1 == NULL_TREE || i2 == NULL_TREE)
            goto different_types;
          /* If for a complete array type the possibly gimplified sizes
             are different the types are different.  */
          else if (((TYPE_SIZE (i1) != NULL) ^ (TYPE_SIZE (i2) != NULL))
                   || (TYPE_SIZE (i1)
                       && TYPE_SIZE (i2)
                       && !operand_equal_p (TYPE_SIZE (i1), TYPE_SIZE (i2), 0)))
            goto different_types;
          else
            {
              tree min1 = TYPE_MIN_VALUE (i1);
              tree min2 = TYPE_MIN_VALUE (i2);
              tree max1 = TYPE_MAX_VALUE (i1);
              tree max2 = TYPE_MAX_VALUE (i2);

              /* The minimum/maximum values have to be the same.  */
              if ((min1 == min2
                   || (min1 && min2
                       && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
                            && TREE_CODE (min2) == PLACEHOLDER_EXPR)
                           || operand_equal_p (min1, min2, 0))))
                  && (max1 == max2
                      || (max1 && max2
                          && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
                               && TREE_CODE (max2) == PLACEHOLDER_EXPR)
                              || operand_equal_p (max1, max2, 0)))))
                goto same_types;
              else
                goto different_types;
            }
        }

    case METHOD_TYPE:
      /* Method types should belong to the same class.  */
      if (!gtc_visit (TYPE_METHOD_BASETYPE (t1), TYPE_METHOD_BASETYPE (t2),
                      mode, state, sccstack, sccstate, sccstate_obstack))
        goto different_types;

      /* Fallthru  */

    case FUNCTION_TYPE:
      /* Function types are the same if the return type and arguments types
         are the same.  */
      if ((mode != GTC_DIAG
           || !gimple_compatible_complete_and_incomplete_subtype_p
                (TREE_TYPE (t1), TREE_TYPE (t2)))
          && !gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2), mode,
                         state, sccstack, sccstate, sccstate_obstack))
        goto different_types;

      if (!comp_type_attributes (t1, t2))
        goto different_types;

      if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
        goto same_types;
      else
        {
          tree parms1, parms2;

          for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
               parms1 && parms2;
               parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
            {
              if ((mode == GTC_MERGE
                   || !gimple_compatible_complete_and_incomplete_subtype_p
                        (TREE_VALUE (parms1), TREE_VALUE (parms2)))
                  && !gtc_visit (TREE_VALUE (parms1), TREE_VALUE (parms2), mode,
                                 state, sccstack, sccstate, sccstate_obstack))
                goto different_types;
            }

          if (parms1 || parms2)
            goto different_types;

          goto same_types;
        }

    case OFFSET_TYPE:
      {
        if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2), mode,
                        state, sccstack, sccstate, sccstate_obstack)
            || !gtc_visit (TYPE_OFFSET_BASETYPE (t1),
                           TYPE_OFFSET_BASETYPE (t2), mode,
                           state, sccstack, sccstate, sccstate_obstack))
          goto different_types;

        goto same_types;
      }

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      {
        /* If the two pointers have different ref-all attributes,
           they can't be the same type.  */
        if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
          goto different_types;

        /* If one pointer points to an incomplete type variant of
           the other pointed-to type they are the same.  */
        if (mode == GTC_DIAG
            && gimple_compatible_complete_and_incomplete_subtype_p
                 (TREE_TYPE (t1), TREE_TYPE (t2)))
          goto same_types;

        /* Otherwise, pointer and reference types are the same if the
           pointed-to types are the same.  */
        if (gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2), mode,
                       state, sccstack, sccstate, sccstate_obstack))
          goto same_types;

        goto different_types;
      }

    case NULLPTR_TYPE:
      /* There is only one decltype(nullptr).  */
      goto same_types;

    case INTEGER_TYPE:
    case BOOLEAN_TYPE:
      {
        tree min1 = TYPE_MIN_VALUE (t1);
        tree max1 = TYPE_MAX_VALUE (t1);
        tree min2 = TYPE_MIN_VALUE (t2);
        tree max2 = TYPE_MAX_VALUE (t2);
        bool min_equal_p = false;
        bool max_equal_p = false;

        /* If either type has a minimum value, the other type must
           have the same.  */
        if (min1 == NULL_TREE && min2 == NULL_TREE)
          min_equal_p = true;
        else if (min1 && min2 && operand_equal_p (min1, min2, 0))
          min_equal_p = true;

        /* Likewise, if either type has a maximum value, the other
           type must have the same.  */
        if (max1 == NULL_TREE && max2 == NULL_TREE)
          max_equal_p = true;
        else if (max1 && max2 && operand_equal_p (max1, max2, 0))
          max_equal_p = true;

        if (!min_equal_p || !max_equal_p)
          goto different_types;

        goto same_types;
      }

    case ENUMERAL_TYPE:
      {
        /* FIXME lto, we cannot check bounds on enumeral types because
           different front ends will produce different values.
           In C, enumeral types are integers, while in C++ each element
           will have its own symbolic value.  We should decide how enums
           are to be represented in GIMPLE and have each front end lower
           to that.  */
        tree v1, v2;

        /* For enumeral types, all the values must be the same.  */
        if (TYPE_VALUES (t1) == TYPE_VALUES (t2))
          goto same_types;

        for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
             v1 && v2;
             v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
          {
            tree c1 = TREE_VALUE (v1);
            tree c2 = TREE_VALUE (v2);

            if (TREE_CODE (c1) == CONST_DECL)
              c1 = DECL_INITIAL (c1);

            if (TREE_CODE (c2) == CONST_DECL)
              c2 = DECL_INITIAL (c2);

            if (tree_int_cst_equal (c1, c2) != 1)
              goto different_types;

            if (mode == GTC_MERGE && TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
              goto different_types;
          }

        /* If one enumeration has more values than the other, they
           are not the same.  */
        if (v1 || v2)
          goto different_types;

        goto same_types;
      }

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree f1, f2;

        /* The struct tags shall compare equal.  */
        if (mode == GTC_MERGE
            && !compare_type_names_p (TYPE_MAIN_VARIANT (t1),
                                      TYPE_MAIN_VARIANT (t2), false))
          goto different_types;

        /* For aggregate types, all the fields must be the same.  */
        for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
             f1 && f2;
             f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
          {
            /* The fields must have the same name, offset and type.  */
            if ((mode == GTC_MERGE
                 && DECL_NAME (f1) != DECL_NAME (f2))
                || DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
                || !gimple_compare_field_offset (f1, f2)
                || !gtc_visit (TREE_TYPE (f1), TREE_TYPE (f2), mode,
                               state, sccstack, sccstate, sccstate_obstack))
              goto different_types;
          }

        /* If one aggregate has more fields than the other, they
           are not the same.  */
        if (f1 || f2)
          goto different_types;

        goto same_types;
      }

    default:
      gcc_unreachable ();
    }

  /* Common exit path for types that are not compatible.  */
different_types:
  state->u.same_p = 0;
  goto pop;

  /* Common exit path for types that are compatible.  */
same_types:
  gcc_assert (state->u.same_p == 1);

pop:
  if (state->low == state->dfsnum)
    {
      type_pair_t x;

      /* Pop off the SCC and set its cache values to the final
         comparison result.  */
      do
        {
          struct sccs *cstate;
          x = VEC_pop (type_pair_t, *sccstack);
          cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
          cstate->on_sccstack = false;
          x->same_p[mode] = state->u.same_p;
        }
      while (x != p);
    }

  return state->u.same_p;
}
d7f09764 3815
/* Return true iff T1 and T2 are structurally identical.  When MODE is
   GTC_MERGE, an incomplete type and a complete type are considered
   different; otherwise they are considered compatible.  */

bool
gimple_types_compatible_p (tree t1, tree t2, enum gtc_mode mode)
{
  VEC(type_pair_t, heap) *sccstack = NULL;
  struct pointer_map_t *sccstate;
  struct obstack sccstate_obstack;
  type_pair_t p = NULL;
  bool res;

  /* Before starting to set up the SCC machinery handle simple cases.  */

  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Check that we have two types to compare.  */
  if (t1 == NULL_TREE || t2 == NULL_TREE)
    return false;

  /* If the types have been previously registered and found equal
     they still are.  */
  if (mode == GTC_MERGE)
    {
      tree leader1 = gimple_lookup_type_leader (t1);
      tree leader2 = gimple_lookup_type_leader (t2);
      if (leader1 == t2
	  || t1 == leader2
	  || (leader1 && leader1 == leader2))
	return true;
    }
  else if (mode == GTC_DIAG)
    {
      if (TYPE_CANONICAL (t1)
	  && TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2))
	return true;
    }

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;

  /* Can't be the same type if they have different CV qualifiers.  */
  if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
    return false;

  /* Void types are always the same.  */
  if (TREE_CODE (t1) == VOID_TYPE)
    return true;

  /* Do some simple checks before doing three hashtable queries.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE)
    {
      /* Can't be the same type if they have different alignment,
	 sign, precision or mode.  */
      if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
	  || TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
	  || TYPE_MODE (t1) != TYPE_MODE (t2)
	  || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
	return false;

      if (TREE_CODE (t1) == INTEGER_TYPE
	  && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
	      || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
	return false;

      /* That's all we need to check for float and fixed-point types.  */
      if (SCALAR_FLOAT_TYPE_P (t1)
	  || FIXED_POINT_TYPE_P (t1))
	return true;

      /* For integral types fall thru to more complex checks.  */
    }

  else if (AGGREGATE_TYPE_P (t1) || POINTER_TYPE_P (t1))
    {
      /* Can't be the same type if they have different alignment or mode.  */
      if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
	  || TYPE_MODE (t1) != TYPE_MODE (t2))
	return false;
    }

  /* If the hash values of t1 and t2 are different the types can't
     possibly be the same.  This helps keep the type-pair hashtable
     small, only tracking comparisons for hash collisions.  */
  if (gimple_type_hash_1 (t1, mode) != gimple_type_hash_1 (t2, mode))
    return false;

  /* If we've visited this type pair before (in the case of aggregates
     with self-referential types), and we made a decision, return it.  */
  p = lookup_type_pair (t1, t2, &gtc_visited, &gtc_ob);
  if (p->same_p[mode] == 0 || p->same_p[mode] == 1)
    {
      /* We have already decided whether T1 and T2 are the
	 same, return the cached result.  */
      return p->same_p[mode] == 1;
    }

  /* Now set up the SCC machinery for the comparison.  */
  gtc_next_dfs_num = 1;
  sccstate = pointer_map_create ();
  gcc_obstack_init (&sccstate_obstack);
  res = gimple_types_compatible_p_1 (t1, t2, mode, p,
				     &sccstack, sccstate, &sccstate_obstack);
  VEC_free (type_pair_t, heap, sccstack);
  pointer_map_destroy (sccstate);
  obstack_free (&sccstate_obstack, NULL);

  return res;
}
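
/* Usage sketch (illustrative only; the function and its name below are
   hypothetical and not used anywhere in GCC): decide whether two
   streamed-in types may be merged, falling back to the laxer
   diagnostic comparison otherwise.  */
#if 0
static bool
example_types_mergeable_p (tree t1, tree t2, bool merging)
{
  /* GTC_MERGE requires matching completeness where it matters, while
     GTC_DIAG treats complete and incomplete variants as compatible.  */
  return gimple_types_compatible_p (t1, t2, merging ? GTC_MERGE : GTC_DIAG);
}
#endif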


static hashval_t
iterative_hash_gimple_type (tree, hashval_t, VEC(tree, heap) **,
			    struct pointer_map_t *, struct obstack *,
			    enum gtc_mode);

/* DFS visit the edge from the caller's type with state *STATE to T.
   Update the caller's type hash V with the hash for T if it is not part
   of the SCC containing the caller's type and return it.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.  */

static hashval_t
visit (tree t, struct sccs *state, hashval_t v,
       VEC (tree, heap) **sccstack,
       struct pointer_map_t *sccstate,
       struct obstack *sccstate_obstack, enum gtc_mode mode)
{
  struct sccs *cstate = NULL;
  struct tree_int_map m;
  void **slot;

  /* If there is a hash value recorded for this type then it can't
     possibly be part of our parent SCC.  Simply mix in its hash.  */
  m.base.from = t;
  if ((slot = htab_find_slot (mode == GTC_MERGE
			      ? type_hash_cache : canonical_type_hash_cache,
			      &m, NO_INSERT))
      && *slot)
    return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, v);

  if ((slot = pointer_map_contains (sccstate, t)) != NULL)
    cstate = (struct sccs *)*slot;
  if (!cstate)
    {
      hashval_t tem;
      /* Not yet visited.  DFS recurse.  */
      tem = iterative_hash_gimple_type (t, v,
					sccstack, sccstate, sccstate_obstack,
					mode);
      if (!cstate)
	cstate = (struct sccs *)*pointer_map_contains (sccstate, t);
      state->low = MIN (state->low, cstate->low);
      /* If the type is no longer on the SCC stack and thus is not part
	 of the parent's SCC mix in its hash value.  Otherwise we will
	 ignore the type for hashing purposes and return the unaltered
	 hash value.  */
      if (!cstate->on_sccstack)
	return tem;
    }
  if (cstate->dfsnum < state->dfsnum
      && cstate->on_sccstack)
    state->low = MIN (cstate->dfsnum, state->low);

  /* We are part of our parent's SCC, skip this type during hashing
     and return the unaltered hash value.  */
  return v;
}

/* Hash NAME with the previous hash value V and return it.  */

static hashval_t
iterative_hash_name (tree name, hashval_t v)
{
  if (!name)
    return v;
  if (TREE_CODE (name) == TYPE_DECL)
    name = DECL_NAME (name);
  if (!name)
    return v;
  gcc_assert (TREE_CODE (name) == IDENTIFIER_NODE);
  return iterative_hash_object (IDENTIFIER_HASH_VALUE (name), v);
}

/* Return a hash value for the gimple type TYPE, combined with VAL.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.

   To hash a type we end up hashing in the types that are reachable
   from it.  Through pointers we can end up with cycles, which mess up
   the required property that we compute the same hash value for
   structurally equivalent types.  To avoid this we have to hash all
   types in a cycle (the SCC) in a commutative way.  The easiest way
   is to not mix in the hashes of the SCC members at all.  To make
   this work we have to delay setting the hash values of the SCC
   until it is complete.  */
4019
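/* For instance (an illustrative example): given the self-referential
   type

     struct S { struct S *next; };

   the edge from S back to itself via 'next' forms an SCC.  Mixing the
   not-yet-computed hash of S into its own hash would make the result
   depend on traversal order, so SCC members are skipped during hashing
   and their final hash values are recorded only once the whole SCC has
   been processed.  That way two structurally identical copies of S
   from different TUs reliably hash to the same value.  */
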
static hashval_t
iterative_hash_gimple_type (tree type, hashval_t val,
			    VEC(tree, heap) **sccstack,
			    struct pointer_map_t *sccstate,
			    struct obstack *sccstate_obstack,
			    enum gtc_mode mode)
{
  hashval_t v;
  void **slot;
  struct sccs *state;

  /* Not visited during this DFS walk.  */
  gcc_checking_assert (!pointer_map_contains (sccstate, type));
  state = XOBNEW (sccstate_obstack, struct sccs);
  *pointer_map_insert (sccstate, type) = state;

  VEC_safe_push (tree, heap, *sccstack, type);
  state->dfsnum = next_dfs_num++;
  state->low = state->dfsnum;
  state->on_sccstack = true;

  /* Combine a few common features of types so that types are grouped into
     smaller sets; when searching for existing matching types to merge,
     only existing types having the same features as the new type will be
     checked.  */
  v = iterative_hash_hashval_t (TREE_CODE (type), 0);
  v = iterative_hash_hashval_t (TYPE_QUALS (type), v);
  v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);

  /* Do not hash the type's size as this will cause differences in
     hash values for the complete vs. the incomplete type variant.  */

  /* Incorporate common features of numerical types.  */
  if (INTEGRAL_TYPE_P (type)
      || SCALAR_FLOAT_TYPE_P (type)
      || FIXED_POINT_TYPE_P (type))
    {
      v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
      v = iterative_hash_hashval_t (TYPE_MODE (type), v);
      v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
    }

  /* For pointer and reference types, fold in information about the type
     pointed to but do not recurse into possibly incomplete types to
     avoid hash differences for complete vs. incomplete types.  */
  if (POINTER_TYPE_P (type))
    {
      if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (type)))
	{
	  v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
	  v = iterative_hash_name
		(TYPE_NAME (TYPE_MAIN_VARIANT (TREE_TYPE (type))), v);
	}
      else
	v = visit (TREE_TYPE (type), state, v,
		   sccstack, sccstate, sccstate_obstack, mode);
    }

  /* For integer types hash the type's min/max values and the string flag.  */
  if (TREE_CODE (type) == INTEGER_TYPE)
    {
      /* OMP lowering can introduce error_mark_node in place of
	 random local decls in types.  */
      if (TYPE_MIN_VALUE (type) != error_mark_node)
	v = iterative_hash_expr (TYPE_MIN_VALUE (type), v);
      if (TYPE_MAX_VALUE (type) != error_mark_node)
	v = iterative_hash_expr (TYPE_MAX_VALUE (type), v);
      v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
    }

  /* For array types hash their domain and the string flag.  */
  if (TREE_CODE (type) == ARRAY_TYPE
      && TYPE_DOMAIN (type))
    {
      v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
      v = visit (TYPE_DOMAIN (type), state, v,
		 sccstack, sccstate, sccstate_obstack, mode);
    }

  /* Recurse for aggregates with a single element type.  */
  if (TREE_CODE (type) == ARRAY_TYPE
      || TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    v = visit (TREE_TYPE (type), state, v,
	       sccstack, sccstate, sccstate_obstack, mode);

  /* Incorporate function return and argument types.  */
  if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
    {
      unsigned na;
      tree p;

      /* For method types also incorporate their parent class.  */
      if (TREE_CODE (type) == METHOD_TYPE)
	v = visit (TYPE_METHOD_BASETYPE (type), state, v,
		   sccstack, sccstate, sccstate_obstack, mode);

      /* For result types allow mismatch in completeness.  */
      if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (type)))
	{
	  v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
	  v = iterative_hash_name
		(TYPE_NAME (TYPE_MAIN_VARIANT (TREE_TYPE (type))), v);
	}
      else
	v = visit (TREE_TYPE (type), state, v,
		   sccstack, sccstate, sccstate_obstack, mode);

      for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
	{
	  /* For argument types allow mismatch in completeness.  */
	  if (RECORD_OR_UNION_TYPE_P (TREE_VALUE (p)))
	    {
	      v = iterative_hash_hashval_t (TREE_CODE (TREE_VALUE (p)), v);
	      v = iterative_hash_name
		    (TYPE_NAME (TYPE_MAIN_VARIANT (TREE_VALUE (p))), v);
	    }
	  else
	    v = visit (TREE_VALUE (p), state, v,
		       sccstack, sccstate, sccstate_obstack, mode);
	  na++;
	}

      v = iterative_hash_hashval_t (na, v);
    }

  if (TREE_CODE (type) == RECORD_TYPE
      || TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      unsigned nf;
      tree f;

      if (mode == GTC_MERGE)
	v = iterative_hash_name (TYPE_NAME (TYPE_MAIN_VARIANT (type)), v);

      for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
	{
	  if (mode == GTC_MERGE)
	    v = iterative_hash_name (DECL_NAME (f), v);
	  v = visit (TREE_TYPE (f), state, v,
		     sccstack, sccstate, sccstate_obstack, mode);
	  nf++;
	}

      v = iterative_hash_hashval_t (nf, v);
    }

  /* Record the hash for this type.  */
  state->u.hash = v;

  /* See if we found an SCC.  */
  if (state->low == state->dfsnum)
    {
      tree x;

      /* Pop off the SCC and set its hash values.  */
      do
	{
	  struct sccs *cstate;
	  struct tree_int_map *m = ggc_alloc_cleared_tree_int_map ();
	  x = VEC_pop (tree, *sccstack);
	  cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
	  cstate->on_sccstack = false;
	  m->base.from = x;
	  m->to = cstate->u.hash;
	  slot = htab_find_slot (mode == GTC_MERGE
				 ? type_hash_cache : canonical_type_hash_cache,
				 m, INSERT);
	  gcc_assert (!*slot);
	  *slot = (void *) m;
	}
      while (x != type);
    }

  return iterative_hash_hashval_t (v, val);
}


/* Returns a hash value for P (assumed to be a type).  The hash value
   is computed using some distinguishing features of the type.  Note
   that we cannot use pointer hashing here as we may be dealing with
   two distinct instances of the same type.

   This function should produce the same hash value for two compatible
   types according to gimple_types_compatible_p.  */

static hashval_t
gimple_type_hash_1 (const void *p, enum gtc_mode mode)
{
  const_tree t = (const_tree) p;
  VEC(tree, heap) *sccstack = NULL;
  struct pointer_map_t *sccstate;
  struct obstack sccstate_obstack;
  hashval_t val;
  void **slot;
  struct tree_int_map m;

  if (mode == GTC_MERGE
      && type_hash_cache == NULL)
    type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
				       tree_int_map_eq, NULL);
  else if (mode == GTC_DIAG
	   && canonical_type_hash_cache == NULL)
    canonical_type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
						 tree_int_map_eq, NULL);

  m.base.from = CONST_CAST_TREE (t);
  if ((slot = htab_find_slot (mode == GTC_MERGE
			      ? type_hash_cache : canonical_type_hash_cache,
			      &m, NO_INSERT))
      && *slot)
    return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, 0);

  /* Perform a DFS walk and pre-hash all reachable types.  */
  next_dfs_num = 1;
  sccstate = pointer_map_create ();
  gcc_obstack_init (&sccstate_obstack);
  val = iterative_hash_gimple_type (CONST_CAST_TREE (t), 0,
				    &sccstack, sccstate, &sccstate_obstack,
				    mode);
  VEC_free (tree, heap, sccstack);
  pointer_map_destroy (sccstate);
  obstack_free (&sccstate_obstack, NULL);

  return val;
}

static hashval_t
gimple_type_hash (const void *p)
{
  return gimple_type_hash_1 (p, GTC_MERGE);
}

static hashval_t
gimple_canonical_type_hash (const void *p)
{
  return gimple_type_hash_1 (p, GTC_DIAG);
}


/* Returns nonzero if P1 and P2 are equal.  */

static int
gimple_type_eq (const void *p1, const void *p2)
{
  const_tree t1 = (const_tree) p1;
  const_tree t2 = (const_tree) p2;
  return gimple_types_compatible_p (CONST_CAST_TREE (t1),
				    CONST_CAST_TREE (t2), GTC_MERGE);
}


/* Register type T in the global type table gimple_types.
   If another type T', compatible with T, already existed in
   gimple_types then return T', otherwise return T.  This is used by
   LTO to merge identical types read from different TUs.  */

tree
gimple_register_type (tree t)
{
  void **slot;
  gimple_type_leader_entry *leader;
  tree mv_leader = NULL_TREE;

  gcc_assert (TYPE_P (t));

  if (!gimple_type_leader)
    gimple_type_leader = ggc_alloc_cleared_vec_gimple_type_leader_entry_s
			   (GIMPLE_TYPE_LEADER_SIZE);
  /* If we registered this type before, return the cached result.  */
  leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
  if (leader->type == t)
    return leader->leader;

  /* Always register the main variant first.  This is important so we
     pick up the non-typedef variants as canonical, otherwise we'll end
     up taking typedef ids for structure tags during comparison.  */
  if (TYPE_MAIN_VARIANT (t) != t)
    mv_leader = gimple_register_type (TYPE_MAIN_VARIANT (t));

  if (gimple_types == NULL)
    gimple_types = htab_create_ggc (16381, gimple_type_hash, gimple_type_eq, 0);

  slot = htab_find_slot (gimple_types, t, INSERT);
  if (*slot
      && *(tree *)slot != t)
    {
      tree new_type = (tree) *((tree *) slot);

      /* Do not merge types with different addressability.  */
      gcc_assert (TREE_ADDRESSABLE (t) == TREE_ADDRESSABLE (new_type));

      /* If t is not its main variant then make t unreachable from its
	 main variant list.  Otherwise we'd queue up a lot of duplicates
	 there.  */
      if (t != TYPE_MAIN_VARIANT (t))
	{
	  tree tem = TYPE_MAIN_VARIANT (t);
	  while (tem && TYPE_NEXT_VARIANT (tem) != t)
	    tem = TYPE_NEXT_VARIANT (tem);
	  if (tem)
	    TYPE_NEXT_VARIANT (tem) = TYPE_NEXT_VARIANT (t);
	  TYPE_NEXT_VARIANT (t) = NULL_TREE;
	}

      /* If we are a pointer then remove us from the pointer-to or
	 reference-to chain.  Otherwise we'd queue up a lot of duplicates
	 there.  */
      if (TREE_CODE (t) == POINTER_TYPE)
	{
	  if (TYPE_POINTER_TO (TREE_TYPE (t)) == t)
	    TYPE_POINTER_TO (TREE_TYPE (t)) = TYPE_NEXT_PTR_TO (t);
	  else
	    {
	      tree tem = TYPE_POINTER_TO (TREE_TYPE (t));
	      while (tem && TYPE_NEXT_PTR_TO (tem) != t)
		tem = TYPE_NEXT_PTR_TO (tem);
	      if (tem)
		TYPE_NEXT_PTR_TO (tem) = TYPE_NEXT_PTR_TO (t);
	    }
	  TYPE_NEXT_PTR_TO (t) = NULL_TREE;
	}
      else if (TREE_CODE (t) == REFERENCE_TYPE)
	{
	  if (TYPE_REFERENCE_TO (TREE_TYPE (t)) == t)
	    TYPE_REFERENCE_TO (TREE_TYPE (t)) = TYPE_NEXT_REF_TO (t);
	  else
	    {
	      tree tem = TYPE_REFERENCE_TO (TREE_TYPE (t));
	      while (tem && TYPE_NEXT_REF_TO (tem) != t)
		tem = TYPE_NEXT_REF_TO (tem);
	      if (tem)
		TYPE_NEXT_REF_TO (tem) = TYPE_NEXT_REF_TO (t);
	    }
	  TYPE_NEXT_REF_TO (t) = NULL_TREE;
	}

      leader->type = t;
      leader->leader = new_type;
      t = new_type;
    }
  else
    {
      leader->type = t;
      leader->leader = t;
      /* We're the type leader.  Make our TYPE_MAIN_VARIANT valid.  */
      if (TYPE_MAIN_VARIANT (t) != t
	  && TYPE_MAIN_VARIANT (t) != mv_leader)
	{
	  /* Remove us from our main variant list as we are not the variant
	     leader and the variant leader will change.  */
	  tree tem = TYPE_MAIN_VARIANT (t);
	  while (tem && TYPE_NEXT_VARIANT (tem) != t)
	    tem = TYPE_NEXT_VARIANT (tem);
	  if (tem)
	    TYPE_NEXT_VARIANT (tem) = TYPE_NEXT_VARIANT (t);
	  TYPE_NEXT_VARIANT (t) = NULL_TREE;
	  /* Adjust our main variant.  Linking us into its variant list
	     will happen at fixup time.  */
	  TYPE_MAIN_VARIANT (t) = mv_leader;
	}
      *slot = (void *) t;
    }

  return t;
}
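
/* Usage sketch (illustrative only; the function and its name below are
   hypothetical and not used anywhere in GCC): a streamer would
   register every type it reads in and continue with the returned
   leader, so that identical types from different TUs collapse to a
   single tree node.  */
#if 0
static tree
example_merge_streamed_type (tree streamed)
{
  /* The result is either STREAMED itself or a previously registered,
     structurally identical type.  */
  return gimple_register_type (streamed);
}
#endif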


/* Returns nonzero if P1 and P2 are equal.  */

static int
gimple_canonical_type_eq (const void *p1, const void *p2)
{
  const_tree t1 = (const_tree) p1;
  const_tree t2 = (const_tree) p2;
  return gimple_types_compatible_p (CONST_CAST_TREE (t1),
				    CONST_CAST_TREE (t2), GTC_DIAG);
}

/* Register type T in the global canonical type table
   gimple_canonical_types.  If another type T', compatible with T in
   GTC_DIAG mode, already existed in gimple_canonical_types then record
   T' as the canonical type of T and return it, otherwise use T itself.
   This is used by LTO to compute TYPE_CANONICAL for types read from
   different TUs.  */

tree
gimple_register_canonical_type (tree t)
{
  void **slot;
  tree orig_t = t;

  gcc_assert (TYPE_P (t));

  if (TYPE_CANONICAL (t))
    return TYPE_CANONICAL (t);

  /* Always register the type itself first so that if it turns out
     to be the canonical type it will be the one we merge to as well.  */
  t = gimple_register_type (t);

  /* Always register the main variant first.  This is important so we
     pick up the non-typedef variants as canonical, otherwise we'll end
     up taking typedef ids for structure tags during comparison.  */
  if (TYPE_MAIN_VARIANT (t) != t)
    gimple_register_canonical_type (TYPE_MAIN_VARIANT (t));

  if (gimple_canonical_types == NULL)
    gimple_canonical_types = htab_create_ggc (16381, gimple_canonical_type_hash,
					      gimple_canonical_type_eq, 0);

  slot = htab_find_slot (gimple_canonical_types, t, INSERT);
  if (*slot
      && *(tree *)slot != t)
    {
      tree new_type = (tree) *((tree *) slot);

      TYPE_CANONICAL (t) = new_type;
      t = new_type;
    }
  else
    {
      TYPE_CANONICAL (t) = t;
      *slot = (void *) t;
    }

  /* Also cache the canonical type in the non-leaders.  */
  TYPE_CANONICAL (orig_t) = t;

  return t;
}
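
/* Usage sketch (illustrative only; the function and its name below are
   hypothetical and not used anywhere in GCC): after registration the
   canonical type is cached in TYPE_CANONICAL, so repeated queries are
   cheap.  */
#if 0
static void
example_register_canonical (tree t)
{
  tree ct = gimple_register_canonical_type (t);
  gcc_assert (TYPE_CANONICAL (t) == ct);
}
#endif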


/* Show statistics on references to the global type table gimple_types.  */

void
print_gimple_types_stats (void)
{
  if (gimple_types)
    fprintf (stderr, "GIMPLE type table: size %ld, %ld elements, "
	     "%ld searches, %ld collisions (ratio: %f)\n",
	     (long) htab_size (gimple_types),
	     (long) htab_elements (gimple_types),
	     (long) gimple_types->searches,
	     (long) gimple_types->collisions,
	     htab_collisions (gimple_types));
  else
    fprintf (stderr, "GIMPLE type table is empty\n");
  if (type_hash_cache)
    fprintf (stderr, "GIMPLE type hash table: size %ld, %ld elements, "
	     "%ld searches, %ld collisions (ratio: %f)\n",
	     (long) htab_size (type_hash_cache),
	     (long) htab_elements (type_hash_cache),
	     (long) type_hash_cache->searches,
	     (long) type_hash_cache->collisions,
	     htab_collisions (type_hash_cache));
  else
    fprintf (stderr, "GIMPLE type hash table is empty\n");
  if (gimple_canonical_types)
    fprintf (stderr, "GIMPLE canonical type table: size %ld, %ld elements, "
	     "%ld searches, %ld collisions (ratio: %f)\n",
	     (long) htab_size (gimple_canonical_types),
	     (long) htab_elements (gimple_canonical_types),
	     (long) gimple_canonical_types->searches,
	     (long) gimple_canonical_types->collisions,
	     htab_collisions (gimple_canonical_types));
  else
    fprintf (stderr, "GIMPLE canonical type table is empty\n");
  if (canonical_type_hash_cache)
    fprintf (stderr, "GIMPLE canonical type hash table: size %ld, %ld elements, "
	     "%ld searches, %ld collisions (ratio: %f)\n",
	     (long) htab_size (canonical_type_hash_cache),
	     (long) htab_elements (canonical_type_hash_cache),
	     (long) canonical_type_hash_cache->searches,
	     (long) canonical_type_hash_cache->collisions,
	     htab_collisions (canonical_type_hash_cache));
  else
    fprintf (stderr, "GIMPLE canonical type hash table is empty\n");
  if (gtc_visited)
    fprintf (stderr, "GIMPLE type comparison table: size %ld, %ld "
	     "elements, %ld searches, %ld collisions (ratio: %f)\n",
	     (long) htab_size (gtc_visited),
	     (long) htab_elements (gtc_visited),
	     (long) gtc_visited->searches,
	     (long) gtc_visited->collisions,
	     htab_collisions (gtc_visited));
  else
    fprintf (stderr, "GIMPLE type comparison table is empty\n");
}

/* Free the gimple type hashtables used for LTO type merging.  */

void
free_gimple_type_tables (void)
{
  /* Last chance to print stats for the tables.  */
  if (flag_lto_report)
    print_gimple_types_stats ();

  if (gimple_types)
    {
      htab_delete (gimple_types);
      gimple_types = NULL;
    }
  if (gimple_canonical_types)
    {
      htab_delete (gimple_canonical_types);
      gimple_canonical_types = NULL;
    }
  if (type_hash_cache)
    {
      htab_delete (type_hash_cache);
      type_hash_cache = NULL;
    }
  if (canonical_type_hash_cache)
    {
      htab_delete (canonical_type_hash_cache);
      canonical_type_hash_cache = NULL;
    }
  if (gtc_visited)
    {
      htab_delete (gtc_visited);
      obstack_free (&gtc_ob, NULL);
      gtc_visited = NULL;
    }
  gimple_type_leader = NULL;
}


/* Return a type the same as TYPE except unsigned or
   signed according to UNSIGNEDP.  */

static tree
gimple_signed_or_unsigned_type (bool unsignedp, tree type)
{
  tree type1;

  type1 = TYPE_MAIN_VARIANT (type);
  if (type1 == signed_char_type_node
      || type1 == char_type_node
      || type1 == unsigned_char_type_node)
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (type1 == integer_type_node || type1 == unsigned_type_node)
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (type1 == short_integer_type_node || type1 == short_unsigned_type_node)
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (type1 == long_integer_type_node || type1 == long_unsigned_type_node)
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (type1 == long_long_integer_type_node
      || type1 == long_long_unsigned_type_node)
    return unsignedp
	   ? long_long_unsigned_type_node
	   : long_long_integer_type_node;
  if (int128_integer_type_node
      && (type1 == int128_integer_type_node
	  || type1 == int128_unsigned_type_node))
    return unsignedp
	   ? int128_unsigned_type_node
	   : int128_integer_type_node;
#if HOST_BITS_PER_WIDE_INT >= 64
  if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node)
    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
#endif
  if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node)
    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
  if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node)
    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
  if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node)
    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
  if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node)
    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;

#define GIMPLE_FIXED_TYPES(NAME)	    \
  if (type1 == short_ ## NAME ## _type_node \
      || type1 == unsigned_short_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_short_ ## NAME ## _type_node \
		     : short_ ## NAME ## _type_node; \
  if (type1 == NAME ## _type_node \
      || type1 == unsigned_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_ ## NAME ## _type_node \
		     : NAME ## _type_node; \
  if (type1 == long_ ## NAME ## _type_node \
      || type1 == unsigned_long_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_long_ ## NAME ## _type_node \
		     : long_ ## NAME ## _type_node; \
  if (type1 == long_long_ ## NAME ## _type_node \
      || type1 == unsigned_long_long_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
		     : long_long_ ## NAME ## _type_node;

#define GIMPLE_FIXED_MODE_TYPES(NAME) \
  if (type1 == NAME ## _type_node \
      || type1 == u ## NAME ## _type_node) \
    return unsignedp ? u ## NAME ## _type_node \
		     : NAME ## _type_node;

#define GIMPLE_FIXED_TYPES_SAT(NAME) \
  if (type1 == sat_ ## short_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
		     : sat_ ## short_ ## NAME ## _type_node; \
  if (type1 == sat_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
		     : sat_ ## NAME ## _type_node; \
  if (type1 == sat_ ## long_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
		     : sat_ ## long_ ## NAME ## _type_node; \
  if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
		     : sat_ ## long_long_ ## NAME ## _type_node;

#define GIMPLE_FIXED_MODE_TYPES_SAT(NAME)	\
  if (type1 == sat_ ## NAME ## _type_node \
      || type1 == sat_ ## u ## NAME ## _type_node) \
    return unsignedp ? sat_ ## u ## NAME ## _type_node \
		     : sat_ ## NAME ## _type_node;

  GIMPLE_FIXED_TYPES (fract);
  GIMPLE_FIXED_TYPES_SAT (fract);
  GIMPLE_FIXED_TYPES (accum);
  GIMPLE_FIXED_TYPES_SAT (accum);

  GIMPLE_FIXED_MODE_TYPES (qq);
  GIMPLE_FIXED_MODE_TYPES (hq);
  GIMPLE_FIXED_MODE_TYPES (sq);
  GIMPLE_FIXED_MODE_TYPES (dq);
  GIMPLE_FIXED_MODE_TYPES (tq);
  GIMPLE_FIXED_MODE_TYPES_SAT (qq);
  GIMPLE_FIXED_MODE_TYPES_SAT (hq);
  GIMPLE_FIXED_MODE_TYPES_SAT (sq);
  GIMPLE_FIXED_MODE_TYPES_SAT (dq);
  GIMPLE_FIXED_MODE_TYPES_SAT (tq);
  GIMPLE_FIXED_MODE_TYPES (ha);
  GIMPLE_FIXED_MODE_TYPES (sa);
  GIMPLE_FIXED_MODE_TYPES (da);
  GIMPLE_FIXED_MODE_TYPES (ta);
  GIMPLE_FIXED_MODE_TYPES_SAT (ha);
  GIMPLE_FIXED_MODE_TYPES_SAT (sa);
  GIMPLE_FIXED_MODE_TYPES_SAT (da);
  GIMPLE_FIXED_MODE_TYPES_SAT (ta);

  /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not
     the precision; they have precision set to match their range, but
     may use a wider mode to match an ABI.  If we change modes, we may
     wind up with bad conversions.  For INTEGER_TYPEs in C, must check
     the precision as well, so as to yield correct results for
     bit-field types.  C++ does not have these separate bit-field
     types, and producing a signed or unsigned variant of an
     ENUMERAL_TYPE may cause other problems as well.  */
  if (!INTEGRAL_TYPE_P (type)
      || TYPE_UNSIGNED (type) == unsignedp)
    return type;

#define TYPE_OK(node) \
  (TYPE_MODE (type) == TYPE_MODE (node) \
   && TYPE_PRECISION (type) == TYPE_PRECISION (node))
  if (TYPE_OK (signed_char_type_node))
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (TYPE_OK (integer_type_node))
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (TYPE_OK (short_integer_type_node))
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (TYPE_OK (long_integer_type_node))
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (TYPE_OK (long_long_integer_type_node))
    return (unsignedp
	    ? long_long_unsigned_type_node
	    : long_long_integer_type_node);
  if (int128_integer_type_node && TYPE_OK (int128_integer_type_node))
    return (unsignedp
	    ? int128_unsigned_type_node
	    : int128_integer_type_node);

#if HOST_BITS_PER_WIDE_INT >= 64
  if (TYPE_OK (intTI_type_node))
    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
#endif
  if (TYPE_OK (intDI_type_node))
    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
  if (TYPE_OK (intSI_type_node))
    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
  if (TYPE_OK (intHI_type_node))
    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
  if (TYPE_OK (intQI_type_node))
    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;

#undef GIMPLE_FIXED_TYPES
#undef GIMPLE_FIXED_MODE_TYPES
#undef GIMPLE_FIXED_TYPES_SAT
#undef GIMPLE_FIXED_MODE_TYPES_SAT
#undef TYPE_OK

  return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
}


/* Return an unsigned type the same as TYPE in other respects.  */

tree
gimple_unsigned_type (tree type)
{
  return gimple_signed_or_unsigned_type (true, type);
}


/* Return a signed type the same as TYPE in other respects.  */

tree
gimple_signed_type (tree type)
{
  return gimple_signed_or_unsigned_type (false, type);
}
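
/* For instance (an illustrative example, not code):
   gimple_unsigned_type (long_integer_type_node) yields
   long_unsigned_type_node, while gimple_signed_type applied to a
   nonstandard 5-bit unsigned type (e.g. a bit-field type) matches
   none of the standard nodes above, since no standard node has that
   precision, and falls through to
   build_nonstandard_integer_type (5, false).  */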


/* Return the type-based alias set for T, which may be an expression
   or a type.  Return -1 if we don't do anything special.  */

alias_set_type
gimple_get_alias_set (tree t)
{
  tree u;

  /* Permit type-punning when accessing a union, provided the access
     is directly through the union.  For example, this code does not
     permit taking the address of a union member and then storing
     through it.  Even the type-punning allowed here is a GCC
     extension, albeit a common and useful one; the C standard says
     that such accesses have implementation-defined behavior.  */
  for (u = t;
       TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
       u = TREE_OPERAND (u, 0))
    if (TREE_CODE (u) == COMPONENT_REF
	&& TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
      return 0;

  /* That's all the expressions we handle specially.  */
  if (!TYPE_P (t))
    return -1;

  /* For convenience, follow the C standard when dealing with
     character types.  Any object may be accessed via an lvalue that
     has character type.  */
  if (t == char_type_node
      || t == signed_char_type_node
      || t == unsigned_char_type_node)
    return 0;

  /* Allow aliasing between signed and unsigned variants of the same
     type.  We treat the signed variant as canonical.  */
  if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t))
    {
      tree t1 = gimple_signed_type (t);

      /* t1 == t can happen for boolean nodes which are always unsigned.  */
      if (t1 != t)
	return get_alias_set (t1);
    }

  return -1;
}
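
/* For instance (an illustrative example, not code): in

     union u { int i; float f; } x;
     x.f = 1.0f;          /+ COMPONENT_REF directly through the union +/
     float *p = &x.f;
     *p = 2.0f;           /+ access through a pointer to the member +/

   the first store is an access directly through the union and gets
   alias set 0, while the second is an ordinary float access and
   receives no special treatment from this function.  */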


/* Data structure used to count the number of dereferences to PTR
   inside an expression.  */
struct count_ptr_d
{
  tree ptr;
  unsigned num_stores;
  unsigned num_loads;
};

/* Helper for count_uses_and_derefs.  Called by walk_tree to look for
   (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA.  */

static tree
count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  struct count_ptr_d *count_p = (struct count_ptr_d *) wi_p->info;

  /* Do not walk inside ADDR_EXPR nodes.  In the expression &ptr->fld,
     pointer 'ptr' is *not* dereferenced, it is simply used to compute
     the address of 'fld' as 'ptr + offsetof(fld)'.  */
  if (TREE_CODE (*tp) == ADDR_EXPR)
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (*tp) == MEM_REF && TREE_OPERAND (*tp, 0) == count_p->ptr)
    {
      if (wi_p->is_lhs)
	count_p->num_stores++;
      else
	count_p->num_loads++;
    }

  return NULL_TREE;
}

/* Count the number of direct and indirect uses for pointer PTR in
   statement STMT.  The number of direct uses is stored in
   *NUM_USES_P.  Indirect references are counted separately depending
   on whether they are store or load operations.  The counts are
   stored in *NUM_STORES_P and *NUM_LOADS_P.  */

void
count_uses_and_derefs (tree ptr, gimple stmt, unsigned *num_uses_p,
		       unsigned *num_loads_p, unsigned *num_stores_p)
{
  ssa_op_iter i;
  tree use;

  *num_uses_p = 0;
  *num_loads_p = 0;
  *num_stores_p = 0;

  /* Find out the total number of uses of PTR in STMT.  */
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
    if (use == ptr)
      (*num_uses_p)++;

  /* Now count the number of indirect references to PTR.  This is
     truly awful, but we don't have much choice.  There are no parent
     pointers inside INDIRECT_REFs, so an expression like
     '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
     find all the indirect and direct uses of x_1 inside.  The only
     shortcut we can take is the fact that GIMPLE only allows
     INDIRECT_REFs inside the expressions below.  */
  if (is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_RETURN
      || gimple_code (stmt) == GIMPLE_ASM
      || is_gimple_call (stmt))
    {
      struct walk_stmt_info wi;
      struct count_ptr_d count;

      count.ptr = ptr;
      count.num_stores = 0;
      count.num_loads = 0;

      memset (&wi, 0, sizeof (wi));
      wi.info = &count;
      walk_gimple_op (stmt, count_ptr_derefs, &wi);

      *num_stores_p = count.num_stores;
      *num_loads_p = count.num_loads;
    }

  gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p);
}
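
/* For instance (an illustrative worked example based on the comment
   above): for the statement

     *x_1 = foo (x_1, *x_1);

   the counts are *NUM_USES_P == 3 (each appearance of x_1),
   *NUM_STORES_P == 1 (the store through *x_1 on the LHS) and
   *NUM_LOADS_P == 1 (the load of *x_1 in the argument list).  */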

/* From a tree operand OP return the base of a load or store operation
   or NULL_TREE if OP is not a load or a store.  */

static tree
get_base_loadstore (tree op)
{
  while (handled_component_p (op))
    op = TREE_OPERAND (op, 0);
  if (DECL_P (op)
      || INDIRECT_REF_P (op)
      || TREE_CODE (op) == MEM_REF
      || TREE_CODE (op) == TARGET_MEM_REF)
    return op;
  return NULL_TREE;
}

/* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
   VISIT_ADDR if non-NULL on loads, stores and address-taken operands,
   passing the STMT, the base of the operand and DATA to it.  The base
   will be either a decl, an indirect reference (including TARGET_MEM_REF)
   or the argument of an address expression.
   Returns the results of these callbacks or'ed.  */

bool
walk_stmt_load_store_addr_ops (gimple stmt, void *data,
			       bool (*visit_load)(gimple, tree, void *),
			       bool (*visit_store)(gimple, tree, void *),
			       bool (*visit_addr)(gimple, tree, void *))
{
  bool ret = false;
  unsigned i;
  if (gimple_assign_single_p (stmt))
    {
      tree lhs, rhs;
      if (visit_store)
	{
	  lhs = get_base_loadstore (gimple_assign_lhs (stmt));
	  if (lhs)
	    ret |= visit_store (stmt, lhs, data);
	}
      rhs = gimple_assign_rhs1 (stmt);
      while (handled_component_p (rhs))
	rhs = TREE_OPERAND (rhs, 0);
      if (visit_addr)
	{
	  if (TREE_CODE (rhs) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
	  else if (TREE_CODE (rhs) == TARGET_MEM_REF
		   && TREE_CODE (TMR_BASE (rhs)) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (rhs), 0), data);
	  else if (TREE_CODE (rhs) == OBJ_TYPE_REF
		   && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
						   0), data);
	  lhs = gimple_assign_lhs (stmt);
	  if (TREE_CODE (lhs) == TARGET_MEM_REF
	      && TREE_CODE (TMR_BASE (lhs)) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (lhs), 0), data);
	}
      if (visit_load)
	{
	  rhs = get_base_loadstore (rhs);
	  if (rhs)
	    ret |= visit_load (stmt, rhs, data);
	}
    }
  else if (visit_addr
	   && (is_gimple_assign (stmt)
	       || gimple_code (stmt) == GIMPLE_COND))
    {
      for (i = 0; i < gimple_num_ops (stmt); ++i)
	if (gimple_op (stmt, i)
	    && TREE_CODE (gimple_op (stmt, i)) == ADDR_EXPR)
	  ret |= visit_addr (stmt, TREE_OPERAND (gimple_op (stmt, i), 0), data);
    }
  else if (is_gimple_call (stmt))
    {
      if (visit_store)
	{
	  tree lhs = gimple_call_lhs (stmt);
	  if (lhs)
	    {
	      lhs = get_base_loadstore (lhs);
	      if (lhs)
		ret |= visit_store (stmt, lhs, data);
	    }
	}
      if (visit_load || visit_addr)
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree rhs = gimple_call_arg (stmt, i);
	    if (visit_addr
		&& TREE_CODE (rhs) == ADDR_EXPR)
	      ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
	    else if (visit_load)
	      {
		rhs = get_base_loadstore (rhs);
		if (rhs)
		  ret |= visit_load (stmt, rhs, data);
	      }
	  }
      if (visit_addr
	  && gimple_call_chain (stmt)
	  && TREE_CODE (gimple_call_chain (stmt)) == ADDR_EXPR)
	ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (stmt), 0),
			   data);
      if (visit_addr
	  && gimple_call_return_slot_opt_p (stmt)
	  && gimple_call_lhs (stmt) != NULL_TREE
	  && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
	ret |= visit_addr (stmt, gimple_call_lhs (stmt), data);
    }
  else if (gimple_code (stmt) == GIMPLE_ASM)
    {
      unsigned noutputs;
      const char *constraint;
      const char **oconstraints;
      bool allows_mem, allows_reg, is_inout;
      noutputs = gimple_asm_noutputs (stmt);
      oconstraints = XALLOCAVEC (const char *, noutputs);
      if (visit_store || visit_addr)
	for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (stmt, i);
	    tree op = get_base_loadstore (TREE_VALUE (link));
	    if (op && visit_store)
	      ret |= visit_store (stmt, op, data);
	    if (visit_addr)
	      {
		constraint = TREE_STRING_POINTER
		    (TREE_VALUE (TREE_PURPOSE (link)));
		oconstraints[i] = constraint;
		parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
					 &allows_reg, &is_inout);
		if (op && !allows_reg && allows_mem)
		  ret |= visit_addr (stmt, op, data);
	      }
	  }
      if (visit_load || visit_addr)
	for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    if (visit_addr
		&& TREE_CODE (op) == ADDR_EXPR)
	      ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
	    else if (visit_load || visit_addr)
	      {
		op = get_base_loadstore (op);
		if (op)
		  {
		    if (visit_load)
		      ret |= visit_load (stmt, op, data);
		    if (visit_addr)
		      {
			constraint = TREE_STRING_POINTER
			    (TREE_VALUE (TREE_PURPOSE (link)));
			parse_input_constraint (&constraint, 0, 0, noutputs,
						0, oconstraints,
						&allows_mem, &allows_reg);
			if (!allows_reg && allows_mem)
			  ret |= visit_addr (stmt, op, data);
		      }
		  }
	      }
	  }
    }
  else if (gimple_code (stmt) == GIMPLE_RETURN)
    {
      tree op = gimple_return_retval (stmt);
      if (op)
	{
	  if (visit_addr
	      && TREE_CODE (op) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
	  else if (visit_load)
	    {
	      op = get_base_loadstore (op);
	      if (op)
		ret |= visit_load (stmt, op, data);
	    }
	}
    }
  else if (visit_addr
	   && gimple_code (stmt) == GIMPLE_PHI)
    {
      for (i = 0; i < gimple_phi_num_args (stmt); ++i)
	{
	  tree op = PHI_ARG_DEF (stmt, i);
	  if (TREE_CODE (op) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
	}
    }

  return ret;
}
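
/* Usage sketch (illustrative only; the functions and their names below
   are hypothetical and not used anywhere in GCC): collect the UIDs of
   all decls STMT loads from via the load callback.  */
#if 0
static bool
example_note_loaded_decl (gimple stmt ATTRIBUTE_UNUSED, tree base, void *data)
{
  if (DECL_P (base))
    bitmap_set_bit ((bitmap) data, DECL_UID (base));
  return false;
}

static bool
example_collect_loaded_decls (gimple stmt, bitmap loaded)
{
  return walk_stmt_load_store_addr_ops (stmt, loaded,
					example_note_loaded_decl, NULL, NULL);
}
#endif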

/* Like walk_stmt_load_store_addr_ops but with NULL visit_addr.  IPA-CP
   should make a faster clone for this case.  */

bool
walk_stmt_load_store_ops (gimple stmt, void *data,
			  bool (*visit_load)(gimple, tree, void *),
			  bool (*visit_store)(gimple, tree, void *))
{
  return walk_stmt_load_store_addr_ops (stmt, data,
					visit_load, visit_store, NULL);
}

/* Helper for gimple_ior_addresses_taken.  */

static bool
gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED,
			      tree addr, void *data)
{
  bitmap addresses_taken = (bitmap) data;
  addr = get_base_address (addr);
  if (addr
      && DECL_P (addr))
    {
      bitmap_set_bit (addresses_taken, DECL_UID (addr));
      return true;
    }
  return false;
}

/* Set the bit for the uid of all decls that have their address taken
   in STMT in the ADDRESSES_TAKEN bitmap.  Returns true if there
   were any in this stmt.  */

bool
gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
{
  return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
					gimple_ior_addresses_taken_1);
}
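
/* Usage sketch (illustrative only; the function and its name below are
   hypothetical and not used anywhere in GCC): accumulate the
   address-taken decls of a whole statement sequence into one bitmap.  */
#if 0
static void
example_collect_addresses_taken (gimple_seq seq, bitmap addresses_taken)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
    gimple_ior_addresses_taken (addresses_taken, gsi_stmt (gsi));
}
#endif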


/* Return a printable name for symbol DECL.  */

const char *
gimple_decl_printable_name (tree decl, int verbosity)
{
  if (!DECL_NAME (decl))
    return NULL;

  if (DECL_ASSEMBLER_NAME_SET_P (decl))
    {
      const char *str, *mangled_str;
      int dmgl_opts = DMGL_NO_OPTS;

      if (verbosity >= 2)
	{
	  dmgl_opts = DMGL_VERBOSE
		      | DMGL_ANSI
		      | DMGL_GNU_V3
		      | DMGL_RET_POSTFIX;
	  if (TREE_CODE (decl) == FUNCTION_DECL)
	    dmgl_opts |= DMGL_PARAMS;
	}

      mangled_str = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
      str = cplus_demangle_v3 (mangled_str, dmgl_opts);
      return (str) ? str : mangled_str;
    }

  return IDENTIFIER_POINTER (DECL_NAME (decl));
}
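
/* For instance (an illustrative example, not code): for a C++ function
   decl whose assembler name is "_Z3fooi", verbosity 2 demangles with
   DMGL_PARAMS and yields something like "foo(int)", while lower
   verbosities demangle with default options and yield just the name.  */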

/* Return true when STMT is a call to the built-in function CODE.  */

bool
gimple_call_builtin_p (gimple stmt, enum built_in_function code)
{
  tree fndecl;
  return (is_gimple_call (stmt)
	  && (fndecl = gimple_call_fndecl (stmt)) != NULL
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (fndecl) == code);
}
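
/* Usage sketch (illustrative only; the function and its name below are
   hypothetical and not used anywhere in GCC): dispatch on a recognized
   builtin call.  */
#if 0
static bool
example_is_memset_call (gimple stmt)
{
  return gimple_call_builtin_p (stmt, BUILT_IN_MEMSET);
}
#endif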

/* Return true if STMT clobbers memory.  STMT is required to be a
   GIMPLE_ASM.  */

bool
gimple_asm_clobbers_memory_p (const_gimple stmt)
{
  unsigned i;

  for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
    {
      tree op = gimple_asm_clobber_op (stmt, i);
      if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op)), "memory") == 0)
	return true;
    }

  return false;
}
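
/* For instance (an illustrative example, not code): the classic
   compiler barrier

     __asm__ __volatile__ ("" : : : "memory");

   carries a "memory" clobber and makes this predicate return true.  */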
#include "gt-gimple.h"