/* Gimple IR support functions.

   Copyright 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
   Contributed by Aldy Hernandez <aldyh@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "target.h"
#include "tree.h"
#include "ggc.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "gimple.h"
#include "toplev.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "value-prof.h"
#include "flags.h"
#include "alias.h"
#include "demangle.h"

/* Global type table.  FIXME lto, it should be possible to re-use some
   of the type hashing routines in tree.c (type_hash_canon, type_hash_lookup,
   etc), but those assume that types were built with the various
   build_*_type routines which is not the case with the streamer.  */
static htab_t gimple_types;
static struct pointer_map_t *type_hash_cache;

/* Global type comparison cache.  */
static htab_t gtc_visited;
static struct obstack gtc_ob;

/* All the tuples have their operand vector (if present) at the very bottom
   of the structure.  Therefore, the offset required to find the
   operands vector is the size of the structure minus the size of the
   one-element tree array at the end (see gimple_ops).  */
#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
  (HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
EXPORTED_CONST size_t gimple_ops_offset_[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT

#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof (struct STRUCT),
static const size_t gsstruct_code_size[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT

#define DEFGSCODE(SYM, NAME, GSSCODE)	NAME,
const char *const gimple_code_name[] = {
#include "gimple.def"
};
#undef DEFGSCODE

#define DEFGSCODE(SYM, NAME, GSSCODE)	GSSCODE,
EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = {
#include "gimple.def"
};
#undef DEFGSCODE

#ifdef GATHER_STATISTICS
/* Gimple stats.  */

int gimple_alloc_counts[(int) gimple_alloc_kind_all];
int gimple_alloc_sizes[(int) gimple_alloc_kind_all];

/* Keep in sync with gimple.h:enum gimple_alloc_kind.  */
static const char * const gimple_alloc_kind_names[] = {
    "assignments",
    "phi nodes",
    "conditionals",
    "sequences",
    "everything else"
};

#endif /* GATHER_STATISTICS */

/* A cache of gimple_seq objects.  Sequences are created and destroyed
   fairly often during gimplification.  */
static GTY ((deletable)) struct gimple_seq_d *gimple_seq_cache;

/* Private API manipulation functions shared only with some
   other files.  */
extern void gimple_set_stored_syms (gimple, bitmap, bitmap_obstack *);
extern void gimple_set_loaded_syms (gimple, bitmap, bitmap_obstack *);

/* Gimple tuple constructors.
   Note: Any constructor taking a ``gimple_seq'' as a parameter can
   be passed NULL to start with an empty sequence.  */

/* Set the code for statement G to CODE.  */

static inline void
gimple_set_code (gimple g, enum gimple_code code)
{
  g->gsbase.code = code;
}

/* Return the number of bytes needed to hold a GIMPLE statement with
   code CODE.  */

static inline size_t
gimple_size (enum gimple_code code)
{
  return gsstruct_code_size[gss_for_code (code)];
}

/* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
   operands.  */

gimple
gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
{
  size_t size;
  gimple stmt;

  size = gimple_size (code);
  if (num_ops > 0)
    size += sizeof (tree) * (num_ops - 1);

#ifdef GATHER_STATISTICS
  {
    enum gimple_alloc_kind kind = gimple_alloc_kind (code);
    gimple_alloc_counts[(int) kind]++;
    gimple_alloc_sizes[(int) kind] += size;
  }
#endif

  stmt = (gimple) ggc_alloc_cleared_stat (size PASS_MEM_STAT);
  gimple_set_code (stmt, code);
  gimple_set_num_ops (stmt, num_ops);

  /* Do not call gimple_set_modified here as it has other side
     effects and this tuple is still not completely built.  */
  stmt->gsbase.modified = 1;

  return stmt;
}
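
/* Example (an illustrative sketch, not part of the original sources):
   a GIMPLE_ASSIGN "a = b + c" is allocated with NUM_OPS == 3 (lhs,
   rhs1, rhs2), so the computation above reserves
   gimple_size (GIMPLE_ASSIGN) + 2 * sizeof (tree) bytes; the first
   operand occupies the one-element tree array at the tail of the
   structure (see gimple_ops_offset_).  */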

/* Set SUBCODE to be the code of the expression computed by statement G.  */

static inline void
gimple_set_subcode (gimple g, unsigned subcode)
{
  /* We only have 16 bits for the RHS code.  Assert that we are not
     overflowing it.  */
  gcc_assert (subcode < (1 << 16));
  g->gsbase.subcode = subcode;
}


/* Build a tuple with operands.  CODE is the statement to build (which
   must be one of the GIMPLE_WITH_OPS tuples).  SUBCODE is the sub-code
   for the new tuple.  NUM_OPS is the number of operands to allocate.  */

#define gimple_build_with_ops(c, s, n) \
  gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)

static gimple
gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
                            unsigned num_ops MEM_STAT_DECL)
{
  gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
  gimple_set_subcode (s, subcode);

  return s;
}


/* Build a GIMPLE_RETURN statement returning RETVAL.  */

gimple
gimple_build_return (tree retval)
{
  gimple s = gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK, 1);
  if (retval)
    gimple_return_set_retval (s, retval);
  return s;
}

/* Reset alias information on call S.  */

void
gimple_call_reset_alias_info (gimple s)
{
  if (gimple_call_flags (s) & ECF_CONST)
    memset (gimple_call_use_set (s), 0, sizeof (struct pt_solution));
  else
    pt_solution_reset (gimple_call_use_set (s));
  if (gimple_call_flags (s) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
    memset (gimple_call_clobber_set (s), 0, sizeof (struct pt_solution));
  else
    pt_solution_reset (gimple_call_clobber_set (s));
}

/* Helper for gimple_build_call, gimple_build_call_vec and
   gimple_build_call_from_tree.  Build the basic components of a
   GIMPLE_CALL statement to function FN with NARGS arguments.  */

static inline gimple
gimple_build_call_1 (tree fn, unsigned nargs)
{
  gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
  if (TREE_CODE (fn) == FUNCTION_DECL)
    fn = build_fold_addr_expr (fn);
  gimple_set_op (s, 1, fn);
  gimple_call_reset_alias_info (s);
  return s;
}


/* Build a GIMPLE_CALL statement to function FN with the arguments
   specified in vector ARGS.  */

gimple
gimple_build_call_vec (tree fn, VEC(tree, heap) *args)
{
  unsigned i;
  unsigned nargs = VEC_length (tree, args);
  gimple call = gimple_build_call_1 (fn, nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, VEC_index (tree, args, i));

  return call;
}


/* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
   arguments.  The ... are the arguments.  */

gimple
gimple_build_call (tree fn, unsigned nargs, ...)
{
  va_list ap;
  gimple call;
  unsigned i;

  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));

  call = gimple_build_call_1 (fn, nargs);

  va_start (ap, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));
  va_end (ap);

  return call;
}
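
/* Example (an illustrative sketch, not part of the original sources):
   build "tmp = foo (a, b)" given FNDECL for foo and trees A, B and
   TMP, then append it to sequence SEQ:

     gimple call = gimple_build_call (fndecl, 2, a, b);
     gimple_call_set_lhs (call, tmp);
     gimple_seq_add_stmt (&seq, call);  */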


/* Build a GIMPLE_CALL statement from CALL_EXPR T.  Note that T is
   assumed to be in GIMPLE form already.  Minimal checking is done of
   this fact.  */

gimple
gimple_build_call_from_tree (tree t)
{
  unsigned i, nargs;
  gimple call;
  tree fndecl = get_callee_fndecl (t);

  gcc_assert (TREE_CODE (t) == CALL_EXPR);

  nargs = call_expr_nargs (t);
  call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));

  gimple_set_block (call, TREE_BLOCK (t));

  /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL.  */
  gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
  gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
  gimple_call_set_cannot_inline (call, CALL_CANNOT_INLINE_P (t));
  gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
  gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
  gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
  gimple_set_no_warning (call, TREE_NO_WARNING (t));

  return call;
}


/* Extract the operands and code for expression EXPR into *SUBCODE_P,
   *OP1_P and *OP2_P respectively.  */

void
extract_ops_from_tree (tree expr, enum tree_code *subcode_p, tree *op1_p,
                       tree *op2_p)
{
  enum gimple_rhs_class grhs_class;

  *subcode_p = TREE_CODE (expr);
  grhs_class = get_gimple_rhs_class (*subcode_p);

  if (grhs_class == GIMPLE_BINARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
    }
  else if (grhs_class == GIMPLE_UNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      *op1_p = expr;
      *op2_p = NULL_TREE;
    }
  else
    gcc_unreachable ();
}
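
/* Example (an illustrative sketch, not part of the original sources):
   for EXPR "b + c", extract_ops_from_tree stores PLUS_EXPR in
   *SUBCODE_P, B in *OP1_P and C in *OP2_P (GIMPLE_BINARY_RHS); for a
   plain SSA name it stores the name's own tree code with *OP2_P set
   to NULL_TREE (GIMPLE_SINGLE_RHS).  */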


/* Build a GIMPLE_ASSIGN statement.

   LHS is the left-hand side of the assignment.
   RHS is the right-hand side, which may be a unary, binary or single
   operand expression.  */

gimple
gimple_build_assign_stat (tree lhs, tree rhs MEM_STAT_DECL)
{
  enum tree_code subcode;
  tree op1, op2;

  extract_ops_from_tree (rhs, &subcode, &op1, &op2);
  return gimple_build_assign_with_ops_stat (subcode, lhs, op1, op2
                                            PASS_MEM_STAT);
}


/* Build a GIMPLE_ASSIGN statement with sub-code SUBCODE and operands
   OP1 and OP2.  If OP2 is NULL then SUBCODE must be of class
   GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS.  */

gimple
gimple_build_assign_with_ops_stat (enum tree_code subcode, tree lhs, tree op1,
                                   tree op2 MEM_STAT_DECL)
{
  unsigned num_ops;
  gimple p;

  /* Need 1 operand for LHS and 1 or 2 for the RHS (depending on the
     code).  */
  num_ops = get_gimple_rhs_num_ops (subcode) + 1;

  p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned) subcode, num_ops
                                  PASS_MEM_STAT);
  gimple_assign_set_lhs (p, lhs);
  gimple_assign_set_rhs1 (p, op1);
  if (op2)
    {
      gcc_assert (num_ops > 2);
      gimple_assign_set_rhs2 (p, op2);
    }

  return p;
}


/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.

   DST/SRC are the destination and source respectively.  You can pass
   ungimplified trees in DST or SRC, in which case they will be
   converted to a gimple operand if necessary.

   This function returns the newly created GIMPLE_ASSIGN tuple.  */

gimple
gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
{
  tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
  gimplify_and_add (t, seq_p);
  ggc_free (t);
  return gimple_seq_last_stmt (*seq_p);
}
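
/* Example (an illustrative sketch, not part of the original sources):
   emit "x = y + 1" into SEQ, gimplifying the RHS as needed:

     tree rhs = build2 (PLUS_EXPR, TREE_TYPE (y), y,
                        build_int_cst (TREE_TYPE (y), 1));
     gimple stmt = gimplify_assign (x, rhs, &seq);  */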


/* Build a GIMPLE_COND statement.

   PRED is the condition used to compare LHS and the RHS.
   T_LABEL is the label to jump to if the condition is true.
   F_LABEL is the label to jump to otherwise.  */

gimple
gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
                   tree t_label, tree f_label)
{
  gimple p;

  gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
  p = gimple_build_with_ops (GIMPLE_COND, pred_code, 4);
  gimple_cond_set_lhs (p, lhs);
  gimple_cond_set_rhs (p, rhs);
  gimple_cond_set_true_label (p, t_label);
  gimple_cond_set_false_label (p, f_label);
  return p;
}
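
/* Example (an illustrative sketch, not part of the original sources):
   build "if (a < b) goto l_true; else goto l_false;" from trees A and
   B and two LABEL_DECLs:

     gimple cond = gimple_build_cond (LT_EXPR, a, b, l_true, l_false);  */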


/* Extract operands for a GIMPLE_COND statement out of COND_EXPR tree COND.  */

void
gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
                               tree *lhs_p, tree *rhs_p)
{
  location_t loc = EXPR_LOCATION (cond);
  gcc_assert (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison
              || TREE_CODE (cond) == TRUTH_NOT_EXPR
              || is_gimple_min_invariant (cond)
              || SSA_VAR_P (cond));

  extract_ops_from_tree (cond, code_p, lhs_p, rhs_p);

  /* Canonicalize conditionals of the form 'if (!VAL)'.  */
  if (*code_p == TRUTH_NOT_EXPR)
    {
      *code_p = EQ_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = fold_convert_loc (loc, TREE_TYPE (*lhs_p), integer_zero_node);
    }
  /* Canonicalize conditionals of the form 'if (VAL)'.  */
  else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
    {
      *code_p = NE_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = fold_convert_loc (loc, TREE_TYPE (*lhs_p), integer_zero_node);
    }
}


/* Build a GIMPLE_COND statement from the conditional expression tree
   COND.  T_LABEL and F_LABEL are as in gimple_build_cond.  */

gimple
gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  return gimple_build_cond (code, lhs, rhs, t_label, f_label);
}

/* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
   boolean expression tree COND.  */

void
gimple_cond_set_condition_from_tree (gimple stmt, tree cond)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  gimple_cond_set_condition (stmt, code, lhs, rhs);
}

/* Build a GIMPLE_LABEL statement for LABEL.  */

gimple
gimple_build_label (tree label)
{
  gimple p = gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1);
  gimple_label_set_label (p, label);
  return p;
}

/* Build a GIMPLE_GOTO statement to label DEST.  */

gimple
gimple_build_goto (tree dest)
{
  gimple p = gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1);
  gimple_goto_set_dest (p, dest);
  return p;
}


/* Build a GIMPLE_NOP statement.  */

gimple
gimple_build_nop (void)
{
  return gimple_alloc (GIMPLE_NOP, 0);
}


/* Build a GIMPLE_BIND statement.
   VARS are the variables in BODY.
   BLOCK is the containing block.  */

gimple
gimple_build_bind (tree vars, gimple_seq body, tree block)
{
  gimple p = gimple_alloc (GIMPLE_BIND, 0);
  gimple_bind_set_vars (p, vars);
  if (body)
    gimple_bind_set_body (p, body);
  if (block)
    gimple_bind_set_block (p, block);
  return p;
}

/* Helper function to set the simple fields of an asm stmt.

   STRING is a pointer to a string that is the asm block's assembly code.
   NINPUTS is the number of register inputs.
   NOUTPUTS is the number of register outputs.
   NCLOBBERS is the number of clobbered registers.
   NLABELS is the number of destination labels.  */

static inline gimple
gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
                    unsigned nclobbers, unsigned nlabels)
{
  gimple p;
  int size = strlen (string);

  /* ASMs with labels cannot have outputs.  This should have been
     enforced by the front end.  */
  gcc_assert (nlabels == 0 || noutputs == 0);

  p = gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
                             ninputs + noutputs + nclobbers + nlabels);

  p->gimple_asm.ni = ninputs;
  p->gimple_asm.no = noutputs;
  p->gimple_asm.nc = nclobbers;
  p->gimple_asm.nl = nlabels;
  p->gimple_asm.string = ggc_alloc_string (string, size);

#ifdef GATHER_STATISTICS
  gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;
#endif

  return p;
}

/* Build a GIMPLE_ASM statement.

   STRING is the assembly code.
   INPUTS is a vector of the input register parameters.
   OUTPUTS is a vector of the output register parameters.
   CLOBBERS is a vector of the clobbered register parameters.
   LABELS is a vector of destination labels.  */

gimple
gimple_build_asm_vec (const char *string, VEC(tree,gc)* inputs,
                      VEC(tree,gc)* outputs, VEC(tree,gc)* clobbers,
                      VEC(tree,gc)* labels)
{
  gimple p;
  unsigned i;

  p = gimple_build_asm_1 (string,
                          VEC_length (tree, inputs),
                          VEC_length (tree, outputs),
                          VEC_length (tree, clobbers),
                          VEC_length (tree, labels));

  for (i = 0; i < VEC_length (tree, inputs); i++)
    gimple_asm_set_input_op (p, i, VEC_index (tree, inputs, i));

  for (i = 0; i < VEC_length (tree, outputs); i++)
    gimple_asm_set_output_op (p, i, VEC_index (tree, outputs, i));

  for (i = 0; i < VEC_length (tree, clobbers); i++)
    gimple_asm_set_clobber_op (p, i, VEC_index (tree, clobbers, i));

  for (i = 0; i < VEC_length (tree, labels); i++)
    gimple_asm_set_label_op (p, i, VEC_index (tree, labels, i));

  return p;
}
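
/* Example (an illustrative sketch, not part of the original sources):
   build the memory barrier "asm volatile ("" : : : "memory")"; each
   clobber operand is assumed to be a TREE_LIST whose TREE_VALUE is
   the clobber string:

     VEC(tree,gc) *clobbers = NULL;
     tree mem = build_tree_list (NULL_TREE, build_string (6, "memory"));
     VEC_safe_push (tree, gc, clobbers, mem);
     gimple a = gimple_build_asm_vec ("", NULL, NULL, clobbers, NULL);
     gimple_asm_set_volatile (a, true);  */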

/* Build a GIMPLE_CATCH statement.

   TYPES are the catch types.
   HANDLER is the exception handler.  */

gimple
gimple_build_catch (tree types, gimple_seq handler)
{
  gimple p = gimple_alloc (GIMPLE_CATCH, 0);
  gimple_catch_set_types (p, types);
  if (handler)
    gimple_catch_set_handler (p, handler);

  return p;
}

/* Build a GIMPLE_EH_FILTER statement.

   TYPES are the filter's types.
   FAILURE is the filter's failure action.  */

gimple
gimple_build_eh_filter (tree types, gimple_seq failure)
{
  gimple p = gimple_alloc (GIMPLE_EH_FILTER, 0);
  gimple_eh_filter_set_types (p, types);
  if (failure)
    gimple_eh_filter_set_failure (p, failure);

  return p;
}

/* Build a GIMPLE_EH_MUST_NOT_THROW statement.  */

gimple
gimple_build_eh_must_not_throw (tree decl)
{
  gimple p = gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 1);

  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
  gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
  gimple_eh_must_not_throw_set_fndecl (p, decl);

  return p;
}

/* Build a GIMPLE_TRY statement.

   EVAL is the expression to evaluate.
   CLEANUP is the cleanup expression.
   KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
   whether this is a try/catch or a try/finally respectively.  */

gimple
gimple_build_try (gimple_seq eval, gimple_seq cleanup,
                  enum gimple_try_flags kind)
{
  gimple p;

  gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
  p = gimple_alloc (GIMPLE_TRY, 0);
  gimple_set_subcode (p, kind);
  if (eval)
    gimple_try_set_eval (p, eval);
  if (cleanup)
    gimple_try_set_cleanup (p, cleanup);

  return p;
}

/* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.

   CLEANUP is the cleanup expression.  */

gimple
gimple_build_wce (gimple_seq cleanup)
{
  gimple p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
  if (cleanup)
    gimple_wce_set_cleanup (p, cleanup);

  return p;
}


/* Build a GIMPLE_RESX statement.  */

gimple
gimple_build_resx (int region)
{
  gimple p = gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0);
  p->gimple_eh_ctrl.region = region;
  return p;
}


/* The helper for constructing a gimple switch statement.
   INDEX is the switch's index.
   NLABELS is the number of labels in the switch excluding the default.
   DEFAULT_LABEL is the default label for the switch statement.  */

gimple
gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
{
  /* nlabels + 1 default label + 1 index.  */
  gimple p = gimple_build_with_ops (GIMPLE_SWITCH, ERROR_MARK,
                                    1 + (default_label != NULL) + nlabels);
  gimple_switch_set_index (p, index);
  if (default_label)
    gimple_switch_set_default_label (p, default_label);
  return p;
}


/* Build a GIMPLE_SWITCH statement.

   INDEX is the switch's index.
   NLABELS is the number of labels in the switch excluding the DEFAULT_LABEL.
   ... are the labels excluding the default.  */

gimple
gimple_build_switch (unsigned nlabels, tree index, tree default_label, ...)
{
  va_list al;
  unsigned i, offset;
  gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);

  /* Store the rest of the labels.  */
  va_start (al, default_label);
  offset = (default_label != NULL);
  for (i = 0; i < nlabels; i++)
    gimple_switch_set_label (p, i + offset, va_arg (al, tree));
  va_end (al);

  return p;
}
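
/* Example (an illustrative sketch, not part of the original sources):
   build a two-case switch on X, where DEFAULT_CASE, CASE1 and CASE2
   are assumed to be CASE_LABEL_EXPR trees:

     gimple s = gimple_build_switch (2, x, default_case, case1, case2);  */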


/* Build a GIMPLE_SWITCH statement.

   INDEX is the switch's index.
   DEFAULT_LABEL is the default label.
   ARGS is a vector of labels excluding the default.  */

gimple
gimple_build_switch_vec (tree index, tree default_label, VEC(tree, heap) *args)
{
  unsigned i, offset, nlabels = VEC_length (tree, args);
  gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);

  /* Copy the labels from the vector to the switch statement.  */
  offset = (default_label != NULL);
  for (i = 0; i < nlabels; i++)
    gimple_switch_set_label (p, i + offset, VEC_index (tree, args, i));

  return p;
}

/* Build a GIMPLE_EH_DISPATCH statement.  */

gimple
gimple_build_eh_dispatch (int region)
{
  gimple p = gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0);
  p->gimple_eh_ctrl.region = region;
  return p;
}

/* Build a new GIMPLE_DEBUG_BIND statement.

   VAR is bound to VALUE; block and location are taken from STMT.  */

gimple
gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
{
  gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
                                         (unsigned) GIMPLE_DEBUG_BIND, 2
                                         PASS_MEM_STAT);

  gimple_debug_bind_set_var (p, var);
  gimple_debug_bind_set_value (p, value);
  if (stmt)
    {
      gimple_set_block (p, gimple_block (stmt));
      gimple_set_location (p, gimple_location (stmt));
    }

  return p;
}


/* Build a GIMPLE_OMP_CRITICAL statement.

   BODY is the sequence of statements for which only one thread can execute.
   NAME is an optional identifier for this critical block.  */

gimple
gimple_build_omp_critical (gimple_seq body, tree name)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0);
  gimple_omp_critical_set_name (p, name);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}

/* Build a GIMPLE_OMP_FOR statement.

   BODY is the sequence of statements inside the for loop.
   CLAUSES are any of the OMP loop construct's clauses: private, firstprivate,
   lastprivate, reductions, ordered, schedule, and nowait.
   COLLAPSE is the collapse count.
   PRE_BODY is the sequence of statements that are loop invariant.  */

gimple
gimple_build_omp_for (gimple_seq body, tree clauses, size_t collapse,
                      gimple_seq pre_body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_FOR, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_for_set_clauses (p, clauses);
  p->gimple_omp_for.collapse = collapse;
  p->gimple_omp_for.iter = GGC_CNEWVEC (struct gimple_omp_for_iter, collapse);
  if (pre_body)
    gimple_omp_for_set_pre_body (p, pre_body);

  return p;
}


/* Build a GIMPLE_OMP_PARALLEL statement.

   BODY is the sequence of statements which are executed in parallel.
   CLAUSES are the OMP parallel construct's clauses.
   CHILD_FN is the function created for the parallel threads to execute.
   DATA_ARG are the shared data argument(s).  */

gimple
gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
                           tree data_arg)
{
  gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_parallel_set_clauses (p, clauses);
  gimple_omp_parallel_set_child_fn (p, child_fn);
  gimple_omp_parallel_set_data_arg (p, data_arg);

  return p;
}


/* Build a GIMPLE_OMP_TASK statement.

   BODY is the sequence of statements which are executed by the explicit task.
   CLAUSES are the OMP task construct's clauses.
   CHILD_FN is the function created for the parallel threads to execute.
   DATA_ARG are the shared data argument(s).
   COPY_FN is the optional function for firstprivate initialization.
   ARG_SIZE and ARG_ALIGN are the size and alignment of the data block.  */

gimple
gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
                       tree data_arg, tree copy_fn, tree arg_size,
                       tree arg_align)
{
  gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_task_set_clauses (p, clauses);
  gimple_omp_task_set_child_fn (p, child_fn);
  gimple_omp_task_set_data_arg (p, data_arg);
  gimple_omp_task_set_copy_fn (p, copy_fn);
  gimple_omp_task_set_arg_size (p, arg_size);
  gimple_omp_task_set_arg_align (p, arg_align);

  return p;
}


/* Build a GIMPLE_OMP_SECTION statement for a sections statement.

   BODY is the sequence of statements in the section.  */

gimple
gimple_build_omp_section (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_MASTER statement.

   BODY is the sequence of statements to be executed by just the master.  */

gimple
gimple_build_omp_master (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_CONTINUE statement.

   CONTROL_DEF is the definition of the control variable.
   CONTROL_USE is the use of the control variable.  */

gimple
gimple_build_omp_continue (tree control_def, tree control_use)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0);
  gimple_omp_continue_set_control_def (p, control_def);
  gimple_omp_continue_set_control_use (p, control_use);
  return p;
}

/* Build a GIMPLE_OMP_ORDERED statement.

   BODY is the sequence of statements inside a loop that will be
   executed in sequence.  */

gimple
gimple_build_omp_ordered (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_RETURN statement.
   WAIT_P is true if this is a non-waiting return.  */

gimple
gimple_build_omp_return (bool wait_p)
{
  gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
  if (wait_p)
    gimple_omp_return_set_nowait (p);

  return p;
}


/* Build a GIMPLE_OMP_SECTIONS statement.

   BODY is a sequence of section statements.
   CLAUSES are any of the OMP sections construct's clauses: private,
   firstprivate, lastprivate, reduction, and nowait.  */

gimple
gimple_build_omp_sections (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_sections_set_clauses (p, clauses);

  return p;
}


/* Build a GIMPLE_OMP_SECTIONS_SWITCH.  */

gimple
gimple_build_omp_sections_switch (void)
{
  return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
}


/* Build a GIMPLE_OMP_SINGLE statement.

   BODY is the sequence of statements that will be executed once.
   CLAUSES are any of the OMP single construct's clauses: private,
   firstprivate, copyprivate, nowait.  */

gimple
gimple_build_omp_single (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_single_set_clauses (p, clauses);

  return p;
}


/* Build a GIMPLE_OMP_ATOMIC_LOAD statement.  */

gimple
gimple_build_omp_atomic_load (tree lhs, tree rhs)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0);
  gimple_omp_atomic_load_set_lhs (p, lhs);
  gimple_omp_atomic_load_set_rhs (p, rhs);
  return p;
}

/* Build a GIMPLE_OMP_ATOMIC_STORE statement.

   VAL is the value we are storing.  */

gimple
gimple_build_omp_atomic_store (tree val)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0);
  gimple_omp_atomic_store_set_val (p, val);
  return p;
}

/* Build a GIMPLE_PREDICT statement.  PREDICT is one of the predictors from
   predict.def, OUTCOME is NOT_TAKEN or TAKEN.  */

gimple
gimple_build_predict (enum br_predictor predictor, enum prediction outcome)
{
  gimple p = gimple_alloc (GIMPLE_PREDICT, 0);
  /* Ensure all the predictors fit into the lower bits of the subcode.  */
  gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN);
  gimple_predict_set_predictor (p, predictor);
  gimple_predict_set_outcome (p, outcome);
  return p;
}

#if defined ENABLE_GIMPLE_CHECKING
/* Complain of a gimple type mismatch and die.  */

void
gimple_check_failed (const_gimple gs, const char *file, int line,
                     const char *function, enum gimple_code code,
                     enum tree_code subcode)
{
  internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
                  gimple_code_name[code],
                  tree_code_name[subcode],
                  gimple_code_name[gimple_code (gs)],
                  gs->gsbase.subcode > 0
                    ? tree_code_name[gs->gsbase.subcode]
                    : "",
                  function, trim_filename (file), line);
}
#endif /* ENABLE_GIMPLE_CHECKING */


/* Allocate a new GIMPLE sequence in GC memory and return it.  If
   there are free sequences in GIMPLE_SEQ_CACHE return one of those
   instead.  */

gimple_seq
gimple_seq_alloc (void)
{
  gimple_seq seq = gimple_seq_cache;
  if (seq)
    {
      gimple_seq_cache = gimple_seq_cache->next_free;
      gcc_assert (gimple_seq_cache != seq);
      memset (seq, 0, sizeof (*seq));
    }
  else
    {
      seq = (gimple_seq) ggc_alloc_cleared (sizeof (*seq));
#ifdef GATHER_STATISTICS
      gimple_alloc_counts[(int) gimple_alloc_kind_seq]++;
      gimple_alloc_sizes[(int) gimple_alloc_kind_seq] += sizeof (*seq);
#endif
    }

  return seq;
}

/* Return SEQ to the free pool of GIMPLE sequences.  */

void
gimple_seq_free (gimple_seq seq)
{
  if (seq == NULL)
    return;

  gcc_assert (gimple_seq_first (seq) == NULL);
  gcc_assert (gimple_seq_last (seq) == NULL);

  /* If this triggers, it's a sign that the same list is being freed
     twice.  */
  gcc_assert (seq != gimple_seq_cache || gimple_seq_cache == NULL);

  /* Add SEQ to the pool of free sequences.  */
  seq->next_free = gimple_seq_cache;
  gimple_seq_cache = seq;
}


/* Link gimple statement GS to the end of the sequence *SEQ_P.  If
   *SEQ_P is NULL, a new sequence is allocated.  */

void
gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
{
  gimple_stmt_iterator si;

  if (gs == NULL)
    return;

  if (*seq_p == NULL)
    *seq_p = gimple_seq_alloc ();

  si = gsi_last (*seq_p);
  gsi_insert_after (&si, gs, GSI_NEW_STMT);
}


/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  */

void
gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  if (*dst_p == NULL)
    *dst_p = gimple_seq_alloc ();

  si = gsi_last (*dst_p);
  gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
}
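
/* Example (an illustrative sketch, not part of the original sources):
   start from an empty sequence; the first call allocates it.  LABEL
   is assumed to be an existing LABEL_DECL tree:

     gimple_seq seq = NULL;
     gimple_seq_add_stmt (&seq, gimple_build_nop ());
     gimple_seq_add_stmt (&seq, gimple_build_label (label));  */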


/* Helper function of empty_body_p.  Return true if STMT is an empty
   statement.  */

static bool
empty_stmt_p (gimple stmt)
{
  if (gimple_code (stmt) == GIMPLE_NOP)
    return true;
  if (gimple_code (stmt) == GIMPLE_BIND)
    return empty_body_p (gimple_bind_body (stmt));
  return false;
}


/* Return true if BODY contains nothing but empty statements.  */

bool
empty_body_p (gimple_seq body)
{
  gimple_stmt_iterator i;

  if (gimple_seq_empty_p (body))
    return true;
  for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
    if (!empty_stmt_p (gsi_stmt (i))
        && !is_gimple_debug (gsi_stmt (i)))
      return false;

  return true;
}


/* Perform a deep copy of sequence SRC and return the result.  */

gimple_seq
gimple_seq_copy (gimple_seq src)
{
  gimple_stmt_iterator gsi;
  gimple_seq new_seq = gimple_seq_alloc ();
  gimple stmt;

  for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gimple_copy (gsi_stmt (gsi));
      gimple_seq_add_stmt (&new_seq, stmt);
    }

  return new_seq;
}


/* Walk all the statements in the sequence SEQ calling walk_gimple_stmt
   on each one.  WI is as in walk_gimple_stmt.

   If walk_gimple_stmt returns non-NULL, the walk is stopped, the
   value is stored in WI->CALLBACK_RESULT and the statement that
   produced the value is returned.

   Otherwise, all the statements are walked and NULL returned.  */

gimple
walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
                 walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi);
      if (ret)
        {
          /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
             to hold it.  */
          gcc_assert (wi);
          wi->callback_result = ret;
          return gsi_stmt (gsi);
        }
    }

  if (wi)
    wi->callback_result = NULL_TREE;

  return NULL;
}
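
/* Example (an illustrative sketch, not part of the original sources):
   find the first statement in SEQ containing an ADDR_EXPR operand,
   using a hypothetical walk_tree callback FIND_ADDR as CALLBACK_OP:

     static tree
     find_addr (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                void *data ATTRIBUTE_UNUSED)
     {
       return TREE_CODE (*tp) == ADDR_EXPR ? *tp : NULL_TREE;
     }

     struct walk_stmt_info wi;
     memset (&wi, 0, sizeof (wi));
     gimple stmt = walk_gimple_seq (seq, NULL, find_addr, &wi);  */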


/* Helper function for walk_gimple_stmt.  Walk operands of a GIMPLE_ASM.  */

static tree
walk_gimple_asm (gimple stmt, walk_tree_fn callback_op,
                 struct walk_stmt_info *wi)
{
  tree ret, op;
  unsigned noutputs;
  const char **oconstraints;
  unsigned i, n;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  if (wi)
    wi->is_lhs = true;

  for (i = 0; i < noutputs; i++)
    {
      op = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem, &allows_reg,
                               &is_inout);
      if (wi)
        wi->val_only = (allows_reg || !allows_mem);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
        return ret;
    }

  n = gimple_asm_ninputs (stmt);
  for (i = 0; i < n; i++)
    {
      op = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                              oconstraints, &allows_mem, &allows_reg);
      if (wi)
        {
          wi->val_only = (allows_reg || !allows_mem);
          /* Although input "m" is not really an LHS, we need an lvalue.  */
          wi->is_lhs = !wi->val_only;
        }
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
        return ret;
    }

  if (wi)
    {
      wi->is_lhs = false;
      wi->val_only = true;
    }

  n = gimple_asm_nlabels (stmt);
  for (i = 0; i < n; i++)
    {
      op = gimple_asm_label_op (stmt, i);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
        return ret;
    }

  return NULL_TREE;
}


/* Helper function of WALK_GIMPLE_STMT.  Walk every tree operand in
   STMT.  CALLBACK_OP and WI are as in WALK_GIMPLE_STMT.

   CALLBACK_OP is called on each operand of STMT via walk_tree.
   Additional parameters to walk_tree must be stored in WI.  For each operand
   OP, walk_tree is called as:

	walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)

   If CALLBACK_OP returns non-NULL for an operand, the remaining
   operands are not scanned.

   The return value is that returned by the last call to walk_tree, or
   NULL_TREE if no CALLBACK_OP is specified.  */

tree
walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
                struct walk_stmt_info *wi)
{
  struct pointer_set_t *pset = (wi) ? wi->pset : NULL;
  unsigned i;
  tree ret = NULL_TREE;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* Walk the RHS operands.  If the LHS is of a non-renamable type or
         is a register variable, we may use a COMPONENT_REF on the RHS.  */
      if (wi)
        {
          tree lhs = gimple_assign_lhs (stmt);
          wi->val_only
            = (is_gimple_reg_type (TREE_TYPE (lhs)) && !is_gimple_reg (lhs))
              || !gimple_assign_single_p (stmt);
        }

      for (i = 1; i < gimple_num_ops (stmt); i++)
        {
          ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
                           pset);
          if (ret)
            return ret;
        }

      /* Walk the LHS.  If the RHS is appropriate for a memory, we
         may use a COMPONENT_REF on the LHS.  */
      if (wi)
        {
          /* If the RHS has more than 1 operand, it is not appropriate
             for the memory.  */
          wi->val_only = !is_gimple_mem_rhs (gimple_assign_rhs1 (stmt))
                         || !gimple_assign_single_p (stmt);
          wi->is_lhs = true;
        }

      ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset);
      if (ret)
        return ret;

      if (wi)
        {
          wi->val_only = true;
          wi->is_lhs = false;
        }
      break;

    case GIMPLE_CALL:
      if (wi)
        wi->is_lhs = false;

      ret = walk_tree (gimple_call_chain_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_call_fn_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      for (i = 0; i < gimple_call_num_args (stmt); i++)
        {
          ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
                           pset);
          if (ret)
            return ret;
        }

      if (wi)
        wi->is_lhs = true;

      ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      if (wi)
        wi->is_lhs = false;
      break;

    case GIMPLE_CATCH:
      ret = walk_tree (gimple_catch_types_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_ASM:
      ret = walk_gimple_asm (stmt, callback_op, wi);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_CONTINUE:
      ret = walk_tree (gimple_omp_continue_control_def_ptr (stmt),
                       callback_op, wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_omp_continue_control_use_ptr (stmt),
                       callback_op, wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_CRITICAL:
      ret = walk_tree (gimple_omp_critical_name_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
        {
          ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
                           wi, pset);
          if (ret)
            return ret;
          ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
                           wi, pset);
          if (ret)
            return ret;
          ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
                           wi, pset);
          if (ret)
            return ret;
          ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
                           wi, pset);
        }
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_PARALLEL:
      ret = walk_tree (gimple_omp_parallel_clauses_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_parallel_child_fn_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_parallel_data_arg_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_TASK:
      ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_SECTIONS:
      ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;

      break;

    case GIMPLE_OMP_SINGLE:
      ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_ATOMIC_LOAD:
      ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_ATOMIC_STORE:
      ret = walk_tree (gimple_omp_atomic_store_val_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      break;

      /* Tuples that do not have operands.  */
    case GIMPLE_NOP:
    case GIMPLE_RESX:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_PREDICT:
      break;

    default:
      {
        enum gimple_statement_structure_enum gss;
        gss = gimple_statement_structure (stmt);
        if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
          for (i = 0; i < gimple_num_ops (stmt); i++)
            {
              ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
              if (ret)
                return ret;
            }
      }
      break;
    }

  return NULL_TREE;
}


/* Walk the current statement in GSI (optionally using traversal state
   stored in WI).  If WI is NULL, no state is kept during traversal.
   The callback CALLBACK_STMT is called.  If CALLBACK_STMT indicates
   that it has handled all the operands of the statement, its return
   value is returned.  Otherwise, the return value from CALLBACK_STMT
   is discarded and its operands are scanned.

   If CALLBACK_STMT is NULL or it didn't handle the operands,
   CALLBACK_OP is called on each operand of the statement via
   walk_gimple_op.  If walk_gimple_op returns non-NULL for any
   operand, the remaining operands are not scanned.  In this case, the
   return value from CALLBACK_OP is returned.

   In any other case, NULL_TREE is returned.  */

tree
walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
                  walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple ret;
  tree tree_ret;
  gimple stmt = gsi_stmt (*gsi);

  if (wi)
    wi->gsi = *gsi;

  if (wi && wi->want_locations && gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  ret = NULL;

  /* Invoke the statement callback.  Return if the callback handled
     all of STMT operands by itself.  */
  if (callback_stmt)
    {
      bool handled_ops = false;
      tree_ret = callback_stmt (gsi, &handled_ops, wi);
      if (handled_ops)
        return tree_ret;

      /* If CALLBACK_STMT did not handle operands, it should not have
         a value to return.  */
      gcc_assert (tree_ret == NULL);

      /* Re-read stmt in case the callback changed it.  */
      stmt = gsi_stmt (*gsi);
    }

  /* If CALLBACK_OP is defined, invoke it on every operand of STMT.  */
  if (callback_op)
    {
      tree_ret = walk_gimple_op (stmt, callback_op, wi);
      if (tree_ret)
        return tree_ret;
    }

  /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      ret = walk_gimple_seq (gimple_bind_body (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_CATCH:
      ret = walk_gimple_seq (gimple_catch_handler (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_gimple_seq (gimple_eh_filter_failure (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_TRY:
      ret = walk_gimple_seq (gimple_try_eval (stmt), callback_stmt, callback_op,
                             wi);
      if (ret)
        return wi->callback_result;

      ret = walk_gimple_seq (gimple_try_cleanup (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_gimple_seq (gimple_omp_for_pre_body (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;

      /* FALL THROUGH.  */
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
      ret = walk_gimple_seq (gimple_omp_body (stmt), callback_stmt, callback_op,
                             wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_WITH_CLEANUP_EXPR:
      ret = walk_gimple_seq (gimple_wce_cleanup (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    default:
      gcc_assert (!gimple_has_substatements (stmt));
      break;
    }

  return NULL;
}


/* Set sequence SEQ to be the GIMPLE body for function FNDECL.  */

void
gimple_set_body (tree fndecl, gimple_seq seq)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  if (fn == NULL)
    {
      /* If FNDECL still does not have a function structure associated
         with it, then it does not make sense for it to receive a
         GIMPLE body.  */
      gcc_assert (seq == NULL);
    }
  else
    fn->gimple_body = seq;
}


/* Return the body of GIMPLE statements for function FNDECL.  */

gimple_seq
gimple_body (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return fn ? fn->gimple_body : NULL;
}

/* Return true when FNDECL has a GIMPLE body either in unlowered
   or CFG form.  */

bool
gimple_has_body_p (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return (gimple_body (fndecl) || (fn && fn->cfg));
}

/* Detect flags from a GIMPLE_CALL.  This is just like
   call_expr_flags, but for gimple tuples.  */

int
gimple_call_flags (const_gimple stmt)
{
  int flags;
  tree decl = gimple_call_fndecl (stmt);
  tree t;

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else
    {
      t = TREE_TYPE (gimple_call_fn (stmt));
      if (t && TREE_CODE (t) == POINTER_TYPE)
        flags = flags_from_decl_or_type (TREE_TYPE (t));
      else
        flags = 0;
    }

  return flags;
}

/* Detects argument flags for argument number ARG on call STMT.  */

int
gimple_call_arg_flags (const_gimple stmt, unsigned arg)
{
  tree type = TREE_TYPE (TREE_TYPE (gimple_call_fn (stmt)));
  tree attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
  if (!attr)
    return 0;

  attr = TREE_VALUE (TREE_VALUE (attr));
  if (1 + arg >= (unsigned) TREE_STRING_LENGTH (attr))
    return 0;

  switch (TREE_STRING_POINTER (attr)[1 + arg])
    {
    case 'x':
    case 'X':
      return EAF_UNUSED;

    case 'R':
      return EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE;

    case 'r':
      return EAF_NOCLOBBER | EAF_NOESCAPE;

    case 'W':
      return EAF_DIRECT | EAF_NOESCAPE;

    case 'w':
      return EAF_NOESCAPE;

    case '.':
    default:
      return 0;
    }
}
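
/* Example (an illustrative sketch, not part of the original sources):
   given the internal "fn spec" attribute string ".rW", character 0
   describes the return value, so gimple_call_arg_flags yields
   EAF_NOCLOBBER | EAF_NOESCAPE for argument 0 ('r') and
   EAF_DIRECT | EAF_NOESCAPE for argument 1 ('W').  */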

/* Detects return flags for the call STMT.  */

int
gimple_call_return_flags (const_gimple stmt)
{
  tree type;
  tree attr = NULL_TREE;

  if (gimple_call_flags (stmt) & ECF_MALLOC)
    return ERF_NOALIAS;

  type = TREE_TYPE (TREE_TYPE (gimple_call_fn (stmt)));
  attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
  if (!attr)
    return 0;

  attr = TREE_VALUE (TREE_VALUE (attr));
  if (TREE_STRING_LENGTH (attr) < 1)
    return 0;

  switch (TREE_STRING_POINTER (attr)[0])
    {
    case '1':
    case '2':
    case '3':
    case '4':
      return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');

    case 'm':
      return ERF_NOALIAS;

    case '.':
    default:
      return 0;
    }
}
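
/* Example (an illustrative sketch, not part of the original sources):
   a "fn spec" string starting with '1' means the call returns its
   first argument, so gimple_call_return_flags yields
   ERF_RETURNS_ARG | 0; a leading 'm' yields ERF_NOALIAS, matching the
   ECF_MALLOC shortcut above.  */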

/* Return true if GS is a copy assignment.  */

bool
gimple_assign_copy_p (gimple gs)
{
  return gimple_code (gs) == GIMPLE_ASSIGN
         && get_gimple_rhs_class (gimple_assign_rhs_code (gs))
            == GIMPLE_SINGLE_RHS
         && is_gimple_val (gimple_op (gs, 1));
}


/* Return true if GS is an SSA_NAME copy assignment.  */

bool
gimple_assign_ssa_name_copy_p (gimple gs)
{
  return (gimple_code (gs) == GIMPLE_ASSIGN
          && (get_gimple_rhs_class (gimple_assign_rhs_code (gs))
              == GIMPLE_SINGLE_RHS)
          && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
          && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
}


/* Return true if GS is an assignment with a singleton RHS, i.e.,
   there is no operator associated with the assignment itself.
   Unlike gimple_assign_copy_p, this predicate returns true for
   any RHS operand, including those that perform an operation
   and do not have the semantics of a copy, such as COND_EXPR.  */

bool
gimple_assign_single_p (gimple gs)
{
  return (gimple_code (gs) == GIMPLE_ASSIGN
          && get_gimple_rhs_class (gimple_assign_rhs_code (gs))
             == GIMPLE_SINGLE_RHS);
}

/* Return true if GS is an assignment with a unary RHS, but the
   operator has no effect on the assigned value.  The logic is adapted
   from STRIP_NOPS.  This predicate is intended to be used in tuplifying
   instances in which STRIP_NOPS was previously applied to the RHS of
   an assignment.

   NOTE: In the use cases that led to the creation of this function
   and of gimple_assign_single_p, it is typical to test for either
   condition and to proceed in the same manner.  In each case, the
   assigned value is represented by the single RHS operand of the
   assignment.  I suspect there may be cases where gimple_assign_copy_p,
   gimple_assign_single_p, or equivalent logic is used where a similar
   treatment of unary NOPs is appropriate.  */

bool
gimple_assign_unary_nop_p (gimple gs)
{
  return (gimple_code (gs) == GIMPLE_ASSIGN
          && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
              || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
          && gimple_assign_rhs1 (gs) != error_mark_node
          && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
              == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
}

/* Set BB to be the basic block holding statement STMT.  */

void
gimple_set_bb (gimple stmt, basic_block bb)
{
  stmt->gsbase.bb = bb;

  /* If the statement is a label, add the label to block-to-labels map
     so that we can speed up edge creation for GIMPLE_GOTOs.  */
  if (cfun->cfg && gimple_code (stmt) == GIMPLE_LABEL)
    {
      tree t;
      int uid;

      t = gimple_label_label (stmt);
      uid = LABEL_DECL_UID (t);
      if (uid == -1)
        {
          unsigned old_len = VEC_length (basic_block, label_to_block_map);
          LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
          if (old_len <= (unsigned) uid)
            {
              unsigned new_len = 3 * uid / 2 + 1;

              VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
                                     new_len);
            }
        }

      VEC_replace (basic_block, label_to_block_map, uid, bb);
    }
}

1931
726a989a
RB
1932/* Modify the RHS of the assignment pointed-to by GSI using the
1933 operands in the expression tree EXPR.
1934
1935 NOTE: The statement pointed-to by GSI may be reallocated if it
1936 did not have enough operand slots.
1937
1938 This function is useful to convert an existing tree expression into
1939 the flat representation used for the RHS of a GIMPLE assignment.
1940 It will reallocate memory as needed to expand or shrink the number
1941 of operand slots needed to represent EXPR.
1942
1943 NOTE: If you find yourself building a tree and then calling this
1944 function, you are most certainly doing it the slow way. It is much
1945 better to build a new assignment or to use the function
1946 gimple_assign_set_rhs_with_ops, which does not require an
1947 expression tree to be built. */
1948
1949void
1950gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
1951{
1952 enum tree_code subcode;
1953 tree op1, op2;
1954
1955 extract_ops_from_tree (expr, &subcode, &op1, &op2);
1956 gimple_assign_set_rhs_with_ops (gsi, subcode, op1, op2);
1957}
1958
1959
1960/* Set the RHS of assignment statement pointed-to by GSI to CODE with
1961 operands OP1 and OP2.
1962
1963 NOTE: The statement pointed-to by GSI may be reallocated if it
1964 did not have enough operand slots. */
1965
1966void
1967gimple_assign_set_rhs_with_ops (gimple_stmt_iterator *gsi, enum tree_code code,
1968 tree op1, tree op2)
1969{
1970 unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
1971 gimple stmt = gsi_stmt (*gsi);
1972
1973 /* If the new CODE needs more operands, allocate a new statement. */
1974 if (gimple_num_ops (stmt) < new_rhs_ops + 1)
1975 {
1976 tree lhs = gimple_assign_lhs (stmt);
1977 gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
1978 memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
1979 gsi_replace (gsi, new_stmt, true);
1980 stmt = new_stmt;
1981
1982 /* The LHS needs to be reset as this also changes the SSA name
1983 on the LHS. */
1984 gimple_assign_set_lhs (stmt, lhs);
1985 }
1986
1987 gimple_set_num_ops (stmt, new_rhs_ops + 1);
1988 gimple_set_subcode (stmt, code);
1989 gimple_assign_set_rhs1 (stmt, op1);
1990 if (new_rhs_ops > 1)
1991 gimple_assign_set_rhs2 (stmt, op2);
1992}
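
/* For example (sketch only; LHS denotes the statement's left-hand
   side), a pass folding `x = y - y' down to zero, with GSI pointing
   at the assignment, might do:

     gimple_assign_set_rhs_with_ops (&gsi, INTEGER_CST,
                                     build_int_cst (TREE_TYPE (lhs), 0),
                                     NULL_TREE);
     update_stmt (gsi_stmt (gsi));  */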
1993
1994
1995/* Return the LHS of a statement that performs an assignment,
1996 either a GIMPLE_ASSIGN or a GIMPLE_CALL. Returns NULL_TREE
1997 for a call to a function that returns no value, or for a
1998 statement other than an assignment or a call. */
1999
2000tree
2001gimple_get_lhs (const_gimple stmt)
2002{
2003 enum gimple_code code = gimple_code (stmt);
2004
2005 if (code == GIMPLE_ASSIGN)
2006 return gimple_assign_lhs (stmt);
2007 else if (code == GIMPLE_CALL)
2008 return gimple_call_lhs (stmt);
2009 else
2010 return NULL_TREE;
2011}
2012
2013
2014/* Set the LHS of a statement that performs an assignment,
2015 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
2016
2017void
2018gimple_set_lhs (gimple stmt, tree lhs)
2019{
2020 enum gimple_code code = gimple_code (stmt);
2021
2022 if (code == GIMPLE_ASSIGN)
2023 gimple_assign_set_lhs (stmt, lhs);
2024 else if (code == GIMPLE_CALL)
2025 gimple_call_set_lhs (stmt, lhs);
2026 else
2027 gcc_unreachable();
2028}
2029
2030/* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
2031 GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
2032 expression with a different value.
2033
2034 This will update any annotations (say debug bind stmts) referring
2035 to the original LHS, so that they use the RHS instead. This is
2036 done even if NLHS and LHS are the same, for it is understood that
2037 the RHS will be modified afterwards, and NLHS will not be assigned
2038 an equivalent value.
2039
2040 Adjusting any non-annotation uses of the LHS, if needed, is a
2041 responsibility of the caller.
2042
2043 The effect of this call should be pretty much the same as that of
2044 inserting a copy of STMT before STMT, and then removing the
2045 original stmt, at which time gsi_remove() would have updated the
2046 annotations, but using this function saves all the inserting,
2047 copying and removing. */
2048
2049void
2050gimple_replace_lhs (gimple stmt, tree nlhs)
2051{
2052 if (MAY_HAVE_DEBUG_STMTS)
2053 {
2054 tree lhs = gimple_get_lhs (stmt);
2055
2056 gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);
2057
2058 insert_debug_temp_for_var_def (NULL, lhs);
2059 }
2060
2061 gimple_set_lhs (stmt, nlhs);
2062}
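
/* A typical use (sketch): a pass about to give STMT a new RHS that no
   longer computes the old value first calls

     gimple_replace_lhs (stmt, gimple_get_lhs (stmt));

   so the debug bind stmts capture the old RHS, and only then rewrites
   the statement.  */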
2063
2064/* Return a deep copy of statement STMT. All the operands from STMT
2065 are reallocated and copied using unshare_expr. The DEF and USE
2066 operand arrays are cleared in the copy; VDEF and VUSE are kept. */
2067
2068gimple
2069gimple_copy (gimple stmt)
2070{
2071 enum gimple_code code = gimple_code (stmt);
2072 unsigned num_ops = gimple_num_ops (stmt);
2073 gimple copy = gimple_alloc (code, num_ops);
2074 unsigned i;
2075
2076 /* Shallow copy all the fields from STMT. */
2077 memcpy (copy, stmt, gimple_size (code));
2078
2079 /* If STMT has sub-statements, deep-copy them as well. */
2080 if (gimple_has_substatements (stmt))
2081 {
2082 gimple_seq new_seq;
2083 tree t;
2084
2085 switch (gimple_code (stmt))
2086 {
2087 case GIMPLE_BIND:
2088 new_seq = gimple_seq_copy (gimple_bind_body (stmt));
2089 gimple_bind_set_body (copy, new_seq);
2090 gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt)));
2091 gimple_bind_set_block (copy, gimple_bind_block (stmt));
2092 break;
2093
2094 case GIMPLE_CATCH:
2095 new_seq = gimple_seq_copy (gimple_catch_handler (stmt));
2096 gimple_catch_set_handler (copy, new_seq);
2097 t = unshare_expr (gimple_catch_types (stmt));
2098 gimple_catch_set_types (copy, t);
2099 break;
2100
2101 case GIMPLE_EH_FILTER:
2102 new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt));
2103 gimple_eh_filter_set_failure (copy, new_seq);
2104 t = unshare_expr (gimple_eh_filter_types (stmt));
2105 gimple_eh_filter_set_types (copy, t);
2106 break;
2107
2108 case GIMPLE_TRY:
2109 new_seq = gimple_seq_copy (gimple_try_eval (stmt));
2110 gimple_try_set_eval (copy, new_seq);
2111 new_seq = gimple_seq_copy (gimple_try_cleanup (stmt));
2112 gimple_try_set_cleanup (copy, new_seq);
2113 break;
2114
2115 case GIMPLE_OMP_FOR:
2116 new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
2117 gimple_omp_for_set_pre_body (copy, new_seq);
2118 t = unshare_expr (gimple_omp_for_clauses (stmt));
2119 gimple_omp_for_set_clauses (copy, t);
2120 copy->gimple_omp_for.iter
2121 = GGC_NEWVEC (struct gimple_omp_for_iter,
2122 gimple_omp_for_collapse (stmt));
2123 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2124 {
2125 gimple_omp_for_set_cond (copy, i,
2126 gimple_omp_for_cond (stmt, i));
2127 gimple_omp_for_set_index (copy, i,
2128 gimple_omp_for_index (stmt, i));
2129 t = unshare_expr (gimple_omp_for_initial (stmt, i));
2130 gimple_omp_for_set_initial (copy, i, t);
2131 t = unshare_expr (gimple_omp_for_final (stmt, i));
2132 gimple_omp_for_set_final (copy, i, t);
2133 t = unshare_expr (gimple_omp_for_incr (stmt, i));
2134 gimple_omp_for_set_incr (copy, i, t);
2135 }
2136 goto copy_omp_body;
2137
2138 case GIMPLE_OMP_PARALLEL:
2139 t = unshare_expr (gimple_omp_parallel_clauses (stmt));
2140 gimple_omp_parallel_set_clauses (copy, t);
2141 t = unshare_expr (gimple_omp_parallel_child_fn (stmt));
2142 gimple_omp_parallel_set_child_fn (copy, t);
2143 t = unshare_expr (gimple_omp_parallel_data_arg (stmt));
2144 gimple_omp_parallel_set_data_arg (copy, t);
2145 goto copy_omp_body;
2146
2147 case GIMPLE_OMP_TASK:
2148 t = unshare_expr (gimple_omp_task_clauses (stmt));
2149 gimple_omp_task_set_clauses (copy, t);
2150 t = unshare_expr (gimple_omp_task_child_fn (stmt));
2151 gimple_omp_task_set_child_fn (copy, t);
2152 t = unshare_expr (gimple_omp_task_data_arg (stmt));
2153 gimple_omp_task_set_data_arg (copy, t);
2154 t = unshare_expr (gimple_omp_task_copy_fn (stmt));
2155 gimple_omp_task_set_copy_fn (copy, t);
2156 t = unshare_expr (gimple_omp_task_arg_size (stmt));
2157 gimple_omp_task_set_arg_size (copy, t);
2158 t = unshare_expr (gimple_omp_task_arg_align (stmt));
2159 gimple_omp_task_set_arg_align (copy, t);
2160 goto copy_omp_body;
2161
2162 case GIMPLE_OMP_CRITICAL:
2163 t = unshare_expr (gimple_omp_critical_name (stmt));
2164 gimple_omp_critical_set_name (copy, t);
2165 goto copy_omp_body;
2166
2167 case GIMPLE_OMP_SECTIONS:
2168 t = unshare_expr (gimple_omp_sections_clauses (stmt));
2169 gimple_omp_sections_set_clauses (copy, t);
2170 t = unshare_expr (gimple_omp_sections_control (stmt));
2171 gimple_omp_sections_set_control (copy, t);
2172 /* FALLTHRU */
2173
2174 case GIMPLE_OMP_SINGLE:
2175 case GIMPLE_OMP_SECTION:
2176 case GIMPLE_OMP_MASTER:
2177 case GIMPLE_OMP_ORDERED:
2178 copy_omp_body:
2179 new_seq = gimple_seq_copy (gimple_omp_body (stmt));
2180 gimple_omp_set_body (copy, new_seq);
2181 break;
2182
2183 case GIMPLE_WITH_CLEANUP_EXPR:
2184 new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
2185 gimple_wce_set_cleanup (copy, new_seq);
2186 break;
2187
2188 default:
2189 gcc_unreachable ();
2190 }
2191 }
2192
2193 /* Make copy of operands. */
2194 if (num_ops > 0)
2195 {
2196 for (i = 0; i < num_ops; i++)
2197 gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));
2198
2199 /* Clear out SSA operand vectors on COPY. */
2200 if (gimple_has_ops (stmt))
2201 {
2202 gimple_set_def_ops (copy, NULL);
2203 gimple_set_use_ops (copy, NULL);
2204 }
2205
2206 if (gimple_has_mem_ops (stmt))
2207 {
2208 gimple_set_vdef (copy, gimple_vdef (stmt));
2209 gimple_set_vuse (copy, gimple_vuse (stmt));
2210 }
2211
2212 /* SSA operands need to be updated. */
2213 gimple_set_modified (copy, true);
2214 }
2215
2216 return copy;
2217}
2218
2219
2220/* Set the MODIFIED flag to MODIFIEDP, iff the gimple statement G has
2221 a MODIFIED field. */
2222
2223void
2224gimple_set_modified (gimple s, bool modifiedp)
2225{
2226 if (gimple_has_ops (s))
2227 {
2228 s->gsbase.modified = (unsigned) modifiedp;
2229
2230 if (modifiedp
2231 && cfun->gimple_df
2232 && is_gimple_call (s)
2233 && gimple_call_noreturn_p (s))
2234 VEC_safe_push (gimple, gc, MODIFIED_NORETURN_CALLS (cfun), s);
2235 }
2236}
2237
2238
2239/* Return true if statement S has side-effects. We consider a
2240 statement to have side effects if:
2241
2242 - It is a GIMPLE_CALL not marked ECF_PURE or ECF_CONST (a call marked ECF_LOOPING_CONST_OR_PURE counts too, as an infinite loop is a side effect).
2243 - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS. */
2244
2245bool
2246gimple_has_side_effects (const_gimple s)
2247{
2248 unsigned i;
2249
2250 if (is_gimple_debug (s))
2251 return false;
2252
2253 /* We do not need to scan the arguments for volatile operands;
2254 at present, though, we still scan them to check for
2255 TREE_SIDE_EFFECTS. */
2256 if (gimple_has_volatile_ops (s))
2257 return true;
2258
2259 if (is_gimple_call (s))
2260 {
2261 unsigned nargs = gimple_call_num_args (s);
2262
2263 if (!(gimple_call_flags (s) & (ECF_CONST | ECF_PURE)))
2264 return true;
2265 else if (gimple_call_flags (s) & ECF_LOOPING_CONST_OR_PURE)
2266 /* An infinite loop is considered a side effect. */
2267 return true;
2268
2269 if (gimple_call_lhs (s)
2270 && TREE_SIDE_EFFECTS (gimple_call_lhs (s)))
2271 {
2272 gcc_assert (gimple_has_volatile_ops (s));
2273 return true;
2274 }
2275
2276 if (TREE_SIDE_EFFECTS (gimple_call_fn (s)))
2277 return true;
2278
2279 for (i = 0; i < nargs; i++)
2280 if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i)))
2281 {
2282 gcc_assert (gimple_has_volatile_ops (s));
2283 return true;
2284 }
2285
2286 return false;
2287 }
2288 else
2289 {
2290 for (i = 0; i < gimple_num_ops (s); i++)
2291 if (TREE_SIDE_EFFECTS (gimple_op (s, i)))
2292 {
2293 gcc_assert (gimple_has_volatile_ops (s));
2294 return true;
2295 }
2296 }
2297
2298 return false;
2299}
2300
2301/* Return true if the RHS of statement S has side effects.
2302 We may use it to determine if it is admissible to replace
2303 an assignment or call with a copy of a previously-computed
2304 value. In such cases, side-effects due to the LHS are
2305 preserved. */
2306
2307bool
2308gimple_rhs_has_side_effects (const_gimple s)
2309{
2310 unsigned i;
2311
2312 if (is_gimple_call (s))
2313 {
2314 unsigned nargs = gimple_call_num_args (s);
2315
2316 if (!(gimple_call_flags (s) & (ECF_CONST | ECF_PURE)))
2317 return true;
2318
2319 /* We cannot use gimple_has_volatile_ops here,
2320 because we must ignore a volatile LHS. */
2321 if (TREE_SIDE_EFFECTS (gimple_call_fn (s))
2322 || TREE_THIS_VOLATILE (gimple_call_fn (s)))
2323 {
2324 gcc_assert (gimple_has_volatile_ops (s));
2325 return true;
2326 }
2327
2328 for (i = 0; i < nargs; i++)
2329 if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i))
2330 || TREE_THIS_VOLATILE (gimple_call_arg (s, i)))
2331 return true;
2332
2333 return false;
2334 }
2335 else if (is_gimple_assign (s))
2336 {
2337 /* Skip the first operand, the LHS. */
2338 for (i = 1; i < gimple_num_ops (s); i++)
2339 if (TREE_SIDE_EFFECTS (gimple_op (s, i))
2340 || TREE_THIS_VOLATILE (gimple_op (s, i)))
2341 {
2342 gcc_assert (gimple_has_volatile_ops (s));
2343 return true;
2344 }
2345 }
2346 else if (is_gimple_debug (s))
2347 return false;
2348 else
2349 {
2350 /* For statements without an LHS, examine all operands. */
2351 for (i = 0; i < gimple_num_ops (s); i++)
2352 if (TREE_SIDE_EFFECTS (gimple_op (s, i))
2353 || TREE_THIS_VOLATILE (gimple_op (s, i)))
2354 {
2355 gcc_assert (gimple_has_volatile_ops (s));
2356 return true;
2357 }
2358 }
2359
2360 return false;
2361}
2362
2363
2364/* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
2365 Return true if S can trap. If INCLUDE_LHS is true and S is a
2366 GIMPLE_ASSIGN, the LHS of the assignment is also checked.
2367 Otherwise, only the RHS of the assignment is checked. */
2368
2369static bool
2370gimple_could_trap_p_1 (gimple s, bool include_lhs)
2371{
2372 unsigned i, start;
2373 tree t, div = NULL_TREE;
2374 enum tree_code op;
2375
2376 start = (is_gimple_assign (s) && !include_lhs) ? 1 : 0;
2377
2378 for (i = start; i < gimple_num_ops (s); i++)
2379 if (tree_could_trap_p (gimple_op (s, i)))
2380 return true;
2381
2382 switch (gimple_code (s))
2383 {
2384 case GIMPLE_ASM:
2385 return gimple_asm_volatile_p (s);
2386
2387 case GIMPLE_CALL:
2388 t = gimple_call_fndecl (s);
2389 /* Assume that calls to weak functions may trap. */
2390 if (!t || !DECL_P (t) || DECL_WEAK (t))
2391 return true;
2392 return false;
2393
2394 case GIMPLE_ASSIGN:
2395 t = gimple_expr_type (s);
2396 op = gimple_assign_rhs_code (s);
2397 if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
2398 div = gimple_assign_rhs2 (s);
2399 return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
2400 (INTEGRAL_TYPE_P (t)
2401 && TYPE_OVERFLOW_TRAPS (t)),
2402 div));
2403
2404 default:
2405 break;
2406 }
2407
2408 return false;
2409
2410}
2411
2412
2413/* Return true if statement S can trap. */
2414
2415bool
2416gimple_could_trap_p (gimple s)
2417{
2418 return gimple_could_trap_p_1 (s, true);
2419}
2420
2421
2422/* Return true if RHS of a GIMPLE_ASSIGN S can trap. */
2423
2424bool
2425gimple_assign_rhs_could_trap_p (gimple s)
2426{
2427 gcc_assert (is_gimple_assign (s));
2428 return gimple_could_trap_p_1 (s, false);
2429}
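
/* These predicates, together with gimple_has_side_effects, are what
   passes use to guard code motion; e.g. (sketch):

     bool movable = !gimple_has_side_effects (stmt)
                    && !gimple_could_trap_p (stmt);  */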
2430
2431
2432/* Dump allocation statistics for the GIMPLE statements generated. */
2433
2434void
2435dump_gimple_statistics (void)
2436{
2437#ifdef GATHER_STATISTICS
2438 int i, total_tuples = 0, total_bytes = 0;
2439
2440 fprintf (stderr, "\nGIMPLE statements\n");
2441 fprintf (stderr, "Kind Stmts Bytes\n");
2442 fprintf (stderr, "---------------------------------------\n");
2443 for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
2444 {
2445 fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i],
2446 gimple_alloc_counts[i], gimple_alloc_sizes[i]);
2447 total_tuples += gimple_alloc_counts[i];
2448 total_bytes += gimple_alloc_sizes[i];
2449 }
2450 fprintf (stderr, "---------------------------------------\n");
2451 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes);
2452 fprintf (stderr, "---------------------------------------\n");
2453#else
2454 fprintf (stderr, "No gimple statistics\n");
2455#endif
2456}
2457
2458
2459/* Return the number of operands needed on the RHS of a GIMPLE
2460 assignment for an expression with tree code CODE. */
2461
2462unsigned
2463get_gimple_rhs_num_ops (enum tree_code code)
2464{
2465 enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
2466
2467 if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
2468 return 1;
2469 else if (rhs_class == GIMPLE_BINARY_RHS)
2470 return 2;
2471 else
2472 gcc_unreachable ();
2473}
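
/* E.g., get_gimple_rhs_num_ops (PLUS_EXPR) is 2 (GIMPLE_BINARY_RHS),
   get_gimple_rhs_num_ops (NEGATE_EXPR) is 1 (GIMPLE_UNARY_RHS), and an
   SSA_NAME or ADDR_EXPR counts as one GIMPLE_SINGLE_RHS operand.  */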
2474
2475#define DEFTREECODE(SYM, STRING, TYPE, NARGS) \
2476 (unsigned char) \
2477 ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS \
2478 : ((TYPE) == tcc_binary \
2479 || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS \
2480 : ((TYPE) == tcc_constant \
2481 || (TYPE) == tcc_declaration \
2482 || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS \
2483 : ((SYM) == TRUTH_AND_EXPR \
2484 || (SYM) == TRUTH_OR_EXPR \
2485 || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS \
2486 : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS \
2487 : ((SYM) == COND_EXPR \
2488 || (SYM) == CONSTRUCTOR \
2489 || (SYM) == OBJ_TYPE_REF \
2490 || (SYM) == ASSERT_EXPR \
2491 || (SYM) == ADDR_EXPR \
2492 || (SYM) == WITH_SIZE_EXPR \
2493 || (SYM) == SSA_NAME \
2494 || (SYM) == POLYNOMIAL_CHREC \
2495 || (SYM) == DOT_PROD_EXPR \
2496 || (SYM) == VEC_COND_EXPR \
2497 || (SYM) == REALIGN_LOAD_EXPR) ? GIMPLE_SINGLE_RHS \
2498 : GIMPLE_INVALID_RHS),
2499#define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,
2500
2501const unsigned char gimple_rhs_class_table[] = {
2502#include "all-tree.def"
2503};
2504
2505#undef DEFTREECODE
2506#undef END_OF_BASE_TREE_CODES
2507
2508/* For the definitive definition of GIMPLE, see doc/tree-ssa.texi. */
2509
2510/* Validation of GIMPLE expressions. */
2511
2512/* Return true if OP is an acceptable tree node to be used as a GIMPLE
2513 operand. */
2514
2515bool
2516is_gimple_operand (const_tree op)
2517{
2518 return op && get_gimple_rhs_class (TREE_CODE (op)) == GIMPLE_SINGLE_RHS;
2519}
2520
2521/* Returns true iff T is a valid RHS for an assignment to a renamed
2522 user -- or front-end generated artificial -- variable. */
2523
2524bool
2525is_gimple_reg_rhs (tree t)
2526{
2527 return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS;
2528}
2529
2530/* Returns true iff T is a valid RHS for an assignment to an un-renamed
2531 LHS, or for a call argument. */
2532
2533bool
2534is_gimple_mem_rhs (tree t)
2535{
2536 /* If we're dealing with a renamable type, either source or dest must be
2537 a renamed variable. */
2538 if (is_gimple_reg_type (TREE_TYPE (t)))
2539 return is_gimple_val (t);
2540 else
2541 return is_gimple_val (t) || is_gimple_lvalue (t);
2542}
2543
2544/* Return true if T is a valid LHS for a GIMPLE assignment expression. */
2545
2546bool
2547is_gimple_lvalue (tree t)
2548{
2549 return (is_gimple_addressable (t)
2550 || TREE_CODE (t) == WITH_SIZE_EXPR
2551 /* These are complex lvalues, but don't have addresses, so they
2552 go here. */
2553 || TREE_CODE (t) == BIT_FIELD_REF);
2554}
2555
2556/* Return true if T is a GIMPLE condition. */
2557
2558bool
2559is_gimple_condexpr (tree t)
2560{
2561 return (is_gimple_val (t) || (COMPARISON_CLASS_P (t)
2562 && !tree_could_trap_p (t)
2563 && is_gimple_val (TREE_OPERAND (t, 0))
2564 && is_gimple_val (TREE_OPERAND (t, 1))));
2565}
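
/* E.g., `a_1' and `a_1 < b_2' are valid GIMPLE conditions, whereas
   `a_1 < b_2 + 1' is not, because `b_2 + 1' is not a gimple value;
   comparisons flagged as trapping by tree_could_trap_p are likewise
   rejected.  */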
2566
2567/* Return true if T is something whose address can be taken. */
2568
2569bool
2570is_gimple_addressable (tree t)
2571{
2572 return (is_gimple_id (t) || handled_component_p (t) || INDIRECT_REF_P (t));
2573}
2574
2575/* Return true if T is a valid gimple constant. */
2576
2577bool
2578is_gimple_constant (const_tree t)
2579{
2580 switch (TREE_CODE (t))
2581 {
2582 case INTEGER_CST:
2583 case REAL_CST:
2584 case FIXED_CST:
2585 case STRING_CST:
2586 case COMPLEX_CST:
2587 case VECTOR_CST:
2588 return true;
2589
2590 /* Vector constant constructors are gimple invariant. */
2591 case CONSTRUCTOR:
2592 if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2593 return TREE_CONSTANT (t);
2594 else
2595 return false;
2596
2597 default:
2598 return false;
2599 }
2600}
2601
2602/* Return true if T is a gimple address. */
2603
2604bool
2605is_gimple_address (const_tree t)
2606{
2607 tree op;
2608
2609 if (TREE_CODE (t) != ADDR_EXPR)
2610 return false;
2611
2612 op = TREE_OPERAND (t, 0);
2613 while (handled_component_p (op))
2614 {
2615 if ((TREE_CODE (op) == ARRAY_REF
2616 || TREE_CODE (op) == ARRAY_RANGE_REF)
2617 && !is_gimple_val (TREE_OPERAND (op, 1)))
2618 return false;
2619
2620 op = TREE_OPERAND (op, 0);
2621 }
2622
2623 if (CONSTANT_CLASS_P (op) || INDIRECT_REF_P (op))
2624 return true;
2625
2626 switch (TREE_CODE (op))
2627 {
2628 case PARM_DECL:
2629 case RESULT_DECL:
2630 case LABEL_DECL:
2631 case FUNCTION_DECL:
2632 case VAR_DECL:
2633 case CONST_DECL:
2634 return true;
2635
2636 default:
2637 return false;
2638 }
2639}
2640
2641/* Strip out all handled components that produce invariant
2642 offsets. */
2643
2644static const_tree
2645strip_invariant_refs (const_tree op)
2646{
2647 while (handled_component_p (op))
2648 {
2649 switch (TREE_CODE (op))
2650 {
2651 case ARRAY_REF:
2652 case ARRAY_RANGE_REF:
2653 if (!is_gimple_constant (TREE_OPERAND (op, 1))
2654 || TREE_OPERAND (op, 2) != NULL_TREE
2655 || TREE_OPERAND (op, 3) != NULL_TREE)
2656 return NULL;
2657 break;
2658
2659 case COMPONENT_REF:
2660 if (TREE_OPERAND (op, 2) != NULL_TREE)
2661 return NULL;
2662 break;
2663
2664 default:;
2665 }
2666 op = TREE_OPERAND (op, 0);
2667 }
2668
2669 return op;
2670}
2671
2672/* Return true if T is a gimple invariant address. */
2673
2674bool
2675is_gimple_invariant_address (const_tree t)
2676{
2677 const_tree op;
2678
2679 if (TREE_CODE (t) != ADDR_EXPR)
2680 return false;
2681
2682 op = strip_invariant_refs (TREE_OPERAND (t, 0));
2683
2684 return op && (CONSTANT_CLASS_P (op) || decl_address_invariant_p (op));
2685}
2686
2687/* Return true if T is a gimple invariant address at IPA level
2688 (so addresses of variables on stack are not allowed). */
2689
2690bool
2691is_gimple_ip_invariant_address (const_tree t)
2692{
2693 const_tree op;
2694
2695 if (TREE_CODE (t) != ADDR_EXPR)
2696 return false;
2697
2698 op = strip_invariant_refs (TREE_OPERAND (t, 0));
2699
2700 return op && (CONSTANT_CLASS_P (op) || decl_address_ip_invariant_p (op));
2701}
2702
2703/* Return true if T is a GIMPLE minimal invariant. It's a restricted
2704 form of function invariant. */
2705
2706bool
2707is_gimple_min_invariant (const_tree t)
2708{
2709 if (TREE_CODE (t) == ADDR_EXPR)
2710 return is_gimple_invariant_address (t);
2711
2712 return is_gimple_constant (t);
2713}
2714
2715/* Return true if T is a GIMPLE interprocedural invariant. It's a restricted
2716 form of gimple minimal invariant. */
2717
2718bool
2719is_gimple_ip_invariant (const_tree t)
2720{
2721 if (TREE_CODE (t) == ADDR_EXPR)
2722 return is_gimple_ip_invariant_address (t);
2723
2724 return is_gimple_constant (t);
2725}
2726
2727/* Return true if T looks like a valid GIMPLE statement. */
2728
2729bool
2730is_gimple_stmt (tree t)
2731{
2732 const enum tree_code code = TREE_CODE (t);
2733
2734 switch (code)
2735 {
2736 case NOP_EXPR:
2737 /* The only valid NOP_EXPR is the empty statement. */
2738 return IS_EMPTY_STMT (t);
2739
2740 case BIND_EXPR:
2741 case COND_EXPR:
2742 /* These are only valid if they're void. */
2743 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
2744
2745 case SWITCH_EXPR:
2746 case GOTO_EXPR:
2747 case RETURN_EXPR:
2748 case LABEL_EXPR:
2749 case CASE_LABEL_EXPR:
2750 case TRY_CATCH_EXPR:
2751 case TRY_FINALLY_EXPR:
2752 case EH_FILTER_EXPR:
2753 case CATCH_EXPR:
2754 case ASM_EXPR:
2755 case STATEMENT_LIST:
2756 case OMP_PARALLEL:
2757 case OMP_FOR:
2758 case OMP_SECTIONS:
2759 case OMP_SECTION:
2760 case OMP_SINGLE:
2761 case OMP_MASTER:
2762 case OMP_ORDERED:
2763 case OMP_CRITICAL:
2764 case OMP_TASK:
2765 /* These are always void. */
2766 return true;
2767
2768 case CALL_EXPR:
2769 case MODIFY_EXPR:
2770 case PREDICT_EXPR:
2771 /* These are valid regardless of their type. */
2772 return true;
2773
2774 default:
2775 return false;
2776 }
2777}
2778
2779/* Return true if T is a variable. */
2780
2781bool
2782is_gimple_variable (tree t)
2783{
2784 return (TREE_CODE (t) == VAR_DECL
2785 || TREE_CODE (t) == PARM_DECL
2786 || TREE_CODE (t) == RESULT_DECL
2787 || TREE_CODE (t) == SSA_NAME);
2788}
2789
2790/* Return true if T is a GIMPLE identifier (something with an address). */
2791
2792bool
2793is_gimple_id (tree t)
2794{
2795 return (is_gimple_variable (t)
2796 || TREE_CODE (t) == FUNCTION_DECL
2797 || TREE_CODE (t) == LABEL_DECL
2798 || TREE_CODE (t) == CONST_DECL
2799 /* Allow string constants, since they are addressable. */
2800 || TREE_CODE (t) == STRING_CST);
2801}
2802
2803/* Return true if TYPE is a suitable type for a scalar register variable. */
2804
2805bool
2806is_gimple_reg_type (tree type)
2807{
2808 return !AGGREGATE_TYPE_P (type);
2809}
2810
2811/* Return true if T is a non-aggregate register variable. */
2812
2813bool
2814is_gimple_reg (tree t)
2815{
2816 if (TREE_CODE (t) == SSA_NAME)
2817 t = SSA_NAME_VAR (t);
2818
2819 if (!is_gimple_variable (t))
2820 return false;
2821
2822 if (!is_gimple_reg_type (TREE_TYPE (t)))
2823 return false;
2824
2825 /* A volatile decl is not acceptable because we can't reuse it as
2826 needed. We need to copy it into a temp first. */
2827 if (TREE_THIS_VOLATILE (t))
2828 return false;
2829
2830 /* We define "registers" as things that can be renamed as needed,
2831 which with our infrastructure does not apply to memory. */
2832 if (needs_to_live_in_memory (t))
2833 return false;
2834
2835 /* Hard register variables are an interesting case. For those that
2836 are call-clobbered, we don't know where all the calls are, since
2837 we don't (want to) take into account which operations will turn
2838 into libcalls at the rtl level. For those that are call-saved,
2839 we don't currently model the fact that calls may in fact change
2840 global hard registers, nor do we examine ASM_CLOBBERS at the tree
2841 level, and so miss variable changes they might imply. All around,
2842 it seems safest to not do too much optimization with these at the
2843 tree level at all. We'll have to rely on the rtl optimizers to
2844 clean this up, as there we've got all the appropriate bits exposed. */
2845 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
2846 return false;
2847
2848 /* Complex and vector values must have been put into SSA-like form.
2849 That is, no assignments to the individual components. */
2850 if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
2851 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2852 return DECL_GIMPLE_REG_P (t);
2853
2854 return true;
2855}
2856
2857
2858/* Return true if T is a GIMPLE variable whose address is not needed. */
2859
2860bool
2861is_gimple_non_addressable (tree t)
2862{
2863 if (TREE_CODE (t) == SSA_NAME)
2864 t = SSA_NAME_VAR (t);
2865
2866 return (is_gimple_variable (t) && ! needs_to_live_in_memory (t));
2867}
2868
2869/* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant. */
2870
2871bool
2872is_gimple_val (tree t)
2873{
2874 /* Make loads from volatiles and memory vars explicit. */
2875 if (is_gimple_variable (t)
2876 && is_gimple_reg_type (TREE_TYPE (t))
2877 && !is_gimple_reg (t))
2878 return false;
2879
2880 return (is_gimple_variable (t) || is_gimple_min_invariant (t));
2881}
2882
2883/* Similarly, but accept hard registers as inputs to asm statements. */
2884
2885bool
2886is_gimple_asm_val (tree t)
2887{
2888 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
2889 return true;
2890
2891 return is_gimple_val (t);
2892}
2893
2894/* Return true if T is a GIMPLE minimal lvalue. */
2895
2896bool
2897is_gimple_min_lval (tree t)
2898{
2899 if (!(t = CONST_CAST_TREE (strip_invariant_refs (t))))
2900 return false;
2901 return (is_gimple_id (t) || TREE_CODE (t) == INDIRECT_REF);
2902}
2903
2904/* Return true if T is a typecast operation. */
2905
2906bool
2907is_gimple_cast (tree t)
2908{
2909 return (CONVERT_EXPR_P (t)
2910 || TREE_CODE (t) == FIX_TRUNC_EXPR);
2911}
2912
2913/* Return true if T is a valid function operand of a CALL_EXPR. */
2914
2915bool
2916is_gimple_call_addr (tree t)
2917{
2918 return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t));
2919}
2920
2921/* If T makes a function call, return the corresponding CALL_EXPR operand.
2922 Otherwise, return NULL_TREE. */
2923
2924tree
2925get_call_expr_in (tree t)
2926{
2927 if (TREE_CODE (t) == MODIFY_EXPR)
2928 t = TREE_OPERAND (t, 1);
2929 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2930 t = TREE_OPERAND (t, 0);
2931 if (TREE_CODE (t) == CALL_EXPR)
2932 return t;
2933 return NULL_TREE;
2934}
2935
2936
2937/* Given a memory reference expression T, return its base address.
2938 The base address of a memory reference expression is the main
2939 object being referenced. For instance, the base address for
2940 'array[i].fld[j]' is 'array'. You can think of this as stripping
2941 away the offset part from a memory address.
2942
2943 This function calls handled_component_p to strip away all the inner
2944 parts of the memory reference until it reaches the base object. */
2945
2946tree
2947get_base_address (tree t)
2948{
2949 while (handled_component_p (t))
2950 t = TREE_OPERAND (t, 0);
2951
2952 if (SSA_VAR_P (t)
2953 || TREE_CODE (t) == STRING_CST
2954 || TREE_CODE (t) == CONSTRUCTOR
2955 || INDIRECT_REF_P (t))
2956 return t;
2957 else
2958 return NULL_TREE;
2959}
2960
2961void
2962recalculate_side_effects (tree t)
2963{
2964 enum tree_code code = TREE_CODE (t);
2965 int len = TREE_OPERAND_LENGTH (t);
2966 int i;
2967
2968 switch (TREE_CODE_CLASS (code))
2969 {
2970 case tcc_expression:
2971 switch (code)
2972 {
2973 case INIT_EXPR:
2974 case MODIFY_EXPR:
2975 case VA_ARG_EXPR:
2976 case PREDECREMENT_EXPR:
2977 case PREINCREMENT_EXPR:
2978 case POSTDECREMENT_EXPR:
2979 case POSTINCREMENT_EXPR:
2980 /* All of these have side-effects, no matter what their
2981 operands are. */
2982 return;
2983
2984 default:
2985 break;
2986 }
2987 /* Fall through. */
2988
2989 case tcc_comparison: /* a comparison expression */
2990 case tcc_unary: /* a unary arithmetic expression */
2991 case tcc_binary: /* a binary arithmetic expression */
2992 case tcc_reference: /* a reference */
2993 case tcc_vl_exp: /* a function call */
2994 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
2995 for (i = 0; i < len; ++i)
2996 {
2997 tree op = TREE_OPERAND (t, i);
2998 if (op && TREE_SIDE_EFFECTS (op))
2999 TREE_SIDE_EFFECTS (t) = 1;
3000 }
3001 break;
3002
3003 case tcc_constant:
3004 /* No side-effects. */
3005 return;
3006
3007 default:
3008 gcc_unreachable ();
3009 }
3010}
3011
3012/* Canonicalize a tree T for use as the conditional of a COND_EXPR.
3013 Returns a canonicalized tree that is valid for a COND_EXPR, or
3014 NULL_TREE if we failed to create one. */
3015
3016tree
3017canonicalize_cond_expr_cond (tree t)
3018{
3019 /* Strip conversions around boolean operations. */
3020 if (CONVERT_EXPR_P (t)
3021 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 0))))
3022 t = TREE_OPERAND (t, 0);
3023
3024 /* For (bool)x use x != 0. */
3025 if (CONVERT_EXPR_P (t)
3026 && TREE_CODE (TREE_TYPE (t)) == BOOLEAN_TYPE)
3027 {
3028 tree top0 = TREE_OPERAND (t, 0);
3029 t = build2 (NE_EXPR, TREE_TYPE (t),
3030 top0, build_int_cst (TREE_TYPE (top0), 0));
3031 }
3032 /* For !x use x == 0. */
3033 else if (TREE_CODE (t) == TRUTH_NOT_EXPR)
3034 {
3035 tree top0 = TREE_OPERAND (t, 0);
3036 t = build2 (EQ_EXPR, TREE_TYPE (t),
3037 top0, build_int_cst (TREE_TYPE (top0), 0));
3038 }
3039 /* For cmp ? 1 : 0 use cmp. */
3040 else if (TREE_CODE (t) == COND_EXPR
3041 && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
3042 && integer_onep (TREE_OPERAND (t, 1))
3043 && integer_zerop (TREE_OPERAND (t, 2)))
3044 {
3045 tree top0 = TREE_OPERAND (t, 0);
3046 t = build2 (TREE_CODE (top0), TREE_TYPE (t),
3047 TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
3048 }
3049
3050 if (is_gimple_condexpr (t))
3051 return t;
3052
3053 return NULL_TREE;
3054}
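
/* For instance, `(_Bool) x_1' becomes `x_1 != 0', `!x_2' becomes
   `x_2 == 0', and `a_3 < b_4 ? 1 : 0' becomes `a_3 < b_4'.  If the
   rewritten tree still is not a valid GIMPLE condition, NULL_TREE is
   returned.  */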
3055
3056/* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
3057 the positions marked by the set ARGS_TO_SKIP. */
3058
3059gimple
3060gimple_call_copy_skip_args (gimple stmt, bitmap args_to_skip)
3061{
3062 int i;
3063 tree fn = gimple_call_fn (stmt);
3064 int nargs = gimple_call_num_args (stmt);
3065 VEC(tree, heap) *vargs = VEC_alloc (tree, heap, nargs);
3066 gimple new_stmt;
3067
3068 for (i = 0; i < nargs; i++)
3069 if (!bitmap_bit_p (args_to_skip, i))
3070 VEC_quick_push (tree, vargs, gimple_call_arg (stmt, i));
3071
3072 new_stmt = gimple_build_call_vec (fn, vargs);
3073 VEC_free (tree, heap, vargs);
3074 if (gimple_call_lhs (stmt))
3075 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3076
3077 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3078 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3079
3080 gimple_set_block (new_stmt, gimple_block (stmt));
3081 if (gimple_has_location (stmt))
3082 gimple_set_location (new_stmt, gimple_location (stmt));
3083
3084 /* Carry all the flags to the new GIMPLE_CALL. */
3085 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3086 gimple_call_set_tail (new_stmt, gimple_call_tail_p (stmt));
3087 gimple_call_set_cannot_inline (new_stmt, gimple_call_cannot_inline_p (stmt));
3088 gimple_call_set_return_slot_opt (new_stmt, gimple_call_return_slot_opt_p (stmt));
3089 gimple_call_set_from_thunk (new_stmt, gimple_call_from_thunk_p (stmt));
3090 gimple_call_set_va_arg_pack (new_stmt, gimple_call_va_arg_pack_p (stmt));
3091
3092 gimple_set_modified (new_stmt, true);
3093
3094 return new_stmt;
3095}
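
/* Sketch: dropping the second argument of `foo (a_1, b_2, c_3)'
   (SKIP, CALL_STMT and NEW_CALL are illustrative locals):

     bitmap skip = BITMAP_ALLOC (NULL);
     bitmap_set_bit (skip, 1);
     new_call = gimple_call_copy_skip_args (call_stmt, skip);
     BITMAP_FREE (skip);

   yields `foo (a_1, c_3)' with the flags and vops carried over.  */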
3096
3097
3098static hashval_t gimple_type_hash (const void *);
3099
3100/* Structure used to maintain a cache of some type pairs compared by
3101 gimple_types_compatible_p when comparing aggregate types. There are
3102 four possible values for SAME_P:
3103
3104 -2: The pair (T1, T2) has just been inserted in the table.
3105 -1: The pair (T1, T2) is currently being compared.
3106 0: T1 and T2 are different types.
3107 1: T1 and T2 are the same type.
3108
3109 This table is only used when comparing aggregate types to avoid
3110 infinite recursion due to self-referential types. */
3111struct type_pair_d
3112{
3113 unsigned int uid1;
3114 unsigned int uid2;
3115 int same_p;
3116};
3117typedef struct type_pair_d *type_pair_t;
3118
3119/* Return a hash value for the type pair pointed-to by P. */
3120
3121static hashval_t
3122type_pair_hash (const void *p)
3123{
3124 const struct type_pair_d *pair = (const struct type_pair_d *) p;
3125 hashval_t val1 = pair->uid1;
3126 hashval_t val2 = pair->uid2;
3127 return (iterative_hash_hashval_t (val2, val1)
3128 ^ iterative_hash_hashval_t (val1, val2));
3129}
3130
3131/* Compare two type pairs pointed-to by P1 and P2. */
3132
3133static int
3134type_pair_eq (const void *p1, const void *p2)
3135{
3136 const struct type_pair_d *pair1 = (const struct type_pair_d *) p1;
3137 const struct type_pair_d *pair2 = (const struct type_pair_d *) p2;
3138 return ((pair1->uid1 == pair2->uid1 && pair1->uid2 == pair2->uid2)
3139 || (pair1->uid1 == pair2->uid2 && pair1->uid2 == pair2->uid1));
3140}
3141
3142/* Lookup the pair of types T1 and T2 in *VISITED_P. Insert a new
3143 entry if none existed. */
3144
3145static type_pair_t
3146lookup_type_pair (tree t1, tree t2, htab_t *visited_p, struct obstack *ob_p)
3147{
3148 struct type_pair_d pair;
3149 type_pair_t p;
3150 void **slot;
3151
3152 if (*visited_p == NULL)
3153 {
3154 *visited_p = htab_create (251, type_pair_hash, type_pair_eq, NULL);
3155 gcc_obstack_init (ob_p);
3156 }
3157
3158 pair.uid1 = TYPE_UID (t1);
3159 pair.uid2 = TYPE_UID (t2);
3160 slot = htab_find_slot (*visited_p, &pair, INSERT);
3161
3162 if (*slot)
3163 p = *((type_pair_t *) slot);
3164 else
3165 {
3166 p = XOBNEW (ob_p, struct type_pair_d);
3167 p->uid1 = TYPE_UID (t1);
3168 p->uid2 = TYPE_UID (t2);
3169 p->same_p = -2;
3170 *slot = (void *) p;
3171 }
3172
3173 return p;
3174}
3175
3176
3177/* Return true if T1 and T2 have the same name. If FOR_COMPLETION_P
3178 is true, return false when either type lacks a name; otherwise,
3179 two nameless types compare the same. */
3180
3181static bool
3182compare_type_names_p (tree t1, tree t2, bool for_completion_p)
3183{
3184 tree name1 = TYPE_NAME (t1);
3185 tree name2 = TYPE_NAME (t2);
3186
3187 /* Consider anonymous types all unique for completion. */
3188 if (for_completion_p
3189 && (!name1 || !name2))
3190 return false;
3191
3192 if (name1 && TREE_CODE (name1) == TYPE_DECL)
3193 {
3194 name1 = DECL_NAME (name1);
3195 if (for_completion_p
3196 && !name1)
3197 return false;
3198 }
3199 gcc_assert (!name1 || TREE_CODE (name1) == IDENTIFIER_NODE);
3200
3201 if (name2 && TREE_CODE (name2) == TYPE_DECL)
3202 {
3203 name2 = DECL_NAME (name2);
3204 if (for_completion_p
3205 && !name2)
3206 return false;
3207 }
3208 gcc_assert (!name2 || TREE_CODE (name2) == IDENTIFIER_NODE);
3209
3210 /* Identifiers can be compared with pointer equality rather
3211 than a string comparison. */
3212 if (name1 == name2)
3213 return true;
3214
3215 return false;
3216}
3217
3218/* Return true if the field decls F1 and F2 are at the same offset. */
3219
3220bool
3221compare_field_offset (tree f1, tree f2)
3222{
3223 if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2))
3224 return (operand_equal_p (DECL_FIELD_OFFSET (f1),
3225 DECL_FIELD_OFFSET (f2), 0)
3226 && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1),
3227 DECL_FIELD_BIT_OFFSET (f2)));
3228
3229 /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
3230 should be, so handle differing ones specially by decomposing
3231 the offset into a byte and bit offset manually. */
3232 if (host_integerp (DECL_FIELD_OFFSET (f1), 0)
3233 && host_integerp (DECL_FIELD_OFFSET (f2), 0))
3234 {
3235 unsigned HOST_WIDE_INT byte_offset1, byte_offset2;
3236 unsigned HOST_WIDE_INT bit_offset1, bit_offset2;
3237 bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1));
3238 byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1))
3239 + bit_offset1 / BITS_PER_UNIT);
3240 bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2));
3241 byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2))
3242 + bit_offset2 / BITS_PER_UNIT);
3243 if (byte_offset1 != byte_offset2)
3244 return false;
3245 return bit_offset1 % BITS_PER_UNIT == bit_offset2 % BITS_PER_UNIT;
3246 }
3247
3248 return false;
3249}
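
/* Worked example of the decomposition above: a field recorded with
   DECL_FIELD_OFFSET 1 (byte) and DECL_FIELD_BIT_OFFSET 10 denotes
   byte 2, bit 2, and therefore compares equal to a field recorded
   with byte offset 2 and bit offset 2.  */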
3250
3251/* Return 1 iff T1 and T2 are structurally identical.
3252 Otherwise, return 0. */
3253
3254static int
3255gimple_types_compatible_p (tree t1, tree t2)
3256{
3257 type_pair_t p = NULL;
3258
3259 /* Check first for the obvious case of pointer identity. */
3260 if (t1 == t2)
3261 return 1;
3262
3263 /* Check that we have two types to compare. */
3264 if (t1 == NULL_TREE || t2 == NULL_TREE)
3265 return 0;
3266
3267 /* Can't be the same type if the types don't have the same code. */
3268 if (TREE_CODE (t1) != TREE_CODE (t2))
3269 return 0;
3270
3271 /* Can't be the same type if they have different CV qualifiers. */
3272 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
3273 return 0;
3274
3275 /* Void types are always the same. */
3276 if (TREE_CODE (t1) == VOID_TYPE)
3277 return 1;
3278
3279 /* For numerical types do some simple checks before doing three
3280 hashtable queries. */
3281 if (INTEGRAL_TYPE_P (t1)
3282 || SCALAR_FLOAT_TYPE_P (t1)
3283 || FIXED_POINT_TYPE_P (t1)
3284 || TREE_CODE (t1) == VECTOR_TYPE
3285 || TREE_CODE (t1) == COMPLEX_TYPE
3286 || TREE_CODE (t1) == OFFSET_TYPE)
3287 {
3288 /* Can't be the same type if they have different alignment,
3289 sign, precision or mode. */
3290 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
3291 || TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
3292 || TYPE_MODE (t1) != TYPE_MODE (t2)
3293 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
3294 return 0;
3295
3296 if (TREE_CODE (t1) == INTEGER_TYPE
3297 && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
3298 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
3299 return 0;
3300
3301 /* That's all we need to check for float and fixed-point types. */
3302 if (SCALAR_FLOAT_TYPE_P (t1)
3303 || FIXED_POINT_TYPE_P (t1))
3304 return 1;
3305
3306 /* Perform cheap tail-recursion for vector and complex types. */
3307 if (TREE_CODE (t1) == VECTOR_TYPE
3308 || TREE_CODE (t1) == COMPLEX_TYPE)
3309 return gimple_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2));
3310
3311 /* For integral types fall thru to more complex checks. */
3312 }
3313
3314 /* If the hash values of t1 and t2 are different the types can't
3315 possibly be the same. This helps keeping the type-pair hashtable
3316 small, only tracking comparisons for hash collisions. */
3317 if (gimple_type_hash (t1) != gimple_type_hash (t2))
3318 return 0;
3319
3320 /* If we've visited this type pair before (in the case of aggregates
3321 with self-referential types), and we made a decision, return it. */
3322 p = lookup_type_pair (t1, t2, &gtc_visited, &gtc_ob);
3323 if (p->same_p == 0 || p->same_p == 1)
3324 {
3325 /* We have already decided whether T1 and T2 are the
3326 same, return the cached result. */
3327 return p->same_p == 1;
3328 }
3329 else if (p->same_p == -1)
3330 {
3331 /* We are currently comparing this pair of types, assume
3332 that they are the same and let the caller decide. */
3333 return 1;
3334 }
3335
3336 gcc_assert (p->same_p == -2);
3337
3338 /* Mark the (T1, T2) comparison in progress. */
3339 p->same_p = -1;
3340
3341 /* If their attributes are not the same they can't be the same type. */
3342 if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
3343 goto different_types;
3344
3345 /* Do type-specific comparisons. */
3346 switch (TREE_CODE (t1))
3347 {
3348 case ARRAY_TYPE:
3349 /* Array types are the same if the element types are the same and
3350 the number of elements is the same. */
3351 if (!gimple_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))
3352 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
3353 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
3354 goto different_types;
3355 else
3356 {
3357 tree i1 = TYPE_DOMAIN (t1);
3358 tree i2 = TYPE_DOMAIN (t2);
3359
3360 /* For an incomplete external array, the type domain can be
3361 NULL_TREE. Check this condition also. */
3362 if (i1 == NULL_TREE && i2 == NULL_TREE)
3363 goto same_types;
3364 else if (i1 == NULL_TREE || i2 == NULL_TREE)
3365 goto different_types;
3366 /* If for a complete array type the possibly gimplified sizes
3367 are different the types are different. */
3368 else if (((TYPE_SIZE (i1) != NULL) ^ (TYPE_SIZE (i2) != NULL))
3369 || (TYPE_SIZE (i1)
3370 && TYPE_SIZE (i2)
3371 && !operand_equal_p (TYPE_SIZE (i1), TYPE_SIZE (i2), 0)))
3372 goto different_types;
3373 else
3374 {
3375 tree min1 = TYPE_MIN_VALUE (i1);
3376 tree min2 = TYPE_MIN_VALUE (i2);
3377 tree max1 = TYPE_MAX_VALUE (i1);
3378 tree max2 = TYPE_MAX_VALUE (i2);
3379
3380 /* The minimum/maximum values have to be the same. */
3381 if ((min1 == min2
3382 || (min1 && min2
3383 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
3384 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
3385 || operand_equal_p (min1, min2, 0))))
d7f09764 3386 && (max1 == max2
f56000ed
EB
3387 || (max1 && max2
3388 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
3389 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
3390 || operand_equal_p (max1, max2, 0)))))
3391 goto same_types;
3392 else
3393 goto different_types;
3394 }
3395 }
3396
3397 case METHOD_TYPE:
3398 /* Method types should belong to the same class. */
3399 if (!gimple_types_compatible_p (TYPE_METHOD_BASETYPE (t1),
3400 TYPE_METHOD_BASETYPE (t2)))
3401 goto different_types;
3402
3403 /* Fallthru */
3404
3405 case FUNCTION_TYPE:
3406 /* Function types are the same if the return type and arguments types
3407 are the same. */
3408 if (!gimple_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))
3409 goto different_types;
3410 else
3411 {
3412 if (!targetm.comp_type_attributes (t1, t2))
3413 goto different_types;
3414
3415 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
3416 goto same_types;
3417 else
3418 {
3419 tree parms1, parms2;
3420
3421 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
3422 parms1 && parms2;
3423 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
3424 {
3425 if (!gimple_types_compatible_p (TREE_VALUE (parms1),
3426 TREE_VALUE (parms2)))
3427 goto different_types;
3428 }
3429
3430 if (parms1 || parms2)
3431 goto different_types;
3432
3433 goto same_types;
3434 }
3435 }
3436
3437 case OFFSET_TYPE:
3438 {
3439 if (!gimple_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))
3440 || !gimple_types_compatible_p (TYPE_OFFSET_BASETYPE (t1),
3441 TYPE_OFFSET_BASETYPE (t2)))
3442 goto different_types;
3443
3444 goto same_types;
3445 }
3446
3447 case POINTER_TYPE:
3448 case REFERENCE_TYPE:
3449 {
3450 /* If the two pointers have different ref-all attributes,
3451 they can't be the same type. */
3452 if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
3453 goto different_types;
3454
3455 /* If one pointer points to an incomplete type variant of
3456 the other pointed-to type they are the same. */
3457 if (TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
3458 && RECORD_OR_UNION_TYPE_P (TREE_TYPE (t1))
3459 && (!COMPLETE_TYPE_P (TREE_TYPE (t1))
3460 || !COMPLETE_TYPE_P (TREE_TYPE (t2)))
3461 && compare_type_names_p (TYPE_MAIN_VARIANT (TREE_TYPE (t1)),
3462 TYPE_MAIN_VARIANT (TREE_TYPE (t2)), true))
3463 {
3464 /* Replace the pointed-to incomplete type with the
3465 complete one. */
3466 if (COMPLETE_TYPE_P (TREE_TYPE (t2)))
3467 TREE_TYPE (t1) = TREE_TYPE (t2);
3468 else
3469 TREE_TYPE (t2) = TREE_TYPE (t1);
3470 goto same_types;
3471 }
3472
3473 /* Otherwise, pointer and reference types are the same if the
3474 pointed-to types are the same. */
3475 if (gimple_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))
3476 goto same_types;
3477
3478 goto different_types;
3479 }
3480
3481 case INTEGER_TYPE:
3482 case BOOLEAN_TYPE:
3483 {
3484 tree min1 = TYPE_MIN_VALUE (t1);
3485 tree max1 = TYPE_MAX_VALUE (t1);
3486 tree min2 = TYPE_MIN_VALUE (t2);
3487 tree max2 = TYPE_MAX_VALUE (t2);
3488 bool min_equal_p = false;
3489 bool max_equal_p = false;
3490
3491 /* If either type has a minimum value, the other type must
3492 have the same. */
3493 if (min1 == NULL_TREE && min2 == NULL_TREE)
3494 min_equal_p = true;
3495 else if (min1 && min2 && operand_equal_p (min1, min2, 0))
3496 min_equal_p = true;
3497
3498 /* Likewise, if either type has a maximum value, the other
3499 type must have the same. */
3500 if (max1 == NULL_TREE && max2 == NULL_TREE)
3501 max_equal_p = true;
3502 else if (max1 && max2 && operand_equal_p (max1, max2, 0))
3503 max_equal_p = true;
3504
3505 if (!min_equal_p || !max_equal_p)
3506 goto different_types;
3507
3508 goto same_types;
3509 }
3510
3511 case ENUMERAL_TYPE:
3512 {
3513 /* FIXME lto, we cannot check bounds on enumeral types because
3514 different front ends will produce different values.
3515 In C, enumeral types are integers, while in C++ each element
3516 will have its own symbolic value. We should decide how enums
3517 are to be represented in GIMPLE and have each front end lower
3518 to that. */
3519 tree v1, v2;
3520
3521 /* For enumeral types, all the values must be the same. */
3522 if (TYPE_VALUES (t1) == TYPE_VALUES (t2))
3523 goto same_types;
3524
3525 for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
3526 v1 && v2;
3527 v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
3528 {
3529 tree c1 = TREE_VALUE (v1);
3530 tree c2 = TREE_VALUE (v2);
3531
3532 if (TREE_CODE (c1) == CONST_DECL)
3533 c1 = DECL_INITIAL (c1);
3534
3535 if (TREE_CODE (c2) == CONST_DECL)
3536 c2 = DECL_INITIAL (c2);
3537
3538 if (tree_int_cst_equal (c1, c2) != 1)
3539 goto different_types;
3540 }
3541
3542 /* If one enumeration has more values than the other, they
3543 are not the same. */
3544 if (v1 || v2)
3545 goto different_types;
3546
3547 goto same_types;
3548 }
3549
3550 case RECORD_TYPE:
3551 case UNION_TYPE:
3552 case QUAL_UNION_TYPE:
3553 {
3554 tree f1, f2;
3555
3556 /* If one type requires structural equality checks and the
3557 other doesn't, do not merge the types. */
3558 if (TYPE_STRUCTURAL_EQUALITY_P (t1)
3559 != TYPE_STRUCTURAL_EQUALITY_P (t2))
3560 goto different_types;
3561
3562 /* The struct tags shall compare equal. */
3563 if (!compare_type_names_p (TYPE_MAIN_VARIANT (t1),
3564 TYPE_MAIN_VARIANT (t2), false))
3565 goto different_types;
3566
3567 /* For aggregate types, all the fields must be the same. */
3568 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
3569 f1 && f2;
3570 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
3571 {
3572 /* The fields must have the same name, offset and type. */
3573 if (DECL_NAME (f1) != DECL_NAME (f2)
3574 || DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
3575 || !compare_field_offset (f1, f2)
3576 || !gimple_types_compatible_p (TREE_TYPE (f1),
3577 TREE_TYPE (f2)))
3578 goto different_types;
3579 }
3580
3581 /* If one aggregate has more fields than the other, they
3582 are not the same. */
3583 if (f1 || f2)
3584 goto different_types;
3585
3586 goto same_types;
3587 }
3588
3589 default:
3590 gcc_unreachable ();
3591 }
3592
3593 /* Common exit path for types that are not compatible. */
3594different_types:
3595 p->same_p = 0;
3596 return 0;
3597
3598 /* Common exit path for types that are compatible. */
3599same_types:
3600 p->same_p = 1;
3601 return 1;
3602}
3603
3604
3605
3606
3607/* Per pointer state for the SCC finding. The on_sccstack flag
3608 is not strictly required, it is true when there is no hash value
3609 recorded for the type and false otherwise. But querying that
3610 is slower. */
3611
3612struct sccs
3613{
3614 unsigned int dfsnum;
3615 unsigned int low;
3616 bool on_sccstack;
3617 hashval_t hash;
3618};
3619
3620static unsigned int next_dfs_num;
3621
3622static hashval_t
3623iterative_hash_gimple_type (tree, hashval_t, VEC(tree, heap) **,
3624 struct pointer_map_t *, struct obstack *);
3625
3626/* DFS visit the edge from the caller's type with state *STATE to T.
3627 Update the callers type hash V with the hash for T if it is not part
3628 of the SCC containing the callers type and return it.
3629 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
3630
3631static hashval_t
3632visit (tree t, struct sccs *state, hashval_t v,
3633 VEC (tree, heap) **sccstack,
3634 struct pointer_map_t *sccstate,
3635 struct obstack *sccstate_obstack)
3636{
3637 struct sccs *cstate = NULL;
3638 void **slot;
3639
3640 /* If there is a hash value recorded for this type then it can't
3641 possibly be part of our parent SCC. Simply mix in its hash. */
3642 if ((slot = pointer_map_contains (type_hash_cache, t)))
3643 return iterative_hash_hashval_t ((hashval_t) (size_t) *slot, v);
3644
3645 if ((slot = pointer_map_contains (sccstate, t)) != NULL)
3646 cstate = (struct sccs *)*slot;
3647 if (!cstate)
3648 {
3649 hashval_t tem;
3650 /* Not yet visited. DFS recurse. */
3651 tem = iterative_hash_gimple_type (t, v,
3652 sccstack, sccstate, sccstate_obstack);
3653 if (!cstate)
3654 cstate = (struct sccs *)* pointer_map_contains (sccstate, t);
3655 state->low = MIN (state->low, cstate->low);
3656 /* If the type is no longer on the SCC stack and thus is not part
3657 of the parent's SCC, mix in its hash value. Otherwise we will
3658 ignore the type for hashing purposes and return the unaltered
3659 hash value. */
3660 if (!cstate->on_sccstack)
3661 return tem;
3662 }
3663 if (cstate->dfsnum < state->dfsnum
3664 && cstate->on_sccstack)
3665 state->low = MIN (cstate->dfsnum, state->low);
3666
3667 /* We are part of our parent's SCC, skip this type during hashing
3668 and return the unaltered hash value. */
3669 return v;
3670}
3671
3672/* Hash NAME with the previous hash value V and return it. */
3673
3674static hashval_t
3675iterative_hash_name (tree name, hashval_t v)
3676{
3677 if (!name)
3678 return v;
3679 if (TREE_CODE (name) == TYPE_DECL)
3680 name = DECL_NAME (name);
3681 if (!name)
3682 return v;
3683 gcc_assert (TREE_CODE (name) == IDENTIFIER_NODE);
3684 return iterative_hash_object (IDENTIFIER_HASH_VALUE (name), v);
3685}
3686
3687/* Return a hash value for gimple type TYPE combined with VAL.
3688 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.
3689
3690 To hash a type we end up hashing in types that are reachable.
3691 Through pointers we can end up with cycles which messes up the
3692 required property that we need to compute the same hash value
3693 for structurally equivalent types. To avoid this we have to
3694 hash all types in a cycle (the SCC) in a commutative way. The
3695 easiest way is to not mix in the hashes of the SCC members at
3696 all. To make this work we have to delay setting the hash
3697 values of the SCC until it is complete. */
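
/* E.g., `struct A { struct B *b; };' and `struct B { struct A *a; };'
   form one SCC; while hashing A, the contribution of B (still on the
   SCC stack) is skipped, so a structurally identical copy of the cycle
   coming from another translation unit hashes the same.  */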
3698
3699static hashval_t
3700iterative_hash_gimple_type (tree type, hashval_t val,
3701 VEC(tree, heap) **sccstack,
3702 struct pointer_map_t *sccstate,
3703 struct obstack *sccstate_obstack)
3704{
3705 hashval_t v;
3706 void **slot;
3707 struct sccs *state;
3708
3709#ifdef ENABLE_CHECKING
3710 /* Not visited during this DFS walk nor during previous walks. */
3711 gcc_assert (!pointer_map_contains (type_hash_cache, type)
3712 && !pointer_map_contains (sccstate, type));
3713#endif
3714 state = XOBNEW (sccstate_obstack, struct sccs);
3715 *pointer_map_insert (sccstate, type) = state;
3716
3717 VEC_safe_push (tree, heap, *sccstack, type);
3718 state->dfsnum = next_dfs_num++;
3719 state->low = state->dfsnum;
3720 state->on_sccstack = true;
3721
3722 /* Combine a few common features of types so that types are grouped into
3723 smaller sets; when searching for existing matching types to merge,
3724 only existing types having the same features as the new type will be
3725 checked. */
3726 v = iterative_hash_hashval_t (TREE_CODE (type), 0);
3727 v = iterative_hash_hashval_t (TYPE_QUALS (type), v);
3728 v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);
3729
3730 /* Do not hash the types size as this will cause differences in
3731 hash values for the complete vs. the incomplete type variant. */
3732
3733 /* Incorporate common features of numerical types. */
3734 if (INTEGRAL_TYPE_P (type)
3735 || SCALAR_FLOAT_TYPE_P (type)
3736 || FIXED_POINT_TYPE_P (type))
3737 {
3738 v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
3739 v = iterative_hash_hashval_t (TYPE_MODE (type), v);
3740 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
3741 }
3742
3743 /* For pointer and reference types, fold in information about the type
3744 pointed to but do not recurse into possibly incomplete types to
3745 avoid hash differences for complete vs. incomplete types. */
3746 if (POINTER_TYPE_P (type))
3747 {
3748 if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (type)))
3749 {
3750 v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
3751 v = iterative_hash_name
3752 (TYPE_NAME (TYPE_MAIN_VARIANT (TREE_TYPE (type))), v);
3753 }
3754 else
3755 v = visit (TREE_TYPE (type), state, v,
3756 sccstack, sccstate, sccstate_obstack);
3757 }
3758
3759 /* For integer types hash the types min/max values and the string flag. */
3760 if (TREE_CODE (type) == INTEGER_TYPE)
3761 {
3762 /* OMP lowering can introduce error_mark_node in place of
3763 random local decls in types. */
3764 if (TYPE_MIN_VALUE (type) != error_mark_node)
3765 v = iterative_hash_expr (TYPE_MIN_VALUE (type), v);
3766 if (TYPE_MAX_VALUE (type) != error_mark_node)
3767 v = iterative_hash_expr (TYPE_MAX_VALUE (type), v);
3768 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
3769 }
3770
3771 /* For array types hash their domain and the string flag. */
3772 if (TREE_CODE (type) == ARRAY_TYPE
3773 && TYPE_DOMAIN (type))
3774 {
3775 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
3776 v = visit (TYPE_DOMAIN (type), state, v,
3777 sccstack, sccstate, sccstate_obstack);
3778 }
3779
3780 /* Recurse for aggregates with a single element type. */
d7f09764
DN
3781 if (TREE_CODE (type) == ARRAY_TYPE
3782 || TREE_CODE (type) == COMPLEX_TYPE
3783 || TREE_CODE (type) == VECTOR_TYPE)
3784 v = visit (TREE_TYPE (type), state, v,
3785 sccstack, sccstate, sccstate_obstack);
3786
3787 /* Incorporate function return and argument types. */
3788 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
3789 {
3790 unsigned na;
3791 tree p;
3792
3793 /* For method types also incorporate their parent class. */
3794 if (TREE_CODE (type) == METHOD_TYPE)
3795 v = visit (TYPE_METHOD_BASETYPE (type), state, v,
3796 sccstack, sccstate, sccstate_obstack);
3797
3798 v = visit (TREE_TYPE (type), state, v,
3799 sccstack, sccstate, sccstate_obstack);
3800
3801 for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
3802 {
3803 v = visit (TREE_VALUE (p), state, v,
3804 sccstack, sccstate, sccstate_obstack);
3805 na++;
3806 }
3807
3808 v = iterative_hash_hashval_t (na, v);
3809 }
3810
3811 if (TREE_CODE (type) == RECORD_TYPE
3812 || TREE_CODE (type) == UNION_TYPE
3813 || TREE_CODE (type) == QUAL_UNION_TYPE)
3814 {
3815 unsigned nf;
3816 tree f;
3817
77785f4f 3818 v = iterative_hash_name (TYPE_NAME (TYPE_MAIN_VARIANT (type)), v);
d7f09764
DN
3819
3820 for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
3821 {
77785f4f 3822 v = iterative_hash_name (DECL_NAME (f), v);
d7f09764
DN
3823 v = visit (TREE_TYPE (f), state, v,
3824 sccstack, sccstate, sccstate_obstack);
3825 nf++;
3826 }
3827
3828 v = iterative_hash_hashval_t (nf, v);
3829 }
3830
3831 /* Record hash for us. */
3832 state->hash = v;
3833
3834 /* See if we found an SCC. */
3835 if (state->low == state->dfsnum)
3836 {
3837 tree x;
3838
3839 /* Pop off the SCC and set its hash values. */
3840 do
3841 {
3842 struct sccs *cstate;
3843 x = VEC_pop (tree, *sccstack);
3844 gcc_assert (!pointer_map_contains (type_hash_cache, x));
3845 cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
3846 cstate->on_sccstack = false;
3847 slot = pointer_map_insert (type_hash_cache, x);
3848 *slot = (void *) (size_t) cstate->hash;
3849 }
3850 while (x != type);
3851 }
3852
3853 return iterative_hash_hashval_t (v, val);
3854}
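
/* An illustrative sketch (added here for exposition; not part of the
   original sources): the delayed SCC hashing above exists because of
   cyclic types.  The two hypothetical records below reach each other
   through their pointer fields and thus form a two-member SCC.  A
   naive recursive hash would not terminate on them, and mixing member
   hashes in DFS order would make the result depend on which member the
   walk happened to enter first.  Delaying the hashes until the SCC is
   popped gives both members a stable, entry-point independent value.  */

struct gimple_hash_sketch_s { struct gimple_hash_sketch_t *t; };
struct gimple_hash_sketch_t { struct gimple_hash_sketch_s *s; };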


/* Returns a hash value for P (assumed to be a type).  The hash value
   is computed using some distinguishing features of the type.  Note
   that we cannot use pointer hashing here as we may be dealing with
   two distinct instances of the same type.

   This function should produce the same hash value for two compatible
   types according to gimple_types_compatible_p.  */

static hashval_t
gimple_type_hash (const void *p)
{
  const_tree t = (const_tree) p;
  VEC(tree, heap) *sccstack = NULL;
  struct pointer_map_t *sccstate;
  struct obstack sccstate_obstack;
  hashval_t val;
  void **slot;

  if (type_hash_cache == NULL)
    type_hash_cache = pointer_map_create ();

  if ((slot = pointer_map_contains (type_hash_cache, p)) != NULL)
    return iterative_hash_hashval_t ((hashval_t) (size_t) *slot, 0);

  /* Perform a DFS walk and pre-hash all reachable types.  */
  next_dfs_num = 1;
  sccstate = pointer_map_create ();
  gcc_obstack_init (&sccstate_obstack);
  val = iterative_hash_gimple_type (CONST_CAST_TREE (t), 0,
                                    &sccstack, sccstate, &sccstate_obstack);
  VEC_free (tree, heap, sccstack);
  pointer_map_destroy (sccstate);
  obstack_free (&sccstate_obstack, NULL);

  return val;
}
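
/* A minimal usage sketch (the helper below is hypothetical and not part
   of the original file).  The DFS walk above pre-hashes every reachable
   type into type_hash_cache, so repeated queries are cheap and
   deterministic.  */

static void ATTRIBUTE_UNUSED
gimple_type_hash_sketch (tree t)
{
  hashval_t h1 = gimple_type_hash (t);
  /* The second call is answered from type_hash_cache.  */
  hashval_t h2 = gimple_type_hash (t);
  gcc_assert (h1 == h2);
}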


/* Returns nonzero if P1 and P2 are equal.  */

static int
gimple_type_eq (const void *p1, const void *p2)
{
  const_tree t1 = (const_tree) p1;
  const_tree t2 = (const_tree) p2;
  return gimple_types_compatible_p (CONST_CAST_TREE (t1),
                                    CONST_CAST_TREE (t2));
}


/* Register type T in the global type table gimple_types.
   If another type T', compatible with T, already existed in
   gimple_types then return T', otherwise return T.  This is used by
   LTO to merge identical types read from different TUs.  */

tree
gimple_register_type (tree t)
{
  void **slot;

  gcc_assert (TYPE_P (t));

  /* Always register the main variant first.  This is important so we
     pick up the non-typedef variants as canonical, otherwise we'll end
     up taking typedef ids for structure tags during comparison.  */
  if (TYPE_MAIN_VARIANT (t) != t)
    gimple_register_type (TYPE_MAIN_VARIANT (t));

  if (gimple_types == NULL)
    gimple_types = htab_create (16381, gimple_type_hash, gimple_type_eq, 0);

  slot = htab_find_slot (gimple_types, t, INSERT);
  if (*slot
      && *(tree *) slot != t)
    {
      tree new_type = (tree) *((tree *) slot);

      /* Do not merge types with different addressability.  */
      gcc_assert (TREE_ADDRESSABLE (t) == TREE_ADDRESSABLE (new_type));

      /* If t is not its main variant then make t unreachable from its
         main variant list.  Otherwise we'd queue up a lot of duplicates
         there.  */
      if (t != TYPE_MAIN_VARIANT (t))
        {
          tree tem = TYPE_MAIN_VARIANT (t);
          while (tem && TYPE_NEXT_VARIANT (tem) != t)
            tem = TYPE_NEXT_VARIANT (tem);
          if (tem)
            TYPE_NEXT_VARIANT (tem) = TYPE_NEXT_VARIANT (t);
          TYPE_NEXT_VARIANT (t) = NULL_TREE;
        }

      /* If we are a pointer then remove us from the pointer-to or
         reference-to chain.  Otherwise we'd queue up a lot of duplicates
         there.  */
      if (TREE_CODE (t) == POINTER_TYPE)
        {
          if (TYPE_POINTER_TO (TREE_TYPE (t)) == t)
            TYPE_POINTER_TO (TREE_TYPE (t)) = TYPE_NEXT_PTR_TO (t);
          else
            {
              tree tem = TYPE_POINTER_TO (TREE_TYPE (t));
              while (tem && TYPE_NEXT_PTR_TO (tem) != t)
                tem = TYPE_NEXT_PTR_TO (tem);
              if (tem)
                TYPE_NEXT_PTR_TO (tem) = TYPE_NEXT_PTR_TO (t);
            }
          TYPE_NEXT_PTR_TO (t) = NULL_TREE;
        }
      else if (TREE_CODE (t) == REFERENCE_TYPE)
        {
          if (TYPE_REFERENCE_TO (TREE_TYPE (t)) == t)
            TYPE_REFERENCE_TO (TREE_TYPE (t)) = TYPE_NEXT_REF_TO (t);
          else
            {
              tree tem = TYPE_REFERENCE_TO (TREE_TYPE (t));
              while (tem && TYPE_NEXT_REF_TO (tem) != t)
                tem = TYPE_NEXT_REF_TO (tem);
              if (tem)
                TYPE_NEXT_REF_TO (tem) = TYPE_NEXT_REF_TO (t);
            }
          TYPE_NEXT_REF_TO (t) = NULL_TREE;
        }

      t = new_type;
    }
  else
    *slot = (void *) t;

  return t;
}
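
/* A usage sketch (hypothetical helper, added for illustration).  During
   LTO, reading the same 'struct foo' from two translation units yields
   two distinct tree nodes; assuming T1 and T2 are structurally
   compatible, registering both returns one canonical node.  */

static void ATTRIBUTE_UNUSED
gimple_register_type_sketch (tree t1, tree t2)
{
  tree c1 = gimple_register_type (t1);
  tree c2 = gimple_register_type (t2);
  /* Compatible types now compare pointer-equal.  */
  gcc_assert (c1 == c2);
}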


/* Show statistics on references to the global type table gimple_types.  */

void
print_gimple_types_stats (void)
{
  if (gimple_types)
    fprintf (stderr, "GIMPLE type table: size %ld, %ld elements, "
             "%ld searches, %ld collisions (ratio: %f)\n",
             (long) htab_size (gimple_types),
             (long) htab_elements (gimple_types),
             (long) gimple_types->searches,
             (long) gimple_types->collisions,
             htab_collisions (gimple_types));
  else
    fprintf (stderr, "GIMPLE type table is empty\n");
  if (gtc_visited)
    fprintf (stderr, "GIMPLE type comparison table: size %ld, %ld "
             "elements, %ld searches, %ld collisions (ratio: %f)\n",
             (long) htab_size (gtc_visited),
             (long) htab_elements (gtc_visited),
             (long) gtc_visited->searches,
             (long) gtc_visited->collisions,
             htab_collisions (gtc_visited));
  else
    fprintf (stderr, "GIMPLE type comparison table is empty\n");
}

/* Free the gimple type hashtables used for LTO type merging.  */

void
free_gimple_type_tables (void)
{
  /* Last chance to print stats for the tables.  */
  if (flag_lto_report)
    print_gimple_types_stats ();

  if (gimple_types)
    {
      htab_delete (gimple_types);
      gimple_types = NULL;
    }
  if (type_hash_cache)
    {
      pointer_map_destroy (type_hash_cache);
      type_hash_cache = NULL;
    }
  if (gtc_visited)
    {
      htab_delete (gtc_visited);
      obstack_free (&gtc_ob, NULL);
      gtc_visited = NULL;
    }
}


/* Return a type the same as TYPE except unsigned or
   signed according to UNSIGNEDP.  */

static tree
gimple_signed_or_unsigned_type (bool unsignedp, tree type)
{
  tree type1;

  type1 = TYPE_MAIN_VARIANT (type);
  if (type1 == signed_char_type_node
      || type1 == char_type_node
      || type1 == unsigned_char_type_node)
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (type1 == integer_type_node || type1 == unsigned_type_node)
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (type1 == short_integer_type_node || type1 == short_unsigned_type_node)
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (type1 == long_integer_type_node || type1 == long_unsigned_type_node)
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (type1 == long_long_integer_type_node
      || type1 == long_long_unsigned_type_node)
    return unsignedp
           ? long_long_unsigned_type_node
           : long_long_integer_type_node;
#if HOST_BITS_PER_WIDE_INT >= 64
  if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node)
    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
#endif
  if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node)
    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
  if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node)
    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
  if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node)
    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
  if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node)
    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;

#define GIMPLE_FIXED_TYPES(NAME) \
  if (type1 == short_ ## NAME ## _type_node \
      || type1 == unsigned_short_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_short_ ## NAME ## _type_node \
                     : short_ ## NAME ## _type_node; \
  if (type1 == NAME ## _type_node \
      || type1 == unsigned_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_ ## NAME ## _type_node \
                     : NAME ## _type_node; \
  if (type1 == long_ ## NAME ## _type_node \
      || type1 == unsigned_long_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_long_ ## NAME ## _type_node \
                     : long_ ## NAME ## _type_node; \
  if (type1 == long_long_ ## NAME ## _type_node \
      || type1 == unsigned_long_long_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
                     : long_long_ ## NAME ## _type_node;

#define GIMPLE_FIXED_MODE_TYPES(NAME) \
  if (type1 == NAME ## _type_node \
      || type1 == u ## NAME ## _type_node) \
    return unsignedp ? u ## NAME ## _type_node \
                     : NAME ## _type_node;

#define GIMPLE_FIXED_TYPES_SAT(NAME) \
  if (type1 == sat_ ## short_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
                     : sat_ ## short_ ## NAME ## _type_node; \
  if (type1 == sat_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
                     : sat_ ## NAME ## _type_node; \
  if (type1 == sat_ ## long_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
                     : sat_ ## long_ ## NAME ## _type_node; \
  if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
                     : sat_ ## long_long_ ## NAME ## _type_node;

#define GIMPLE_FIXED_MODE_TYPES_SAT(NAME) \
  if (type1 == sat_ ## NAME ## _type_node \
      || type1 == sat_ ## u ## NAME ## _type_node) \
    return unsignedp ? sat_ ## u ## NAME ## _type_node \
                     : sat_ ## NAME ## _type_node;

  GIMPLE_FIXED_TYPES (fract);
  GIMPLE_FIXED_TYPES_SAT (fract);
  GIMPLE_FIXED_TYPES (accum);
  GIMPLE_FIXED_TYPES_SAT (accum);

  GIMPLE_FIXED_MODE_TYPES (qq);
  GIMPLE_FIXED_MODE_TYPES (hq);
  GIMPLE_FIXED_MODE_TYPES (sq);
  GIMPLE_FIXED_MODE_TYPES (dq);
  GIMPLE_FIXED_MODE_TYPES (tq);
  GIMPLE_FIXED_MODE_TYPES_SAT (qq);
  GIMPLE_FIXED_MODE_TYPES_SAT (hq);
  GIMPLE_FIXED_MODE_TYPES_SAT (sq);
  GIMPLE_FIXED_MODE_TYPES_SAT (dq);
  GIMPLE_FIXED_MODE_TYPES_SAT (tq);
  GIMPLE_FIXED_MODE_TYPES (ha);
  GIMPLE_FIXED_MODE_TYPES (sa);
  GIMPLE_FIXED_MODE_TYPES (da);
  GIMPLE_FIXED_MODE_TYPES (ta);
  GIMPLE_FIXED_MODE_TYPES_SAT (ha);
  GIMPLE_FIXED_MODE_TYPES_SAT (sa);
  GIMPLE_FIXED_MODE_TYPES_SAT (da);
  GIMPLE_FIXED_MODE_TYPES_SAT (ta);

  /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not
     the precision; they have precision set to match their range, but
     may use a wider mode to match an ABI.  If we change modes, we may
     wind up with bad conversions.  For INTEGER_TYPEs in C, must check
     the precision as well, so as to yield correct results for
     bit-field types.  C++ does not have these separate bit-field
     types, and producing a signed or unsigned variant of an
     ENUMERAL_TYPE may cause other problems as well.  */
  if (!INTEGRAL_TYPE_P (type)
      || TYPE_UNSIGNED (type) == unsignedp)
    return type;

#define TYPE_OK(node) \
  (TYPE_MODE (type) == TYPE_MODE (node) \
   && TYPE_PRECISION (type) == TYPE_PRECISION (node))
  if (TYPE_OK (signed_char_type_node))
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (TYPE_OK (integer_type_node))
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (TYPE_OK (short_integer_type_node))
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (TYPE_OK (long_integer_type_node))
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (TYPE_OK (long_long_integer_type_node))
    return (unsignedp
            ? long_long_unsigned_type_node
            : long_long_integer_type_node);

#if HOST_BITS_PER_WIDE_INT >= 64
  if (TYPE_OK (intTI_type_node))
    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
#endif
  if (TYPE_OK (intDI_type_node))
    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
  if (TYPE_OK (intSI_type_node))
    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
  if (TYPE_OK (intHI_type_node))
    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
  if (TYPE_OK (intQI_type_node))
    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;

#undef GIMPLE_FIXED_TYPES
#undef GIMPLE_FIXED_MODE_TYPES
#undef GIMPLE_FIXED_TYPES_SAT
#undef GIMPLE_FIXED_MODE_TYPES_SAT
#undef TYPE_OK

  return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
}


/* Return an unsigned type the same as TYPE in other respects.  */

tree
gimple_unsigned_type (tree type)
{
  return gimple_signed_or_unsigned_type (true, type);
}


/* Return a signed type the same as TYPE in other respects.  */

tree
gimple_signed_type (tree type)
{
  return gimple_signed_or_unsigned_type (false, type);
}
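
/* A small sketch (hypothetical, added for illustration): for the
   standard C integer types handled explicitly above, the two wrappers
   round-trip, e.g. 'int' -> 'unsigned int' -> 'int'.  */

static tree ATTRIBUTE_UNUSED
gimple_signedness_sketch (void)
{
  /* Yields unsigned_type_node ...  */
  tree u = gimple_unsigned_type (integer_type_node);
  /* ... and converting back recovers integer_type_node.  */
  return gimple_signed_type (u);
}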


/* Return the type-based alias set for T, which may be an expression
   or a type.  Return -1 if we don't do anything special.  */

alias_set_type
gimple_get_alias_set (tree t)
{
  tree u;

  /* Permit type-punning when accessing a union, provided the access
     is directly through the union.  For example, this code does not
     permit taking the address of a union member and then storing
     through it.  Even the type-punning allowed here is a GCC
     extension, albeit a common and useful one; the C standard says
     that such accesses have implementation-defined behavior.  */
  for (u = t;
       TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
       u = TREE_OPERAND (u, 0))
    if (TREE_CODE (u) == COMPONENT_REF
        && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
      return 0;

  /* That's all the expressions we handle specially.  */
  if (!TYPE_P (t))
    return -1;

  /* For convenience, follow the C standard when dealing with
     character types.  Any object may be accessed via an lvalue that
     has character type.  */
  if (t == char_type_node
      || t == signed_char_type_node
      || t == unsigned_char_type_node)
    return 0;

  /* Allow aliasing between signed and unsigned variants of the same
     type.  We treat the signed variant as canonical.  */
  if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t))
    {
      tree t1 = gimple_signed_type (t);

      /* t1 == t can happen for boolean nodes which are always unsigned.  */
      if (t1 != t)
        return get_alias_set (t1);
    }
  else if (POINTER_TYPE_P (t))
    {
      /* From the common C and C++ langhook implementation:

         Unfortunately, there is no canonical form of a pointer type.
         In particular, if we have `typedef int I', then `int *', and
         `I *' are different types.  So, we have to pick a canonical
         representative.  We do this below.

         Technically, this approach is actually more conservative than
         it needs to be.  In particular, `const int *' and `int *'
         should be in different alias sets, according to the C and C++
         standard, since their types are not the same, and so,
         technically, an `int **' and `const int **' cannot point at
         the same thing.

         But, the standard is wrong.  In particular, this code is
         legal C++:

           int *ip;
           int **ipp = &ip;
           const int* const* cipp = ipp;

         And, it doesn't make sense for that to be legal unless you
         can dereference IPP and CIPP.  So, we ignore cv-qualifiers on
         the pointed-to types.  This issue has been reported to the
         C++ committee.  */

      /* In addition to the above canonicalization issue with LTO
         we should also canonicalize `T (*)[]' to `T *' avoiding
         alias issues with pointer-to element types and pointer-to
         array types.

         Likewise we need to deal with the situation of incomplete
         pointed-to types and make `*(struct X **)&a' and
         `*(struct X {} **)&a' alias.  Otherwise we will have to
         guarantee that all pointer-to incomplete type variants
         will be replaced by pointer-to complete type variants if
         they are available.

         With LTO the convenient situation of using `void *' to
         access and store any pointer type will also become
         more apparent (and `void *' is just another pointer-to
         incomplete type).  Assigning alias-set zero to `void *'
         and all pointer-to incomplete types is not an appealing
         solution.  Assigning an effective alias-set zero only
         affecting pointers might be - by recording proper subset
         relationships of all pointer alias-sets.

         Pointer-to function types are another grey area which
         needs caution.  Globbing them all into one alias-set
         or the above effective zero set would work.  */

      /* For now just assign the same alias-set to all pointers.
         That's simple and avoids all the above problems.  */
      if (t != ptr_type_node)
        return get_alias_set (ptr_type_node);
    }

  return -1;
}
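
/* An illustration (hypothetical, added for exposition) of the union
   type-punning rule implemented above: both accesses below are
   COMPONENT_REFs directly off the union, so they fall into alias set
   zero.  Taking the address of a member and storing through that
   pointer would not be covered by the rule.  */

union gimple_alias_sketch { int i; float f; };

static float ATTRIBUTE_UNUSED
gimple_alias_set_sketch (union gimple_alias_sketch *u)
{
  u->i = 1;      /* Direct store through the union.  */
  return u->f;   /* Direct load; allowed by the GCC extension.  */
}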


/* Data structure used to count the number of dereferences to PTR
   inside an expression.  */
struct count_ptr_d
{
  tree ptr;
  unsigned num_stores;
  unsigned num_loads;
};

/* Helper for count_uses_and_derefs.  Called by walk_tree to look for
   (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA.  */

static tree
count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  struct count_ptr_d *count_p = (struct count_ptr_d *) wi_p->info;

  /* Do not walk inside ADDR_EXPR nodes.  In the expression &ptr->fld,
     pointer 'ptr' is *not* dereferenced, it is simply used to compute
     the address of 'fld' as 'ptr + offsetof(fld)'.  */
  if (TREE_CODE (*tp) == ADDR_EXPR)
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (INDIRECT_REF_P (*tp) && TREE_OPERAND (*tp, 0) == count_p->ptr)
    {
      if (wi_p->is_lhs)
        count_p->num_stores++;
      else
        count_p->num_loads++;
    }

  return NULL_TREE;
}

/* Count the number of direct and indirect uses for pointer PTR in
   statement STMT.  The number of direct uses is stored in
   *NUM_USES_P.  Indirect references are counted separately depending
   on whether they are store or load operations.  The counts are
   stored in *NUM_LOADS_P and *NUM_STORES_P.  */

void
count_uses_and_derefs (tree ptr, gimple stmt, unsigned *num_uses_p,
                       unsigned *num_loads_p, unsigned *num_stores_p)
{
  ssa_op_iter i;
  tree use;

  *num_uses_p = 0;
  *num_loads_p = 0;
  *num_stores_p = 0;

  /* Find out the total number of uses of PTR in STMT.  */
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
    if (use == ptr)
      (*num_uses_p)++;

  /* Now count the number of indirect references to PTR.  This is
     truly awful, but we don't have much choice.  There are no parent
     pointers inside INDIRECT_REFs, so an expression like
     '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
     find all the indirect and direct uses of x_1 inside.  The only
     shortcut we can take is the fact that GIMPLE only allows
     INDIRECT_REFs inside the expressions below.  */
  if (is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_RETURN
      || gimple_code (stmt) == GIMPLE_ASM
      || is_gimple_call (stmt))
    {
      struct walk_stmt_info wi;
      struct count_ptr_d count;

      count.ptr = ptr;
      count.num_stores = 0;
      count.num_loads = 0;

      memset (&wi, 0, sizeof (wi));
      wi.info = &count;
      walk_gimple_op (stmt, count_ptr_derefs, &wi);

      *num_stores_p = count.num_stores;
      *num_loads_p = count.num_loads;
    }

  gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p);
}
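
/* A usage sketch (hypothetical, added for illustration): for a
   statement like '*x_1 = foo (x_1, *x_1)' and PTR == x_1 this reports
   three uses, one load and one store.  */

static void ATTRIBUTE_UNUSED
count_uses_sketch (tree ptr, gimple stmt)
{
  unsigned uses, loads, stores;
  count_uses_and_derefs (ptr, stmt, &uses, &loads, &stores);
  /* Dereferences can never outnumber the SSA uses of PTR.  */
  gcc_assert (uses >= loads + stores);
}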

/* From a tree operand OP return the base of a load or store operation
   or NULL_TREE if OP is not a load or a store.  */

static tree
get_base_loadstore (tree op)
{
  while (handled_component_p (op))
    op = TREE_OPERAND (op, 0);
  if (DECL_P (op)
      || INDIRECT_REF_P (op)
      || TREE_CODE (op) == TARGET_MEM_REF)
    return op;
  return NULL_TREE;
}
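
/* Sketch (hypothetical, added for illustration): for a reference such
   as 'a.b[i].c' the handled components are stripped and the VAR_DECL
   'a' is returned; a non-memory operand such as 'x_1 + 1' yields
   NULL_TREE.  */

static tree ATTRIBUTE_UNUSED
get_base_loadstore_sketch (tree ref)
{
  return get_base_loadstore (ref);
}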

/* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
   VISIT_ADDR if non-NULL on loads, stores and address-taken operands,
   passing the STMT, the base of the operand and DATA to it.  The base
   will be either a decl, an indirect reference (including TARGET_MEM_REF)
   or the argument of an address expression.
   Returns the results of these callbacks or'ed.  */

bool
walk_stmt_load_store_addr_ops (gimple stmt, void *data,
                               bool (*visit_load)(gimple, tree, void *),
                               bool (*visit_store)(gimple, tree, void *),
                               bool (*visit_addr)(gimple, tree, void *))
{
  bool ret = false;
  unsigned i;
  if (gimple_assign_single_p (stmt))
    {
      tree lhs, rhs;
      if (visit_store)
        {
          lhs = get_base_loadstore (gimple_assign_lhs (stmt));
          if (lhs)
            ret |= visit_store (stmt, lhs, data);
        }
      rhs = gimple_assign_rhs1 (stmt);
      while (handled_component_p (rhs))
        rhs = TREE_OPERAND (rhs, 0);
      if (visit_addr)
        {
          if (TREE_CODE (rhs) == ADDR_EXPR)
            ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
          else if (TREE_CODE (rhs) == TARGET_MEM_REF
                   && TMR_BASE (rhs) != NULL_TREE
                   && TREE_CODE (TMR_BASE (rhs)) == ADDR_EXPR)
            ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (rhs), 0), data);
          else if (TREE_CODE (rhs) == OBJ_TYPE_REF
                   && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
            ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
                                                   0), data);
          lhs = gimple_assign_lhs (stmt);
          if (TREE_CODE (lhs) == TARGET_MEM_REF
              && TMR_BASE (lhs) != NULL_TREE
              && TREE_CODE (TMR_BASE (lhs)) == ADDR_EXPR)
            ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (lhs), 0), data);
        }
      if (visit_load)
        {
          rhs = get_base_loadstore (rhs);
          if (rhs)
            ret |= visit_load (stmt, rhs, data);
        }
    }
  else if (visit_addr
           && (is_gimple_assign (stmt)
               || gimple_code (stmt) == GIMPLE_COND))
    {
      for (i = 0; i < gimple_num_ops (stmt); ++i)
        if (gimple_op (stmt, i)
            && TREE_CODE (gimple_op (stmt, i)) == ADDR_EXPR)
          ret |= visit_addr (stmt, TREE_OPERAND (gimple_op (stmt, i), 0),
                             data);
    }
  else if (is_gimple_call (stmt))
    {
      if (visit_store)
        {
          tree lhs = gimple_call_lhs (stmt);
          if (lhs)
            {
              lhs = get_base_loadstore (lhs);
              if (lhs)
                ret |= visit_store (stmt, lhs, data);
            }
        }
      if (visit_load || visit_addr)
        for (i = 0; i < gimple_call_num_args (stmt); ++i)
          {
            tree rhs = gimple_call_arg (stmt, i);
            if (visit_addr
                && TREE_CODE (rhs) == ADDR_EXPR)
              ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
            else if (visit_load)
              {
                rhs = get_base_loadstore (rhs);
                if (rhs)
                  ret |= visit_load (stmt, rhs, data);
              }
          }
      if (visit_addr
          && gimple_call_chain (stmt)
          && TREE_CODE (gimple_call_chain (stmt)) == ADDR_EXPR)
        ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (stmt), 0),
                           data);
      if (visit_addr
          && gimple_call_return_slot_opt_p (stmt)
          && gimple_call_lhs (stmt) != NULL_TREE
          && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
        ret |= visit_addr (stmt, gimple_call_lhs (stmt), data);
    }
  else if (gimple_code (stmt) == GIMPLE_ASM)
    {
      unsigned noutputs;
      const char *constraint;
      const char **oconstraints;
      bool allows_mem, allows_reg, is_inout;
      noutputs = gimple_asm_noutputs (stmt);
      oconstraints = XALLOCAVEC (const char *, noutputs);
      if (visit_store || visit_addr)
        for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
          {
            tree link = gimple_asm_output_op (stmt, i);
            tree op = get_base_loadstore (TREE_VALUE (link));
            if (op && visit_store)
              ret |= visit_store (stmt, op, data);
            if (visit_addr)
              {
                constraint = TREE_STRING_POINTER
                    (TREE_VALUE (TREE_PURPOSE (link)));
                oconstraints[i] = constraint;
                parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
                                         &allows_reg, &is_inout);
                if (op && !allows_reg && allows_mem)
                  ret |= visit_addr (stmt, op, data);
              }
          }
      if (visit_load || visit_addr)
        for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
          {
            tree link = gimple_asm_input_op (stmt, i);
            tree op = TREE_VALUE (link);
            if (visit_addr
                && TREE_CODE (op) == ADDR_EXPR)
              ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
            else if (visit_load || visit_addr)
              {
                op = get_base_loadstore (op);
                if (op)
                  {
                    if (visit_load)
                      ret |= visit_load (stmt, op, data);
                    if (visit_addr)
                      {
                        constraint = TREE_STRING_POINTER
                            (TREE_VALUE (TREE_PURPOSE (link)));
                        parse_input_constraint (&constraint, 0, 0, noutputs,
                                                0, oconstraints,
                                                &allows_mem, &allows_reg);
                        if (!allows_reg && allows_mem)
                          ret |= visit_addr (stmt, op, data);
                      }
                  }
              }
          }
    }
  else if (gimple_code (stmt) == GIMPLE_RETURN)
    {
      tree op = gimple_return_retval (stmt);
      if (op)
        {
          if (visit_addr
              && TREE_CODE (op) == ADDR_EXPR)
            ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
          else if (visit_load)
            {
              op = get_base_loadstore (op);
              if (op)
                ret |= visit_load (stmt, op, data);
            }
        }
    }
  else if (visit_addr
           && gimple_code (stmt) == GIMPLE_PHI)
    {
      for (i = 0; i < gimple_phi_num_args (stmt); ++i)
        {
          tree op = PHI_ARG_DEF (stmt, i);
          if (TREE_CODE (op) == ADDR_EXPR)
            ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
        }
    }

  return ret;
}

/* Like walk_stmt_load_store_addr_ops but with NULL visit_addr.  IPA-CP
   should make a faster clone for this case.  */

bool
walk_stmt_load_store_ops (gimple stmt, void *data,
                          bool (*visit_load)(gimple, tree, void *),
                          bool (*visit_store)(gimple, tree, void *))
{
  return walk_stmt_load_store_addr_ops (stmt, data,
                                        visit_load, visit_store, NULL);
}
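
/* A callback sketch (hypothetical helpers, added for illustration):
   counting the DECL bases loaded by a statement only needs the
   visit_load callback; the store side can simply be NULL.  */

static bool
count_decl_loads_sketch_1 (gimple stmt ATTRIBUTE_UNUSED, tree base,
                           void *data)
{
  unsigned *n = (unsigned *) data;
  if (DECL_P (base))
    (*n)++;
  /* Returning false keeps the or'ed result false.  */
  return false;
}

static unsigned ATTRIBUTE_UNUSED
count_decl_loads_sketch (gimple stmt)
{
  unsigned n = 0;
  walk_stmt_load_store_ops (stmt, &n, count_decl_loads_sketch_1, NULL);
  return n;
}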

/* Helper for gimple_ior_addresses_taken.  */

static bool
gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED,
                              tree addr, void *data)
{
  bitmap addresses_taken = (bitmap) data;
  addr = get_base_address (addr);
  if (addr
      && DECL_P (addr))
    {
      bitmap_set_bit (addresses_taken, DECL_UID (addr));
      return true;
    }
  return false;
}

/* Set the bit for the uid of all decls that have their address taken
   in STMT in the ADDRESSES_TAKEN bitmap.  Returns true if there
   were any in this stmt.  */

bool
gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
{
  return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
                                        gimple_ior_addresses_taken_1);
}
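
/* A usage sketch (hypothetical, added for illustration): collecting the
   address-taken DECL_UIDs of a single statement into a fresh bitmap.  */

static void ATTRIBUTE_UNUSED
addresses_taken_sketch (gimple stmt)
{
  bitmap b = BITMAP_ALLOC (NULL);
  if (gimple_ior_addresses_taken (b, stmt))
    {
      /* At least one decl had its address taken in STMT; its DECL_UID
         bit is now set in B.  */
    }
  BITMAP_FREE (b);
}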


/* Return a printable name for symbol DECL.  */

const char *
gimple_decl_printable_name (tree decl, int verbosity)
{
  if (!DECL_NAME (decl))
    return NULL;

  if (DECL_ASSEMBLER_NAME_SET_P (decl))
    {
      const char *str, *mangled_str;
      int dmgl_opts = DMGL_NO_OPTS;

      if (verbosity >= 2)
        {
          dmgl_opts = DMGL_VERBOSE
                      | DMGL_ANSI
                      | DMGL_GNU_V3
                      | DMGL_RET_POSTFIX;
          if (TREE_CODE (decl) == FUNCTION_DECL)
            dmgl_opts |= DMGL_PARAMS;
        }

      mangled_str = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
      str = cplus_demangle_v3 (mangled_str, dmgl_opts);
      return (str) ? str : mangled_str;
    }

  return IDENTIFIER_POINTER (DECL_NAME (decl));
}
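
/* A usage sketch (hypothetical, added for illustration): verbosity 2
   and above demangles the assembler name, including parameters for a
   FUNCTION_DECL.  */

static void ATTRIBUTE_UNUSED
print_decl_name_sketch (tree decl)
{
  const char *name = gimple_decl_printable_name (decl, 2);
  if (name)
    fprintf (stderr, "%s\n", name);
}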

#include "gt-gimple.h"