/* Gimple IR support functions.

   Copyright 2007, 2008, 2009 Free Software Foundation, Inc.
   Contributed by Aldy Hernandez <aldyh@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "ggc.h"
#include "errors.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "gimple.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "value-prof.h"
#include "flags.h"

#define DEFGSCODE(SYM, NAME, STRUCT) NAME,
const char *const gimple_code_name[] = {
#include "gimple.def"
};
#undef DEFGSCODE

/* All the tuples have their operand vector at the very bottom
   of the structure.  Therefore, the offset required to find the
   operands vector is the size of the structure minus the size of the
   one-element tree array at the end (see gimple_ops).  */
#define DEFGSCODE(SYM, NAME, STRUCT) (sizeof (STRUCT) - sizeof (tree)),
const size_t gimple_ops_offset_[] = {
#include "gimple.def"
};
#undef DEFGSCODE

#ifdef GATHER_STATISTICS
/* Gimple stats.  */

int gimple_alloc_counts[(int) gimple_alloc_kind_all];
int gimple_alloc_sizes[(int) gimple_alloc_kind_all];

/* Keep in sync with gimple.h:enum gimple_alloc_kind.  */
static const char * const gimple_alloc_kind_names[] = {
    "assignments",
    "phi nodes",
    "conditionals",
    "sequences",
    "everything else"
};

#endif /* GATHER_STATISTICS */

/* A cache of gimple_seq objects.  Sequences are created and destroyed
   fairly often during gimplification.  */
static GTY ((deletable)) struct gimple_seq_d *gimple_seq_cache;

/* Private API manipulation functions shared only with some
   other files.  */
extern void gimple_set_stored_syms (gimple, bitmap, bitmap_obstack *);
extern void gimple_set_loaded_syms (gimple, bitmap, bitmap_obstack *);

/* Gimple tuple constructors.
   Note: Any constructor taking a ``gimple_seq'' as a parameter can
   be passed a NULL to start with an empty sequence.  */

/* Set the code for statement G to CODE.  */

static inline void
gimple_set_code (gimple g, enum gimple_code code)
{
  g->gsbase.code = code;
}


/* Return the GSS_* identifier for the given GIMPLE statement CODE.  */

static enum gimple_statement_structure_enum
gss_for_code (enum gimple_code code)
{
  switch (code)
    {
    case GIMPLE_ASSIGN:
    case GIMPLE_CALL:
    case GIMPLE_RETURN: return GSS_WITH_MEM_OPS;
    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_LABEL:
    case GIMPLE_CHANGE_DYNAMIC_TYPE:
    case GIMPLE_SWITCH: return GSS_WITH_OPS;
    case GIMPLE_ASM: return GSS_ASM;
    case GIMPLE_BIND: return GSS_BIND;
    case GIMPLE_CATCH: return GSS_CATCH;
    case GIMPLE_EH_FILTER: return GSS_EH_FILTER;
    case GIMPLE_NOP: return GSS_BASE;
    case GIMPLE_PHI: return GSS_PHI;
    case GIMPLE_RESX: return GSS_RESX;
    case GIMPLE_TRY: return GSS_TRY;
    case GIMPLE_WITH_CLEANUP_EXPR: return GSS_WCE;
    case GIMPLE_OMP_CRITICAL: return GSS_OMP_CRITICAL;
    case GIMPLE_OMP_FOR: return GSS_OMP_FOR;
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SECTION: return GSS_OMP;
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_SECTIONS_SWITCH: return GSS_BASE;
    case GIMPLE_OMP_CONTINUE: return GSS_OMP_CONTINUE;
    case GIMPLE_OMP_PARALLEL: return GSS_OMP_PARALLEL;
    case GIMPLE_OMP_TASK: return GSS_OMP_TASK;
    case GIMPLE_OMP_SECTIONS: return GSS_OMP_SECTIONS;
    case GIMPLE_OMP_SINGLE: return GSS_OMP_SINGLE;
    case GIMPLE_OMP_ATOMIC_LOAD: return GSS_OMP_ATOMIC_LOAD;
    case GIMPLE_OMP_ATOMIC_STORE: return GSS_OMP_ATOMIC_STORE;
    case GIMPLE_PREDICT: return GSS_BASE;
    default: gcc_unreachable ();
    }
}


/* Return the number of bytes needed to hold a GIMPLE statement with
   code CODE.  */

static size_t
gimple_size (enum gimple_code code)
{
  enum gimple_statement_structure_enum gss = gss_for_code (code);

  if (gss == GSS_WITH_OPS)
    return sizeof (struct gimple_statement_with_ops);
  else if (gss == GSS_WITH_MEM_OPS)
    return sizeof (struct gimple_statement_with_memory_ops);

  switch (code)
    {
    case GIMPLE_ASM:
      return sizeof (struct gimple_statement_asm);
    case GIMPLE_NOP:
      return sizeof (struct gimple_statement_base);
    case GIMPLE_BIND:
      return sizeof (struct gimple_statement_bind);
    case GIMPLE_CATCH:
      return sizeof (struct gimple_statement_catch);
    case GIMPLE_EH_FILTER:
      return sizeof (struct gimple_statement_eh_filter);
    case GIMPLE_TRY:
      return sizeof (struct gimple_statement_try);
    case GIMPLE_RESX:
      return sizeof (struct gimple_statement_resx);
    case GIMPLE_OMP_CRITICAL:
      return sizeof (struct gimple_statement_omp_critical);
    case GIMPLE_OMP_FOR:
      return sizeof (struct gimple_statement_omp_for);
    case GIMPLE_OMP_PARALLEL:
      return sizeof (struct gimple_statement_omp_parallel);
    case GIMPLE_OMP_TASK:
      return sizeof (struct gimple_statement_omp_task);
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
      return sizeof (struct gimple_statement_omp);
    case GIMPLE_OMP_RETURN:
      return sizeof (struct gimple_statement_base);
    case GIMPLE_OMP_CONTINUE:
      return sizeof (struct gimple_statement_omp_continue);
    case GIMPLE_OMP_SECTIONS:
      return sizeof (struct gimple_statement_omp_sections);
    case GIMPLE_OMP_SECTIONS_SWITCH:
      return sizeof (struct gimple_statement_base);
    case GIMPLE_OMP_SINGLE:
      return sizeof (struct gimple_statement_omp_single);
    case GIMPLE_OMP_ATOMIC_LOAD:
      return sizeof (struct gimple_statement_omp_atomic_load);
    case GIMPLE_OMP_ATOMIC_STORE:
      return sizeof (struct gimple_statement_omp_atomic_store);
    case GIMPLE_WITH_CLEANUP_EXPR:
      return sizeof (struct gimple_statement_wce);
    case GIMPLE_CHANGE_DYNAMIC_TYPE:
      return sizeof (struct gimple_statement_with_ops);
    case GIMPLE_PREDICT:
      return sizeof (struct gimple_statement_base);
    default:
      break;
    }

  gcc_unreachable ();
}


/* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
   operands.  */

#define gimple_alloc(c, n) gimple_alloc_stat (c, n MEM_STAT_INFO)
static gimple
gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
{
  size_t size;
  gimple stmt;

  size = gimple_size (code);
  if (num_ops > 0)
    size += sizeof (tree) * (num_ops - 1);

#ifdef GATHER_STATISTICS
  {
    enum gimple_alloc_kind kind = gimple_alloc_kind (code);
    gimple_alloc_counts[(int) kind]++;
    gimple_alloc_sizes[(int) kind] += size;
  }
#endif

  stmt = (gimple) ggc_alloc_cleared_stat (size PASS_MEM_STAT);
  gimple_set_code (stmt, code);
  gimple_set_num_ops (stmt, num_ops);

  /* Do not call gimple_set_modified here as it has other side
     effects and this tuple is still not completely built.  */
  stmt->gsbase.modified = 1;

  return stmt;
}
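
/* Illustrative note (not part of the API): because every tuple embeds a
   one-element operand array, a statement whose code maps to
   GSS_WITH_MEM_OPS, e.g. a GIMPLE_ASSIGN with 3 operands, is allocated
   above as

     sizeof (struct gimple_statement_with_memory_ops)
       + (3 - 1) * sizeof (tree)

   bytes.  */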

/* Set SUBCODE to be the code of the expression computed by statement G.  */

static inline void
gimple_set_subcode (gimple g, unsigned subcode)
{
  /* We only have 16 bits for the RHS code.  Assert that we are not
     overflowing it.  */
  gcc_assert (subcode < (1 << 16));
  g->gsbase.subcode = subcode;
}


/* Build a tuple with operands.  CODE is the statement to build (which
   must be one of the GIMPLE_WITH_OPS tuples).  SUBCODE is the sub-code
   for the new tuple.  NUM_OPS is the number of operands to allocate.  */

#define gimple_build_with_ops(c, s, n) \
  gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)

static gimple
gimple_build_with_ops_stat (enum gimple_code code, enum tree_code subcode,
                            unsigned num_ops MEM_STAT_DECL)
{
  gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
  gimple_set_subcode (s, subcode);

  return s;
}


/* Build a GIMPLE_RETURN statement returning RETVAL.  */

gimple
gimple_build_return (tree retval)
{
  gimple s = gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK, 1);
  if (retval)
    gimple_return_set_retval (s, retval);
  return s;
}

/* Helper for gimple_build_call, gimple_build_call_vec and
   gimple_build_call_from_tree.  Build the basic components of a
   GIMPLE_CALL statement to function FN with NARGS arguments.  */

static inline gimple
gimple_build_call_1 (tree fn, unsigned nargs)
{
  gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
  if (TREE_CODE (fn) == FUNCTION_DECL)
    fn = build_fold_addr_expr (fn);
  gimple_set_op (s, 1, fn);
  return s;
}


/* Build a GIMPLE_CALL statement to function FN with the arguments
   specified in vector ARGS.  */

gimple
gimple_build_call_vec (tree fn, VEC(tree, heap) *args)
{
  unsigned i;
  unsigned nargs = VEC_length (tree, args);
  gimple call = gimple_build_call_1 (fn, nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, VEC_index (tree, args, i));

  return call;
}


/* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
   arguments.  The ... are the arguments.  */

gimple
gimple_build_call (tree fn, unsigned nargs, ...)
{
  va_list ap;
  gimple call;
  unsigned i;

  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));

  call = gimple_build_call_1 (fn, nargs);

  va_start (ap, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));
  va_end (ap);

  return call;
}
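
/* Illustrative example (FOO_DECL, A, B and TMP are hypothetical names):
   the call `tmp = foo (a, b)' could be built as

     gimple call = gimple_build_call (foo_decl, 2, a, b);
     gimple_call_set_lhs (call, tmp);

   assuming FOO_DECL is a FUNCTION_DECL and A, B and TMP are valid
   GIMPLE operands.  */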


/* Build a GIMPLE_CALL statement from CALL_EXPR T.  Note that T is
   assumed to be in GIMPLE form already.  Minimal checking is done of
   this fact.  */

gimple
gimple_build_call_from_tree (tree t)
{
  unsigned i, nargs;
  gimple call;
  tree fndecl = get_callee_fndecl (t);

  gcc_assert (TREE_CODE (t) == CALL_EXPR);

  nargs = call_expr_nargs (t);
  call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));

  gimple_set_block (call, TREE_BLOCK (t));

  /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL.  */
  gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
  gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
  gimple_call_set_cannot_inline (call, CALL_CANNOT_INLINE_P (t));
  gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
  gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
  gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));

  return call;
}


/* Extract the operands and code for expression EXPR into *SUBCODE_P,
   *OP1_P and *OP2_P respectively.  */

void
extract_ops_from_tree (tree expr, enum tree_code *subcode_p, tree *op1_p,
                       tree *op2_p)
{
  enum gimple_rhs_class grhs_class;

  *subcode_p = TREE_CODE (expr);
  grhs_class = get_gimple_rhs_class (*subcode_p);

  if (grhs_class == GIMPLE_BINARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
    }
  else if (grhs_class == GIMPLE_UNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      *op1_p = expr;
      *op2_p = NULL_TREE;
    }
  else
    gcc_unreachable ();
}


/* Build a GIMPLE_ASSIGN statement.

   LHS is the left-hand side of the assignment.
   RHS is the right-hand side; it may be a unary or binary expression.  */

gimple
gimple_build_assign_stat (tree lhs, tree rhs MEM_STAT_DECL)
{
  enum tree_code subcode;
  tree op1, op2;

  extract_ops_from_tree (rhs, &subcode, &op1, &op2);
  return gimple_build_assign_with_ops_stat (subcode, lhs, op1, op2
                                            PASS_MEM_STAT);
}


/* Build a GIMPLE_ASSIGN statement with sub-code SUBCODE and operands
   OP1 and OP2.  If OP2 is NULL then SUBCODE must be of class
   GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS.  */

gimple
gimple_build_assign_with_ops_stat (enum tree_code subcode, tree lhs, tree op1,
                                   tree op2 MEM_STAT_DECL)
{
  unsigned num_ops;
  gimple p;

  /* Need 1 operand for LHS and 1 or 2 for the RHS (depending on the
     code).  */
  num_ops = get_gimple_rhs_num_ops (subcode) + 1;

  p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, subcode, num_ops
                                  PASS_MEM_STAT);
  gimple_assign_set_lhs (p, lhs);
  gimple_assign_set_rhs1 (p, op1);
  if (op2)
    {
      gcc_assert (num_ops > 2);
      gimple_assign_set_rhs2 (p, op2);
    }

  return p;
}
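
/* For illustration only: the statement `x = a + b' would be built as

     gimple stmt = gimple_build_assign_with_ops (PLUS_EXPR, x, a, b);

   via the convenience macro in gimple.h, assuming X, A and B are
   already valid GIMPLE values of compatible types.  */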


/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.

   DST/SRC are the destination and source respectively.  You can pass
   ungimplified trees in DST or SRC, in which case they will be
   converted to a gimple operand if necessary.

   This function returns the newly created GIMPLE_ASSIGN tuple.  */

inline gimple
gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
{
  tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
  gimplify_and_add (t, seq_p);
  ggc_free (t);
  return gimple_seq_last_stmt (*seq_p);
}
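
/* A minimal usage sketch, assuming DST and SRC are trees that may still
   need gimplification:

     gimple_seq seq = NULL;
     gimple stmt = gimplify_assign (dst, src, &seq);

   Afterwards SEQ holds any statements that were produced and STMT is
   the final assignment.  */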


/* Build a GIMPLE_COND statement.

   PRED is the condition used to compare LHS and the RHS.
   T_LABEL is the label to jump to if the condition is true.
   F_LABEL is the label to jump to otherwise.  */

gimple
gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
                   tree t_label, tree f_label)
{
  gimple p;

  gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
  p = gimple_build_with_ops (GIMPLE_COND, pred_code, 4);
  gimple_cond_set_lhs (p, lhs);
  gimple_cond_set_rhs (p, rhs);
  gimple_cond_set_true_label (p, t_label);
  gimple_cond_set_false_label (p, f_label);
  return p;
}
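
/* For example (names illustrative), the GIMPLE form of
   `if (a < b) goto L1; else goto L2;' could be built as

     gimple cond = gimple_build_cond (LT_EXPR, a, b, label1, label2);

   where LABEL1 and LABEL2 are LABEL_DECLs.  */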


/* Extract operands for a GIMPLE_COND statement out of COND_EXPR tree COND.  */

void
gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
                               tree *lhs_p, tree *rhs_p)
{
  gcc_assert (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison
              || TREE_CODE (cond) == TRUTH_NOT_EXPR
              || is_gimple_min_invariant (cond)
              || SSA_VAR_P (cond));

  extract_ops_from_tree (cond, code_p, lhs_p, rhs_p);

  /* Canonicalize conditionals of the form 'if (!VAL)'.  */
  if (*code_p == TRUTH_NOT_EXPR)
    {
      *code_p = EQ_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = fold_convert (TREE_TYPE (*lhs_p), integer_zero_node);
    }
  /* Canonicalize conditionals of the form 'if (VAL)'.  */
  else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
    {
      *code_p = NE_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = fold_convert (TREE_TYPE (*lhs_p), integer_zero_node);
    }
}


/* Build a GIMPLE_COND statement from the conditional expression tree
   COND.  T_LABEL and F_LABEL are as in gimple_build_cond.  */

gimple
gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  return gimple_build_cond (code, lhs, rhs, t_label, f_label);
}

/* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
   boolean expression tree COND.  */

void
gimple_cond_set_condition_from_tree (gimple stmt, tree cond)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  gimple_cond_set_condition (stmt, code, lhs, rhs);
}

/* Build a GIMPLE_LABEL statement for LABEL.  */

gimple
gimple_build_label (tree label)
{
  gimple p = gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1);
  gimple_label_set_label (p, label);
  return p;
}

/* Build a GIMPLE_GOTO statement to label DEST.  */

gimple
gimple_build_goto (tree dest)
{
  gimple p = gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1);
  gimple_goto_set_dest (p, dest);
  return p;
}


/* Build a GIMPLE_NOP statement.  */

gimple
gimple_build_nop (void)
{
  return gimple_alloc (GIMPLE_NOP, 0);
}


/* Build a GIMPLE_BIND statement.
   VARS are the variables in BODY.
   BLOCK is the containing block.  */

gimple
gimple_build_bind (tree vars, gimple_seq body, tree block)
{
  gimple p = gimple_alloc (GIMPLE_BIND, 0);
  gimple_bind_set_vars (p, vars);
  if (body)
    gimple_bind_set_body (p, body);
  if (block)
    gimple_bind_set_block (p, block);
  return p;
}

/* Helper function to set the simple fields of an asm stmt.

   STRING is a pointer to a string that is the asm block's assembly code.
   NINPUTS is the number of register inputs.
   NOUTPUTS is the number of register outputs.
   NCLOBBERS is the number of clobbered registers.  */

static inline gimple
gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
                    unsigned nclobbers)
{
  gimple p;
  int size = strlen (string);

  p = gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
                             ninputs + noutputs + nclobbers);

  p->gimple_asm.ni = ninputs;
  p->gimple_asm.no = noutputs;
  p->gimple_asm.nc = nclobbers;
  p->gimple_asm.string = ggc_alloc_string (string, size);

#ifdef GATHER_STATISTICS
  gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;
#endif

  return p;
}

/* Build a GIMPLE_ASM statement.

   STRING is the assembly code.
   INPUTS is a vector of the input register parameters.
   OUTPUTS is a vector of the output register parameters.
   CLOBBERS is a vector of the clobbered register parameters.  */

gimple
gimple_build_asm_vec (const char *string, VEC(tree,gc)* inputs,
                      VEC(tree,gc)* outputs, VEC(tree,gc)* clobbers)
{
  gimple p;
  unsigned i;

  p = gimple_build_asm_1 (string,
                          VEC_length (tree, inputs),
                          VEC_length (tree, outputs),
                          VEC_length (tree, clobbers));

  for (i = 0; i < VEC_length (tree, inputs); i++)
    gimple_asm_set_input_op (p, i, VEC_index (tree, inputs, i));

  for (i = 0; i < VEC_length (tree, outputs); i++)
    gimple_asm_set_output_op (p, i, VEC_index (tree, outputs, i));

  for (i = 0; i < VEC_length (tree, clobbers); i++)
    gimple_asm_set_clobber_op (p, i, VEC_index (tree, clobbers, i));

  return p;
}
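
/* Illustrative only: a bare `asm ("nop")' with no operands could be
   built as

     gimple s = gimple_build_asm_vec ("nop", NULL, NULL, NULL);

   since VEC_length yields zero for a NULL vector.  */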

/* Build a GIMPLE_ASM statement.

   STRING is the assembly code.
   NINPUTS is the number of register inputs.
   NOUTPUTS is the number of register outputs.
   NCLOBBERS is the number of clobbered registers.
   ... are trees for each input, output and clobbered register.  */

gimple
gimple_build_asm (const char *string, unsigned ninputs, unsigned noutputs,
                  unsigned nclobbers, ...)
{
  gimple p;
  unsigned i;
  va_list ap;

  p = gimple_build_asm_1 (string, ninputs, noutputs, nclobbers);

  va_start (ap, nclobbers);

  for (i = 0; i < ninputs; i++)
    gimple_asm_set_input_op (p, i, va_arg (ap, tree));

  for (i = 0; i < noutputs; i++)
    gimple_asm_set_output_op (p, i, va_arg (ap, tree));

  for (i = 0; i < nclobbers; i++)
    gimple_asm_set_clobber_op (p, i, va_arg (ap, tree));

  va_end (ap);

  return p;
}

/* Build a GIMPLE_CATCH statement.

   TYPES are the catch types.
   HANDLER is the exception handler.  */

gimple
gimple_build_catch (tree types, gimple_seq handler)
{
  gimple p = gimple_alloc (GIMPLE_CATCH, 0);
  gimple_catch_set_types (p, types);
  if (handler)
    gimple_catch_set_handler (p, handler);

  return p;
}

/* Build a GIMPLE_EH_FILTER statement.

   TYPES are the filter's types.
   FAILURE is the filter's failure action.  */

gimple
gimple_build_eh_filter (tree types, gimple_seq failure)
{
  gimple p = gimple_alloc (GIMPLE_EH_FILTER, 0);
  gimple_eh_filter_set_types (p, types);
  if (failure)
    gimple_eh_filter_set_failure (p, failure);

  return p;
}

/* Build a GIMPLE_TRY statement.

   EVAL is the expression to evaluate.
   CLEANUP is the cleanup expression.
   KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
   whether this is a try/catch or a try/finally respectively.  */

gimple
gimple_build_try (gimple_seq eval, gimple_seq cleanup,
                  enum gimple_try_flags kind)
{
  gimple p;

  gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
  p = gimple_alloc (GIMPLE_TRY, 0);
  gimple_set_subcode (p, kind);
  if (eval)
    gimple_try_set_eval (p, eval);
  if (cleanup)
    gimple_try_set_cleanup (p, cleanup);

  return p;
}
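
/* Sketch of a try/finally region, assuming BODY and CLEANUP sequences
   were built elsewhere:

     gimple t = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);

   GIMPLE_TRY_CATCH is used the same way for try/catch regions.  */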

/* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.

   CLEANUP is the cleanup expression.  */

gimple
gimple_build_wce (gimple_seq cleanup)
{
  gimple p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
  if (cleanup)
    gimple_wce_set_cleanup (p, cleanup);

  return p;
}


/* Build a GIMPLE_RESX statement.

   REGION is the region number from which this resx causes control flow to
   leave.  */

gimple
gimple_build_resx (int region)
{
  gimple p = gimple_alloc (GIMPLE_RESX, 0);
  gimple_resx_set_region (p, region);
  return p;
}


/* The helper for constructing a gimple switch statement.
   INDEX is the switch's index.
   NLABELS is the number of labels in the switch excluding the default.
   DEFAULT_LABEL is the default label for the switch statement.  */

static inline gimple
gimple_build_switch_1 (unsigned nlabels, tree index, tree default_label)
{
  /* nlabels + 1 default label + 1 index.  */
  gimple p = gimple_build_with_ops (GIMPLE_SWITCH, ERROR_MARK,
                                    nlabels + 1 + 1);
  gimple_switch_set_index (p, index);
  gimple_switch_set_default_label (p, default_label);
  return p;
}


/* Build a GIMPLE_SWITCH statement.

   INDEX is the switch's index.
   NLABELS is the number of labels in the switch excluding the DEFAULT_LABEL.
   ... are the labels excluding the default.  */

gimple
gimple_build_switch (unsigned nlabels, tree index, tree default_label, ...)
{
  va_list al;
  unsigned i;
  gimple p;

  p = gimple_build_switch_1 (nlabels, index, default_label);

  /* Store the rest of the labels.  */
  va_start (al, default_label);
  for (i = 1; i <= nlabels; i++)
    gimple_switch_set_label (p, i, va_arg (al, tree));
  va_end (al);

  return p;
}


/* Build a GIMPLE_SWITCH statement.

   INDEX is the switch's index.
   DEFAULT_LABEL is the default label.
   ARGS is a vector of labels excluding the default.  */

gimple
gimple_build_switch_vec (tree index, tree default_label, VEC(tree, heap) *args)
{
  unsigned i;
  unsigned nlabels = VEC_length (tree, args);
  gimple p = gimple_build_switch_1 (nlabels, index, default_label);

  /* Put labels in labels[1..nlabels].  The default label
     is in labels[0].  */
  for (i = 1; i <= nlabels; i++)
    gimple_switch_set_label (p, i, VEC_index (tree, args, i - 1));

  return p;
}
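
/* For illustration (INDEX, DEFAULT_LABEL and LABELS are hypothetical
   names): given a heap VEC `labels' of the non-default case labels, a
   switch on INDEX is built with

     gimple s = gimple_build_switch_vec (index, default_label, labels);  */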


/* Build a GIMPLE_OMP_CRITICAL statement.

   BODY is the sequence of statements for which only one thread can execute.
   NAME is an optional identifier for this critical block.  */

gimple
gimple_build_omp_critical (gimple_seq body, tree name)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0);
  gimple_omp_critical_set_name (p, name);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}

/* Build a GIMPLE_OMP_FOR statement.

   BODY is the sequence of statements inside the for loop.
   CLAUSES are any of the OMP loop construct's clauses: private, firstprivate,
   lastprivate, reduction, ordered, schedule, and nowait.
   COLLAPSE is the collapse count.
   PRE_BODY is the sequence of statements that are loop invariant.  */

gimple
gimple_build_omp_for (gimple_seq body, tree clauses, size_t collapse,
                      gimple_seq pre_body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_FOR, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_for_set_clauses (p, clauses);
  p->gimple_omp_for.collapse = collapse;
  p->gimple_omp_for.iter = GGC_CNEWVEC (struct gimple_omp_for_iter, collapse);
  if (pre_body)
    gimple_omp_for_set_pre_body (p, pre_body);

  return p;
}


/* Build a GIMPLE_OMP_PARALLEL statement.

   BODY is the sequence of statements which are executed in parallel.
   CLAUSES are the OMP parallel construct's clauses.
   CHILD_FN is the function created for the parallel threads to execute.
   DATA_ARG is the shared data argument(s).  */

gimple
gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
                           tree data_arg)
{
  gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_parallel_set_clauses (p, clauses);
  gimple_omp_parallel_set_child_fn (p, child_fn);
  gimple_omp_parallel_set_data_arg (p, data_arg);

  return p;
}


/* Build a GIMPLE_OMP_TASK statement.

   BODY is the sequence of statements which are executed by the explicit task.
   CLAUSES are the OMP task construct's clauses.
   CHILD_FN is the function created for the task to execute.
   DATA_ARG is the shared data argument(s).
   COPY_FN is the optional function for firstprivate initialization.
   ARG_SIZE and ARG_ALIGN are size and alignment of the data block.  */

gimple
gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
                       tree data_arg, tree copy_fn, tree arg_size,
                       tree arg_align)
{
  gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_task_set_clauses (p, clauses);
  gimple_omp_task_set_child_fn (p, child_fn);
  gimple_omp_task_set_data_arg (p, data_arg);
  gimple_omp_task_set_copy_fn (p, copy_fn);
  gimple_omp_task_set_arg_size (p, arg_size);
  gimple_omp_task_set_arg_align (p, arg_align);

  return p;
}


/* Build a GIMPLE_OMP_SECTION statement for a sections statement.

   BODY is the sequence of statements in the section.  */

gimple
gimple_build_omp_section (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_MASTER statement.

   BODY is the sequence of statements to be executed by just the master.  */

gimple
gimple_build_omp_master (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_CONTINUE statement.

   CONTROL_DEF is the definition of the control variable.
   CONTROL_USE is the use of the control variable.  */

gimple
gimple_build_omp_continue (tree control_def, tree control_use)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0);
  gimple_omp_continue_set_control_def (p, control_def);
  gimple_omp_continue_set_control_use (p, control_use);
  return p;
}

/* Build a GIMPLE_OMP_ORDERED statement.

   BODY is the sequence of statements inside a loop that will be executed
   in sequence.  */

gimple
gimple_build_omp_ordered (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_RETURN statement.
   WAIT_P is true if this is a non-waiting return.  */

gimple
gimple_build_omp_return (bool wait_p)
{
  gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
  if (wait_p)
    gimple_omp_return_set_nowait (p);

  return p;
}


/* Build a GIMPLE_OMP_SECTIONS statement.

   BODY is a sequence of section statements.
   CLAUSES are any of the OMP sections construct's clauses: private,
   firstprivate, lastprivate, reduction, and nowait.  */

gimple
gimple_build_omp_sections (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_sections_set_clauses (p, clauses);

  return p;
}


/* Build a GIMPLE_OMP_SECTIONS_SWITCH.  */

gimple
gimple_build_omp_sections_switch (void)
{
  return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
}


/* Build a GIMPLE_OMP_SINGLE statement.

   BODY is the sequence of statements that will be executed once.
   CLAUSES are any of the OMP single construct's clauses: private,
   firstprivate, copyprivate, nowait.  */

gimple
gimple_build_omp_single (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_single_set_clauses (p, clauses);

  return p;
}


/* Build a GIMPLE_CHANGE_DYNAMIC_TYPE statement.  TYPE is the new type
   for the location PTR.  */

gimple
gimple_build_cdt (tree type, tree ptr)
{
  gimple p = gimple_build_with_ops (GIMPLE_CHANGE_DYNAMIC_TYPE, ERROR_MARK, 2);
  gimple_cdt_set_new_type (p, type);
  gimple_cdt_set_location (p, ptr);

  return p;
}


/* Build a GIMPLE_OMP_ATOMIC_LOAD statement.  */

gimple
gimple_build_omp_atomic_load (tree lhs, tree rhs)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0);
  gimple_omp_atomic_load_set_lhs (p, lhs);
  gimple_omp_atomic_load_set_rhs (p, rhs);
  return p;
}

/* Build a GIMPLE_OMP_ATOMIC_STORE statement.

   VAL is the value we are storing.  */

gimple
gimple_build_omp_atomic_store (tree val)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0);
  gimple_omp_atomic_store_set_val (p, val);
  return p;
}

/* Build a GIMPLE_PREDICT statement.  PREDICTOR is one of the predictors from
   predict.def, OUTCOME is NOT_TAKEN or TAKEN.  */

gimple
gimple_build_predict (enum br_predictor predictor, enum prediction outcome)
{
  gimple p = gimple_alloc (GIMPLE_PREDICT, 0);
  /* Ensure all the predictors fit into the lower bits of the subcode.  */
  gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN);
  gimple_predict_set_predictor (p, predictor);
  gimple_predict_set_outcome (p, outcome);
  return p;
}

/* Return which gimple structure is used by T.  The enums here are defined
   in gsstruct.def.  */

enum gimple_statement_structure_enum
gimple_statement_structure (gimple gs)
{
  return gss_for_code (gimple_code (gs));
}

#if defined ENABLE_GIMPLE_CHECKING
/* Complain of a gimple type mismatch and die.  */

void
gimple_check_failed (const_gimple gs, const char *file, int line,
                     const char *function, enum gimple_code code,
                     enum tree_code subcode)
{
  internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
                  gimple_code_name[code],
                  tree_code_name[subcode],
                  gimple_code_name[gimple_code (gs)],
                  gs->gsbase.subcode > 0
                  ? tree_code_name[gs->gsbase.subcode]
                  : "",
                  function, trim_filename (file), line);
}
#endif /* ENABLE_GIMPLE_CHECKING */


/* Allocate a new GIMPLE sequence in GC memory and return it.  If
   there are free sequences in GIMPLE_SEQ_CACHE return one of those
   instead.  */

gimple_seq
gimple_seq_alloc (void)
{
  gimple_seq seq = gimple_seq_cache;
  if (seq)
    {
      gimple_seq_cache = gimple_seq_cache->next_free;
      gcc_assert (gimple_seq_cache != seq);
      memset (seq, 0, sizeof (*seq));
    }
  else
    {
      seq = (gimple_seq) ggc_alloc_cleared (sizeof (*seq));
#ifdef GATHER_STATISTICS
      gimple_alloc_counts[(int) gimple_alloc_kind_seq]++;
      gimple_alloc_sizes[(int) gimple_alloc_kind_seq] += sizeof (*seq);
#endif
    }

  return seq;
}

/* Return SEQ to the free pool of GIMPLE sequences.  */

void
gimple_seq_free (gimple_seq seq)
{
  if (seq == NULL)
    return;

  gcc_assert (gimple_seq_first (seq) == NULL);
  gcc_assert (gimple_seq_last (seq) == NULL);

  /* If this triggers, it's a sign that the same list is being freed
     twice.  */
  gcc_assert (seq != gimple_seq_cache || gimple_seq_cache == NULL);

  /* Add SEQ to the pool of free sequences.  */
  seq->next_free = gimple_seq_cache;
  gimple_seq_cache = seq;
}


/* Link gimple statement GS to the end of the sequence *SEQ_P.  If
   *SEQ_P is NULL, a new sequence is allocated.  */

void
gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
{
  gimple_stmt_iterator si;

  if (gs == NULL)
    return;

  if (*seq_p == NULL)
    *seq_p = gimple_seq_alloc ();

  si = gsi_last (*seq_p);
  gsi_insert_after (&si, gs, GSI_NEW_STMT);
}


/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  */

void
gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  if (*dst_p == NULL)
    *dst_p = gimple_seq_alloc ();

  si = gsi_last (*dst_p);
  gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
}


/* Helper function of empty_body_p.  Return true if STMT is an empty
   statement.  */

static bool
empty_stmt_p (gimple stmt)
{
  if (gimple_code (stmt) == GIMPLE_NOP)
    return true;
  if (gimple_code (stmt) == GIMPLE_BIND)
    return empty_body_p (gimple_bind_body (stmt));
  return false;
}


/* Return true if BODY contains nothing but empty statements.  */

bool
empty_body_p (gimple_seq body)
{
  gimple_stmt_iterator i;

  if (gimple_seq_empty_p (body))
    return true;
  for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
    if (!empty_stmt_p (gsi_stmt (i)))
      return false;

  return true;
}


/* Perform a deep copy of sequence SRC and return the result.  */

gimple_seq
gimple_seq_copy (gimple_seq src)
{
  gimple_stmt_iterator gsi;
  gimple_seq new_seq = gimple_seq_alloc ();
  gimple stmt;

  for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gimple_copy (gsi_stmt (gsi));
      gimple_seq_add_stmt (&new_seq, stmt);
    }

  return new_seq;
}


/* Walk all the statements in the sequence SEQ calling walk_gimple_stmt
   on each one.  WI is as in walk_gimple_stmt.

   If walk_gimple_stmt returns non-NULL, the walk is stopped, the
   value is stored in WI->CALLBACK_RESULT and the statement that
   produced the value is returned.

   Otherwise, all the statements are walked and NULL returned.  */

gimple
walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
                 walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi);
      if (ret)
        {
          /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
             to hold it.  */
          gcc_assert (wi);
          wi->callback_result = ret;
          return gsi_stmt (gsi);
        }
    }

  if (wi)
    wi->callback_result = NULL_TREE;

  return NULL;
}
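
/* A minimal sketch of a statement callback for walk_gimple_seq; the
   name and behavior are illustrative, not an API in this file:

     static tree
     count_calls (gimple_stmt_iterator *gsi, bool *handled_ops,
                  struct walk_stmt_info *wi)
     {
       if (is_gimple_call (gsi_stmt (*gsi)))
         (*(unsigned *) wi->info)++;
       *handled_ops = true;
       return NULL_TREE;
     }

   invoked as

     struct walk_stmt_info wi;
     unsigned n = 0;
     memset (&wi, 0, sizeof (wi));
     wi.info = &n;
     walk_gimple_seq (seq, count_calls, NULL, &wi);  */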


/* Helper function for walk_gimple_stmt.  Walk operands of a GIMPLE_ASM.  */

static tree
walk_gimple_asm (gimple stmt, walk_tree_fn callback_op,
                 struct walk_stmt_info *wi)
{
  tree ret;
  unsigned noutputs;
  const char **oconstraints;
  unsigned i;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  if (wi)
    wi->is_lhs = true;

  for (i = 0; i < noutputs; i++)
    {
      tree op = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem, &allows_reg,
                               &is_inout);
      if (wi)
        wi->val_only = (allows_reg || !allows_mem);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
        return ret;
    }

  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
    {
      tree op = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                              oconstraints, &allows_mem, &allows_reg);
      if (wi)
        wi->val_only = (allows_reg || !allows_mem);

      /* Although an input constraint "m" is not really an LHS, we need
         an lvalue.  */
      if (wi)
        wi->is_lhs = !wi->val_only;
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
        return ret;
    }

  if (wi)
    {
      wi->is_lhs = false;
      wi->val_only = true;
    }

  return NULL_TREE;
}


/* Helper function of WALK_GIMPLE_STMT.  Walk every tree operand in
   STMT.  CALLBACK_OP and WI are as in WALK_GIMPLE_STMT.

   CALLBACK_OP is called on each operand of STMT via walk_tree.
   Additional parameters to walk_tree must be stored in WI.  For each operand
   OP, walk_tree is called as:

	walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)

   If CALLBACK_OP returns non-NULL for an operand, the remaining
   operands are not scanned.

   The return value is that returned by the last call to walk_tree, or
   NULL_TREE if no CALLBACK_OP is specified.  */

inline tree
walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
                struct walk_stmt_info *wi)
{
  struct pointer_set_t *pset = (wi) ? wi->pset : NULL;
  unsigned i;
  tree ret = NULL_TREE;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* Walk the RHS operands.  A formal temporary LHS may use a
         COMPONENT_REF RHS.  */
      if (wi)
        wi->val_only = !is_gimple_reg (gimple_assign_lhs (stmt))
                       || !gimple_assign_single_p (stmt);

      for (i = 1; i < gimple_num_ops (stmt); i++)
        {
          ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
                           pset);
          if (ret)
            return ret;
        }

      /* Walk the LHS.  If the RHS is appropriate for a memory, we
         may use a COMPONENT_REF on the LHS.  */
      if (wi)
        {
          /* If the RHS has more than 1 operand, it is not appropriate
             for the memory.  */
          wi->val_only = !is_gimple_mem_rhs (gimple_assign_rhs1 (stmt))
                         || !gimple_assign_single_p (stmt);
          wi->is_lhs = true;
        }

      ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset);
      if (ret)
        return ret;

      if (wi)
        {
          wi->val_only = true;
          wi->is_lhs = false;
        }
      break;

    case GIMPLE_CALL:
      if (wi)
        wi->is_lhs = false;

      ret = walk_tree (gimple_call_chain_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_call_fn_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      for (i = 0; i < gimple_call_num_args (stmt); i++)
        {
          ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
                           pset);
          if (ret)
            return ret;
        }

      if (wi)
        wi->is_lhs = true;

      ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      if (wi)
        wi->is_lhs = false;
      break;

    case GIMPLE_CATCH:
      ret = walk_tree (gimple_catch_types_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_CHANGE_DYNAMIC_TYPE:
      ret = walk_tree (gimple_cdt_location_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_cdt_new_type_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_ASM:
      ret = walk_gimple_asm (stmt, callback_op, wi);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_CONTINUE:
      ret = walk_tree (gimple_omp_continue_control_def_ptr (stmt),
                       callback_op, wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_omp_continue_control_use_ptr (stmt),
                       callback_op, wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_CRITICAL:
      ret = walk_tree (gimple_omp_critical_name_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
        {
          ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
                           wi, pset);
          if (ret)
            return ret;
          ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
                           wi, pset);
          if (ret)
            return ret;
          ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
                           wi, pset);
          if (ret)
            return ret;
          ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
                           wi, pset);
        }
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_PARALLEL:
      ret = walk_tree (gimple_omp_parallel_clauses_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_parallel_child_fn_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_parallel_data_arg_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_TASK:
      ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_SECTIONS:
      ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;

      break;

    case GIMPLE_OMP_SINGLE:
      ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_ATOMIC_LOAD:
      ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_ATOMIC_STORE:
      ret = walk_tree (gimple_omp_atomic_store_val_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      break;

      /* Tuples that do not have operands.  */
    case GIMPLE_NOP:
    case GIMPLE_RESX:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_PREDICT:
      break;

    default:
      {
        enum gimple_statement_structure_enum gss;
        gss = gimple_statement_structure (stmt);
        if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
          for (i = 0; i < gimple_num_ops (stmt); i++)
            {
              ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
              if (ret)
                return ret;
            }
      }
      break;
    }

  return NULL_TREE;
}


/* Walk the current statement in GSI (optionally using traversal state
   stored in WI).  If WI is NULL, no state is kept during traversal.
   The callback CALLBACK_STMT is called.  If CALLBACK_STMT indicates
   that it has handled all the operands of the statement, its return
   value is returned.  Otherwise, the return value from CALLBACK_STMT
   is discarded and its operands are scanned.

   If CALLBACK_STMT is NULL or it didn't handle the operands,
   CALLBACK_OP is called on each operand of the statement via
   walk_gimple_op.  If walk_gimple_op returns non-NULL for any
   operand, the remaining operands are not scanned.  In this case, the
   return value from CALLBACK_OP is returned.

   In any other case, NULL_TREE is returned.  */

tree
walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
                  walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple ret;
  tree tree_ret;
  gimple stmt = gsi_stmt (*gsi);

  if (wi)
    wi->gsi = *gsi;

  if (wi && wi->want_locations && gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  ret = NULL;

  /* Invoke the statement callback.  Return if the callback handled
     all of STMT operands by itself.  */
  if (callback_stmt)
    {
      bool handled_ops = false;
      tree_ret = callback_stmt (gsi, &handled_ops, wi);
      if (handled_ops)
        return tree_ret;

      /* If CALLBACK_STMT did not handle operands, it should not have
         a value to return.  */
      gcc_assert (tree_ret == NULL);

      /* Re-read stmt in case the callback changed it.  */
      stmt = gsi_stmt (*gsi);
    }

  /* If CALLBACK_OP is defined, invoke it on every operand of STMT.  */
  if (callback_op)
    {
      tree_ret = walk_gimple_op (stmt, callback_op, wi);
      if (tree_ret)
        return tree_ret;
    }

  /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      ret = walk_gimple_seq (gimple_bind_body (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_CATCH:
      ret = walk_gimple_seq (gimple_catch_handler (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_gimple_seq (gimple_eh_filter_failure (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_TRY:
      ret = walk_gimple_seq (gimple_try_eval (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;

      ret = walk_gimple_seq (gimple_try_cleanup (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_gimple_seq (gimple_omp_for_pre_body (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;

      /* FALL THROUGH.  */
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
      ret = walk_gimple_seq (gimple_omp_body (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_WITH_CLEANUP_EXPR:
      ret = walk_gimple_seq (gimple_wce_cleanup (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    default:
      gcc_assert (!gimple_has_substatements (stmt));
      break;
    }

  return NULL;
}


/* Set sequence SEQ to be the GIMPLE body for function FNDECL.  */

void
gimple_set_body (tree fndecl, gimple_seq seq)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  if (fn == NULL)
    {
      /* If FNDECL still does not have a function structure associated
         with it, then it does not make sense for it to receive a
         GIMPLE body.  */
      gcc_assert (seq == NULL);
    }
  else
    fn->gimple_body = seq;
}


/* Return the body of GIMPLE statements for function FNDECL.  */

gimple_seq
gimple_body (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return fn ? fn->gimple_body : NULL;
}

/* Return true when FNDECL has a GIMPLE body, either in unlowered
   or CFG form.  */
bool
gimple_has_body_p (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return (gimple_body (fndecl) || (fn && fn->cfg));
}

/* Detect flags from a GIMPLE_CALL.  This is just like
   call_expr_flags, but for gimple tuples.  */

int
gimple_call_flags (const_gimple stmt)
{
  int flags;
  tree decl = gimple_call_fndecl (stmt);
  tree t;

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else
    {
      t = TREE_TYPE (gimple_call_fn (stmt));
      if (t && TREE_CODE (t) == POINTER_TYPE)
        flags = flags_from_decl_or_type (TREE_TYPE (t));
      else
        flags = 0;
    }

  return flags;
}


/* Return true if GS is a copy assignment.  */

bool
gimple_assign_copy_p (gimple gs)
{
  return gimple_code (gs) == GIMPLE_ASSIGN
         && get_gimple_rhs_class (gimple_assign_rhs_code (gs))
            == GIMPLE_SINGLE_RHS
         && is_gimple_val (gimple_op (gs, 1));
}


/* Return true if GS is an SSA_NAME copy assignment.  */

bool
gimple_assign_ssa_name_copy_p (gimple gs)
{
  return (gimple_code (gs) == GIMPLE_ASSIGN
          && (get_gimple_rhs_class (gimple_assign_rhs_code (gs))
              == GIMPLE_SINGLE_RHS)
          && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
          && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
}


/* Return true if GS is an assignment with a singleton RHS, i.e.,
   there is no operator associated with the assignment itself.
   Unlike gimple_assign_copy_p, this predicate returns true for
   any RHS operand, including those that perform an operation
   and do not have the semantics of a copy, such as COND_EXPR.  */

bool
gimple_assign_single_p (gimple gs)
{
  return (gimple_code (gs) == GIMPLE_ASSIGN
          && get_gimple_rhs_class (gimple_assign_rhs_code (gs))
             == GIMPLE_SINGLE_RHS);
}

/* Return true if GS is an assignment with a unary RHS, but the
   operator has no effect on the assigned value.  The logic is adapted
   from STRIP_NOPS.  This predicate is intended to be used in tuplifying
   instances in which STRIP_NOPS was previously applied to the RHS of
   an assignment.

   NOTE: In the use cases that led to the creation of this function
   and of gimple_assign_single_p, it is typical to test for either
   condition and to proceed in the same manner.  In each case, the
   assigned value is represented by the single RHS operand of the
   assignment.  I suspect there may be cases where gimple_assign_copy_p,
   gimple_assign_single_p, or equivalent logic is used where a similar
   treatment of unary NOPs is appropriate.  */

bool
gimple_assign_unary_nop_p (gimple gs)
{
  return (gimple_code (gs) == GIMPLE_ASSIGN
          && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
              || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
          && gimple_assign_rhs1 (gs) != error_mark_node
          && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
              == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
}

/* Set BB to be the basic block holding statement STMT.  */

void
gimple_set_bb (gimple stmt, basic_block bb)
{
  stmt->gsbase.bb = bb;

  /* If the statement is a label, add the label to block-to-labels map
     so that we can speed up edge creation for GIMPLE_GOTOs.  */
  if (cfun->cfg && gimple_code (stmt) == GIMPLE_LABEL)
    {
      tree t;
      int uid;

      t = gimple_label_label (stmt);
      uid = LABEL_DECL_UID (t);
      if (uid == -1)
        {
          unsigned old_len = VEC_length (basic_block, label_to_block_map);
          LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
          if (old_len <= (unsigned) uid)
            {
              unsigned new_len = 3 * uid / 2 + 1;

              VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
                                     new_len);
            }
        }

      VEC_replace (basic_block, label_to_block_map, uid, bb);
    }
}


/* Fold the expression computed by STMT.  If the expression can be
   folded, return the folded result, otherwise return NULL.  STMT is
   not modified.  */

tree
gimple_fold (const_gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      return fold_binary (gimple_cond_code (stmt),
                          boolean_type_node,
                          gimple_cond_lhs (stmt),
                          gimple_cond_rhs (stmt));

    case GIMPLE_ASSIGN:
      switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
        {
        case GIMPLE_UNARY_RHS:
          return fold_unary (gimple_assign_rhs_code (stmt),
                             TREE_TYPE (gimple_assign_lhs (stmt)),
                             gimple_assign_rhs1 (stmt));
        case GIMPLE_BINARY_RHS:
          return fold_binary (gimple_assign_rhs_code (stmt),
                              TREE_TYPE (gimple_assign_lhs (stmt)),
                              gimple_assign_rhs1 (stmt),
                              gimple_assign_rhs2 (stmt));
        case GIMPLE_SINGLE_RHS:
          return fold (gimple_assign_rhs1 (stmt));
        default:;
        }
      break;

    case GIMPLE_SWITCH:
      return gimple_switch_index (stmt);

    case GIMPLE_CALL:
      return NULL_TREE;

    default:
      break;
    }

  gcc_unreachable ();
}
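
/* For instance (illustrative values): for a statement representing
   `x = 3 + 4', the call

     tree val = gimple_fold (stmt);

   would return the folded constant 7 via fold_binary, while STMT
   itself is left untouched.  */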


/* Modify the RHS of the assignment pointed-to by GSI using the
   operands in the expression tree EXPR.

   NOTE: The statement pointed-to by GSI may be reallocated if it
   did not have enough operand slots.

   This function is useful to convert an existing tree expression into
   the flat representation used for the RHS of a GIMPLE assignment.
   It will reallocate memory as needed to expand or shrink the number
   of operand slots needed to represent EXPR.

   NOTE: If you find yourself building a tree and then calling this
   function, you are most certainly doing it the slow way.  It is much
   better to build a new assignment or to use the function
   gimple_assign_set_rhs_with_ops, which does not require an
   expression tree to be built.  */

void
gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
{
  enum tree_code subcode;
  tree op1, op2;

  extract_ops_from_tree (expr, &subcode, &op1, &op2);
  gimple_assign_set_rhs_with_ops (gsi, subcode, op1, op2);
}


/* Set the RHS of assignment statement pointed-to by GSI to CODE with
   operands OP1 and OP2.

   NOTE: The statement pointed-to by GSI may be reallocated if it
   did not have enough operand slots.  */

void
gimple_assign_set_rhs_with_ops (gimple_stmt_iterator *gsi, enum tree_code code,
                                tree op1, tree op2)
{
  unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
  gimple stmt = gsi_stmt (*gsi);

  /* If the new CODE needs more operands, allocate a new statement.  */
  if (gimple_num_ops (stmt) < new_rhs_ops + 1)
    {
      tree lhs = gimple_assign_lhs (stmt);
      gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
      memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
      gsi_replace (gsi, new_stmt, true);
      stmt = new_stmt;

      /* The LHS needs to be reset as this also changes the SSA name
         on the LHS.  */
      gimple_assign_set_lhs (stmt, lhs);
    }

  gimple_set_num_ops (stmt, new_rhs_ops + 1);
  gimple_set_subcode (stmt, code);
  gimple_assign_set_rhs1 (stmt, op1);
  if (new_rhs_ops > 1)
    gimple_assign_set_rhs2 (stmt, op2);
}

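/* A minimal usage sketch (hedged; GSI points at a hypothetical
   assignment "x = a * 8" and SHIFT is a pre-built INTEGER_CST): a pass
   can retarget the RHS in place, letting the routine above grow the
   operand vector if the new code needs more slots.

     gimple_assign_set_rhs_with_ops (&gsi, LSHIFT_EXPR,
                                     gimple_assign_rhs1 (stmt), shift);
     update_stmt (gsi_stmt (gsi));
*/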

/* Return the LHS of a statement that performs an assignment,
   either a GIMPLE_ASSIGN or a GIMPLE_CALL.  Returns NULL_TREE
   for a call to a function that returns no value, or for a
   statement other than an assignment or a call.  */

tree
gimple_get_lhs (const_gimple stmt)
{
  enum gimple_code code = gimple_code (stmt);

  if (code == GIMPLE_ASSIGN)
    return gimple_assign_lhs (stmt);
  else if (code == GIMPLE_CALL)
    return gimple_call_lhs (stmt);
  else
    return NULL_TREE;
}


/* Set the LHS of a statement that performs an assignment,
   either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */

void
gimple_set_lhs (gimple stmt, tree lhs)
{
  enum gimple_code code = gimple_code (stmt);

  if (code == GIMPLE_ASSIGN)
    gimple_assign_set_lhs (stmt, lhs);
  else if (code == GIMPLE_CALL)
    gimple_call_set_lhs (stmt, lhs);
  else
    gcc_unreachable ();
}


/* Return a deep copy of statement STMT.  All the operands from STMT
   are reallocated and copied using unshare_expr.  The DEF, USE, VDEF
   and VUSE operand arrays are set to empty in the new copy.  */

gimple
gimple_copy (gimple stmt)
{
  enum gimple_code code = gimple_code (stmt);
  unsigned num_ops = gimple_num_ops (stmt);
  gimple copy = gimple_alloc (code, num_ops);
  unsigned i;

  /* Shallow copy all the fields from STMT.  */
  memcpy (copy, stmt, gimple_size (code));

  /* If STMT has sub-statements, deep-copy them as well.  */
  if (gimple_has_substatements (stmt))
    {
      gimple_seq new_seq;
      tree t;

      switch (gimple_code (stmt))
        {
        case GIMPLE_BIND:
          new_seq = gimple_seq_copy (gimple_bind_body (stmt));
          gimple_bind_set_body (copy, new_seq);
          gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt)));
          gimple_bind_set_block (copy, gimple_bind_block (stmt));
          break;

        case GIMPLE_CATCH:
          new_seq = gimple_seq_copy (gimple_catch_handler (stmt));
          gimple_catch_set_handler (copy, new_seq);
          t = unshare_expr (gimple_catch_types (stmt));
          gimple_catch_set_types (copy, t);
          break;

        case GIMPLE_EH_FILTER:
          new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt));
          gimple_eh_filter_set_failure (copy, new_seq);
          t = unshare_expr (gimple_eh_filter_types (stmt));
          gimple_eh_filter_set_types (copy, t);
          break;

        case GIMPLE_TRY:
          new_seq = gimple_seq_copy (gimple_try_eval (stmt));
          gimple_try_set_eval (copy, new_seq);
          new_seq = gimple_seq_copy (gimple_try_cleanup (stmt));
          gimple_try_set_cleanup (copy, new_seq);
          break;

        case GIMPLE_OMP_FOR:
          new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
          gimple_omp_for_set_pre_body (copy, new_seq);
          t = unshare_expr (gimple_omp_for_clauses (stmt));
          gimple_omp_for_set_clauses (copy, t);
          copy->gimple_omp_for.iter
            = GGC_NEWVEC (struct gimple_omp_for_iter,
                          gimple_omp_for_collapse (stmt));
          for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
            {
              gimple_omp_for_set_cond (copy, i,
                                       gimple_omp_for_cond (stmt, i));
              gimple_omp_for_set_index (copy, i,
                                        gimple_omp_for_index (stmt, i));
              t = unshare_expr (gimple_omp_for_initial (stmt, i));
              gimple_omp_for_set_initial (copy, i, t);
              t = unshare_expr (gimple_omp_for_final (stmt, i));
              gimple_omp_for_set_final (copy, i, t);
              t = unshare_expr (gimple_omp_for_incr (stmt, i));
              gimple_omp_for_set_incr (copy, i, t);
            }
          goto copy_omp_body;

        case GIMPLE_OMP_PARALLEL:
          t = unshare_expr (gimple_omp_parallel_clauses (stmt));
          gimple_omp_parallel_set_clauses (copy, t);
          t = unshare_expr (gimple_omp_parallel_child_fn (stmt));
          gimple_omp_parallel_set_child_fn (copy, t);
          t = unshare_expr (gimple_omp_parallel_data_arg (stmt));
          gimple_omp_parallel_set_data_arg (copy, t);
          goto copy_omp_body;

        case GIMPLE_OMP_TASK:
          t = unshare_expr (gimple_omp_task_clauses (stmt));
          gimple_omp_task_set_clauses (copy, t);
          t = unshare_expr (gimple_omp_task_child_fn (stmt));
          gimple_omp_task_set_child_fn (copy, t);
          t = unshare_expr (gimple_omp_task_data_arg (stmt));
          gimple_omp_task_set_data_arg (copy, t);
          t = unshare_expr (gimple_omp_task_copy_fn (stmt));
          gimple_omp_task_set_copy_fn (copy, t);
          t = unshare_expr (gimple_omp_task_arg_size (stmt));
          gimple_omp_task_set_arg_size (copy, t);
          t = unshare_expr (gimple_omp_task_arg_align (stmt));
          gimple_omp_task_set_arg_align (copy, t);
          goto copy_omp_body;

        case GIMPLE_OMP_CRITICAL:
          t = unshare_expr (gimple_omp_critical_name (stmt));
          gimple_omp_critical_set_name (copy, t);
          goto copy_omp_body;

        case GIMPLE_OMP_SECTIONS:
          t = unshare_expr (gimple_omp_sections_clauses (stmt));
          gimple_omp_sections_set_clauses (copy, t);
          t = unshare_expr (gimple_omp_sections_control (stmt));
          gimple_omp_sections_set_control (copy, t);
          /* FALLTHRU */

        case GIMPLE_OMP_SINGLE:
        case GIMPLE_OMP_SECTION:
        case GIMPLE_OMP_MASTER:
        case GIMPLE_OMP_ORDERED:
        copy_omp_body:
          new_seq = gimple_seq_copy (gimple_omp_body (stmt));
          gimple_omp_set_body (copy, new_seq);
          break;

        case GIMPLE_WITH_CLEANUP_EXPR:
          new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
          gimple_wce_set_cleanup (copy, new_seq);
          break;

        default:
          gcc_unreachable ();
        }
    }

  /* Make copy of operands.  */
  if (num_ops > 0)
    {
      for (i = 0; i < num_ops; i++)
        gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));

      /* Clear out SSA operand vectors on COPY.  */
      if (gimple_has_ops (stmt))
        {
          gimple_set_def_ops (copy, NULL);
          gimple_set_use_ops (copy, NULL);
        }

      if (gimple_has_mem_ops (stmt))
        {
          gimple_set_vdef (copy, gimple_vdef (stmt));
          gimple_set_vuse (copy, gimple_vuse (stmt));
        }

      /* SSA operands need to be updated.  */
      gimple_set_modified (copy, true);
    }

  return copy;
}

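/* A minimal usage sketch (hedged; STMT and GSI are hypothetical locals
   in a pass that duplicates a statement): the copy shares no operand
   trees with the original, but its SSA operand caches are only cleared
   and marked modified, so the caller is still expected to trigger an
   operand update.

     gimple dup = gimple_copy (stmt);
     gsi_insert_after (&gsi, dup, GSI_NEW_STMT);
     update_stmt (dup);
*/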

/* Set the MODIFIED flag to MODIFIEDP, iff the gimple statement S has
   a MODIFIED field.  */

void
gimple_set_modified (gimple s, bool modifiedp)
{
  if (gimple_has_ops (s))
    {
      s->gsbase.modified = (unsigned) modifiedp;

      if (modifiedp
          && cfun->gimple_df
          && is_gimple_call (s)
          && gimple_call_noreturn_p (s))
        VEC_safe_push (gimple, gc, MODIFIED_NORETURN_CALLS (cfun), s);
    }
}


/* Return true if statement S has side-effects.  We consider a
   statement to have side effects if:

   - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
   - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS.  */

bool
gimple_has_side_effects (const_gimple s)
{
  unsigned i;

  /* We don't have to scan the arguments to check for
     volatile arguments, though, at present, we still
     do a scan to check for TREE_SIDE_EFFECTS.  */
  if (gimple_has_volatile_ops (s))
    return true;

  if (is_gimple_call (s))
    {
      unsigned nargs = gimple_call_num_args (s);

      if (!(gimple_call_flags (s) & (ECF_CONST | ECF_PURE)))
        return true;
      else if (gimple_call_flags (s) & ECF_LOOPING_CONST_OR_PURE)
        /* An infinite loop is considered a side effect.  */
        return true;

      if (gimple_call_lhs (s)
          && TREE_SIDE_EFFECTS (gimple_call_lhs (s)))
        {
          gcc_assert (gimple_has_volatile_ops (s));
          return true;
        }

      if (TREE_SIDE_EFFECTS (gimple_call_fn (s)))
        return true;

      for (i = 0; i < nargs; i++)
        if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i)))
          {
            gcc_assert (gimple_has_volatile_ops (s));
            return true;
          }

      return false;
    }
  else
    {
      for (i = 0; i < gimple_num_ops (s); i++)
        if (TREE_SIDE_EFFECTS (gimple_op (s, i)))
          {
            gcc_assert (gimple_has_volatile_ops (s));
            return true;
          }
    }

  return false;
}

/* Return true if the RHS of statement S has side effects.
   We may use it to determine if it is admissible to replace
   an assignment or call with a copy of a previously-computed
   value.  In such cases, side-effects due to the LHS are
   preserved.  */

bool
gimple_rhs_has_side_effects (const_gimple s)
{
  unsigned i;

  if (is_gimple_call (s))
    {
      unsigned nargs = gimple_call_num_args (s);

      if (!(gimple_call_flags (s) & (ECF_CONST | ECF_PURE)))
        return true;

      /* We cannot use gimple_has_volatile_ops here,
         because we must ignore a volatile LHS.  */
      if (TREE_SIDE_EFFECTS (gimple_call_fn (s))
          || TREE_THIS_VOLATILE (gimple_call_fn (s)))
        {
          gcc_assert (gimple_has_volatile_ops (s));
          return true;
        }

      for (i = 0; i < nargs; i++)
        if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i))
            || TREE_THIS_VOLATILE (gimple_call_arg (s, i)))
          return true;

      return false;
    }
  else if (is_gimple_assign (s))
    {
      /* Skip the first operand, the LHS.  */
      for (i = 1; i < gimple_num_ops (s); i++)
        if (TREE_SIDE_EFFECTS (gimple_op (s, i))
            || TREE_THIS_VOLATILE (gimple_op (s, i)))
          {
            gcc_assert (gimple_has_volatile_ops (s));
            return true;
          }
    }
  else
    {
      /* For statements without an LHS, examine all arguments.  */
      for (i = 0; i < gimple_num_ops (s); i++)
        if (TREE_SIDE_EFFECTS (gimple_op (s, i))
            || TREE_THIS_VOLATILE (gimple_op (s, i)))
          {
            gcc_assert (gimple_has_volatile_ops (s));
            return true;
          }
    }

  return false;
}


/* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
   Return true if S can trap.  If INCLUDE_LHS is true and S is a
   GIMPLE_ASSIGN, the LHS of the assignment is also checked.
   Otherwise, only the RHS of the assignment is checked.  */

static bool
gimple_could_trap_p_1 (gimple s, bool include_lhs)
{
  unsigned i, start;
  tree t, div = NULL_TREE;
  enum tree_code op;

  start = (is_gimple_assign (s) && !include_lhs) ? 1 : 0;

  for (i = start; i < gimple_num_ops (s); i++)
    if (tree_could_trap_p (gimple_op (s, i)))
      return true;

  switch (gimple_code (s))
    {
    case GIMPLE_ASM:
      return gimple_asm_volatile_p (s);

    case GIMPLE_CALL:
      t = gimple_call_fndecl (s);
      /* Assume that calls to weak functions may trap.  */
      if (!t || !DECL_P (t) || DECL_WEAK (t))
        return true;
      return false;

    case GIMPLE_ASSIGN:
      t = gimple_expr_type (s);
      op = gimple_assign_rhs_code (s);
      if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
        div = gimple_assign_rhs2 (s);
      return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
                                      (INTEGRAL_TYPE_P (t)
                                       && TYPE_OVERFLOW_TRAPS (t)),
                                      div));

    default:
      break;
    }

  return false;
}


/* Return true if statement S can trap.  */

bool
gimple_could_trap_p (gimple s)
{
  return gimple_could_trap_p_1 (s, true);
}


/* Return true if RHS of a GIMPLE_ASSIGN S can trap.  */

bool
gimple_assign_rhs_could_trap_p (gimple s)
{
  gcc_assert (is_gimple_assign (s));
  return gimple_could_trap_p_1 (s, false);
}

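/* A minimal usage sketch (hedged; the loop-invariant-motion setting is
   illustrative): a statement whose RHS may trap, such as a division by
   a non-constant, must not be hoisted past the condition that guards
   its execution.

     if (is_gimple_assign (stmt)
         && !gimple_assign_rhs_could_trap_p (stmt))
       ...STMT is a candidate for hoisting to the loop preheader...
*/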

/* Print debugging information for gimple stmts generated.  */

void
dump_gimple_statistics (void)
{
#ifdef GATHER_STATISTICS
  int i, total_tuples = 0, total_bytes = 0;

  fprintf (stderr, "\nGIMPLE statements\n");
  fprintf (stderr, "Kind                   Stmts      Bytes\n");
  fprintf (stderr, "---------------------------------------\n");
  for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
    {
      fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i],
               gimple_alloc_counts[i], gimple_alloc_sizes[i]);
      total_tuples += gimple_alloc_counts[i];
      total_bytes += gimple_alloc_sizes[i];
    }
  fprintf (stderr, "---------------------------------------\n");
  fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes);
  fprintf (stderr, "---------------------------------------\n");
#else
  fprintf (stderr, "No gimple statistics\n");
#endif
}


/* Return the number of operands needed on the RHS of a GIMPLE
   assignment for an expression with tree code CODE.  */

unsigned
get_gimple_rhs_num_ops (enum tree_code code)
{
  enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);

  if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
    return 1;
  else if (rhs_class == GIMPLE_BINARY_RHS)
    return 2;
  else
    gcc_unreachable ();
}

#define DEFTREECODE(SYM, STRING, TYPE, NARGS)  \
  (unsigned char)  \
  ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS  \
   : ((TYPE) == tcc_binary  \
      || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS  \
   : ((TYPE) == tcc_constant  \
      || (TYPE) == tcc_declaration  \
      || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS  \
   : ((SYM) == TRUTH_AND_EXPR  \
      || (SYM) == TRUTH_OR_EXPR  \
      || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS  \
   : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS  \
   : ((SYM) == COND_EXPR  \
      || (SYM) == CONSTRUCTOR  \
      || (SYM) == OBJ_TYPE_REF  \
      || (SYM) == ASSERT_EXPR  \
      || (SYM) == ADDR_EXPR  \
      || (SYM) == WITH_SIZE_EXPR  \
      || (SYM) == EXC_PTR_EXPR  \
      || (SYM) == SSA_NAME  \
      || (SYM) == FILTER_EXPR  \
      || (SYM) == POLYNOMIAL_CHREC  \
      || (SYM) == DOT_PROD_EXPR  \
      || (SYM) == VEC_COND_EXPR  \
      || (SYM) == REALIGN_LOAD_EXPR) ? GIMPLE_SINGLE_RHS  \
   : GIMPLE_INVALID_RHS),
#define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,

const unsigned char gimple_rhs_class_table[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES

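/* Illustrative lookups (these follow directly from the DEFTREECODE
   mapping above): a unary code such as NEGATE_EXPR classifies as
   GIMPLE_UNARY_RHS, a binary or comparison code such as PLUS_EXPR or
   LT_EXPR as GIMPLE_BINARY_RHS, and a "flat" operand such as SSA_NAME
   or ADDR_EXPR as GIMPLE_SINGLE_RHS, so:

     get_gimple_rhs_num_ops (NEGATE_EXPR) == 1
     get_gimple_rhs_num_ops (PLUS_EXPR)   == 2
     get_gimple_rhs_num_ops (SSA_NAME)    == 1
*/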
/* For the definitive definition of GIMPLE, see doc/tree-ssa.texi.  */

/* Validation of GIMPLE expressions.  */

/* Return true if OP is an acceptable tree node to be used as a GIMPLE
   operand.  */

bool
is_gimple_operand (const_tree op)
{
  return op && get_gimple_rhs_class (TREE_CODE (op)) == GIMPLE_SINGLE_RHS;
}

/* Returns true iff T is a valid RHS for an assignment to a renamed
   user -- or front-end generated artificial -- variable.  */

bool
is_gimple_reg_rhs (tree t)
{
  return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS;
}

/* Returns true iff T is a valid RHS for an assignment to an un-renamed
   LHS, or for a call argument.  */

bool
is_gimple_mem_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return is_gimple_val (t) || is_gimple_lvalue (t);
}

/* Return true if T is a valid LHS for a GIMPLE assignment expression.  */

bool
is_gimple_lvalue (tree t)
{
  return (is_gimple_addressable (t)
          || TREE_CODE (t) == WITH_SIZE_EXPR
          /* These are complex lvalues, but don't have addresses, so they
             go here.  */
          || TREE_CODE (t) == BIT_FIELD_REF);
}

/* Return true if T is a GIMPLE condition.  */

bool
is_gimple_condexpr (tree t)
{
  return (is_gimple_val (t) || (COMPARISON_CLASS_P (t)
                                && !tree_could_trap_p (t)
                                && is_gimple_val (TREE_OPERAND (t, 0))
                                && is_gimple_val (TREE_OPERAND (t, 1))));
}

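/* Illustrative cases for the predicate above (hedged; the SSA names
   are hypothetical): "a_1" and "a_1 < b_2" are valid GIMPLE
   conditions, while "a_1 < b_2 + 1" is not, because the second
   comparison operand is not a GIMPLE value and must first be
   gimplified into its own assignment.  */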
/* Return true if T is something whose address can be taken.  */

bool
is_gimple_addressable (tree t)
{
  return (is_gimple_id (t) || handled_component_p (t) || INDIRECT_REF_P (t));
}

/* Return true if T is a valid gimple constant.  */

bool
is_gimple_constant (const_tree t)
{
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
    case REAL_CST:
    case FIXED_CST:
    case STRING_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
      return true;

    /* Vector constant constructors are gimple invariant.  */
    case CONSTRUCTOR:
      if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
        return TREE_CONSTANT (t);
      else
        return false;

    default:
      return false;
    }
}

/* Return true if T is a gimple address.  */

bool
is_gimple_address (const_tree t)
{
  tree op;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;

  op = TREE_OPERAND (t, 0);
  while (handled_component_p (op))
    {
      if ((TREE_CODE (op) == ARRAY_REF
           || TREE_CODE (op) == ARRAY_RANGE_REF)
          && !is_gimple_val (TREE_OPERAND (op, 1)))
        return false;

      op = TREE_OPERAND (op, 0);
    }

  if (CONSTANT_CLASS_P (op) || INDIRECT_REF_P (op))
    return true;

  switch (TREE_CODE (op))
    {
    case PARM_DECL:
    case RESULT_DECL:
    case LABEL_DECL:
    case FUNCTION_DECL:
    case VAR_DECL:
    case CONST_DECL:
      return true;

    default:
      return false;
    }
}

/* Strip out all handled components that produce invariant
   offsets.  */

static const_tree
strip_invariant_refs (const_tree op)
{
  while (handled_component_p (op))
    {
      switch (TREE_CODE (op))
        {
        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          if (!is_gimple_constant (TREE_OPERAND (op, 1))
              || TREE_OPERAND (op, 2) != NULL_TREE
              || TREE_OPERAND (op, 3) != NULL_TREE)
            return NULL;
          break;

        case COMPONENT_REF:
          if (TREE_OPERAND (op, 2) != NULL_TREE)
            return NULL;
          break;

        default:
          break;
        }
      op = TREE_OPERAND (op, 0);
    }

  return op;
}

/* Return true if T is a gimple invariant address.  */

bool
is_gimple_invariant_address (const_tree t)
{
  const_tree op;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;

  op = strip_invariant_refs (TREE_OPERAND (t, 0));

  return op && (CONSTANT_CLASS_P (op) || decl_address_invariant_p (op));
}

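/* Illustrative cases (hedged; the declarations are hypothetical): for
   a file-scope "struct S { int f[4]; } s;", the address "&s.f[2]" is
   an invariant address because every component offset is constant,
   while "&s.f[i_1]" is not, since its ARRAY_REF index is an SSA name
   rather than a gimple constant.  */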
/* Return true if T is a gimple invariant address at IPA level
   (so addresses of variables on stack are not allowed).  */

bool
is_gimple_ip_invariant_address (const_tree t)
{
  const_tree op;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;

  op = strip_invariant_refs (TREE_OPERAND (t, 0));

  return op && (CONSTANT_CLASS_P (op) || decl_address_ip_invariant_p (op));
}

/* Return true if T is a GIMPLE minimal invariant.  It's a restricted
   form of function invariant.  */

bool
is_gimple_min_invariant (const_tree t)
{
  if (TREE_CODE (t) == ADDR_EXPR)
    return is_gimple_invariant_address (t);

  return is_gimple_constant (t);
}

/* Return true if T is a GIMPLE interprocedural invariant.  It's a restricted
   form of gimple minimal invariant.  */

bool
is_gimple_ip_invariant (const_tree t)
{
  if (TREE_CODE (t) == ADDR_EXPR)
    return is_gimple_ip_invariant_address (t);

  return is_gimple_constant (t);
}

/* Return true if T looks like a valid GIMPLE statement.  */

bool
is_gimple_stmt (tree t)
{
  const enum tree_code code = TREE_CODE (t);

  switch (code)
    {
    case NOP_EXPR:
      /* The only valid NOP_EXPR is the empty statement.  */
      return IS_EMPTY_STMT (t);

    case BIND_EXPR:
    case COND_EXPR:
      /* These are only valid if they're void.  */
      return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));

    case SWITCH_EXPR:
    case GOTO_EXPR:
    case RETURN_EXPR:
    case LABEL_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
    case CHANGE_DYNAMIC_TYPE_EXPR:
    case ASM_EXPR:
    case RESX_EXPR:
    case STATEMENT_LIST:
    case OMP_PARALLEL:
    case OMP_FOR:
    case OMP_SECTIONS:
    case OMP_SECTION:
    case OMP_SINGLE:
    case OMP_MASTER:
    case OMP_ORDERED:
    case OMP_CRITICAL:
    case OMP_TASK:
      /* These are always void.  */
      return true;

    case CALL_EXPR:
    case MODIFY_EXPR:
    case PREDICT_EXPR:
      /* These are valid regardless of their type.  */
      return true;

    default:
      return false;
    }
}

/* Return true if T is a variable.  */

bool
is_gimple_variable (tree t)
{
  return (TREE_CODE (t) == VAR_DECL
          || TREE_CODE (t) == PARM_DECL
          || TREE_CODE (t) == RESULT_DECL
          || TREE_CODE (t) == SSA_NAME);
}

/* Return true if T is a GIMPLE identifier (something with an address).  */

bool
is_gimple_id (tree t)
{
  return (is_gimple_variable (t)
          || TREE_CODE (t) == FUNCTION_DECL
          || TREE_CODE (t) == LABEL_DECL
          || TREE_CODE (t) == CONST_DECL
          /* Allow string constants, since they are addressable.  */
          || TREE_CODE (t) == STRING_CST);
}

/* Return true if TYPE is a suitable type for a scalar register variable.  */

bool
is_gimple_reg_type (tree type)
{
  /* In addition to aggregate types, we also exclude complex types if not
     optimizing because they can be subject to partial stores in GNU C by
     means of the __real__ and __imag__ operators and we cannot promote
     them to total stores (see gimplify_modify_expr_complex_part).  */
  return !(AGGREGATE_TYPE_P (type)
           || (TREE_CODE (type) == COMPLEX_TYPE && !optimize));
}

/* Return true if T is a non-aggregate register variable.  */

bool
is_gimple_reg (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    t = SSA_NAME_VAR (t);

  if (!is_gimple_variable (t))
    return false;

  /* Complex and vector values must have been put into SSA-like form.
     That is, no assignments to the individual components.  */
  if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
    return DECL_GIMPLE_REG_P (t);

  if (!is_gimple_reg_type (TREE_TYPE (t)))
    return false;

  /* A volatile decl is not acceptable because we can't reuse it as
     needed.  We need to copy it into a temp first.  */
  if (TREE_THIS_VOLATILE (t))
    return false;

  /* We define "registers" as things that can be renamed as needed,
     which with our infrastructure does not apply to memory.  */
  if (needs_to_live_in_memory (t))
    return false;

  /* Hard register variables are an interesting case.  For those that
     are call-clobbered, we don't know where all the calls are, since
     we don't (want to) take into account which operations will turn
     into libcalls at the rtl level.  For those that are call-saved,
     we don't currently model the fact that calls may in fact change
     global hard registers, nor do we examine ASM_CLOBBERS at the tree
     level, and so miss variable changes that they might imply.  All
     around, it seems safest to not do too much optimization with these
     at the tree level at all.  We'll have to rely on the rtl optimizers
     to clean this up, as there we've got all the appropriate bits
     exposed.  */
  if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
    return false;

  return true;
}

/* Return true if T is a GIMPLE variable whose address is not needed.  */

bool
is_gimple_non_addressable (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    t = SSA_NAME_VAR (t);

  return (is_gimple_variable (t) && ! needs_to_live_in_memory (t));
}

/* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant.  */

bool
is_gimple_val (tree t)
{
  /* Make loads from volatiles and memory vars explicit.  */
  if (is_gimple_variable (t)
      && is_gimple_reg_type (TREE_TYPE (t))
      && !is_gimple_reg (t))
    return false;

  /* FIXME make these decls.  That can happen only when we expose the
     entire landing-pad construct at the tree level.  */
  if (TREE_CODE (t) == EXC_PTR_EXPR || TREE_CODE (t) == FILTER_EXPR)
    return true;

  return (is_gimple_variable (t) || is_gimple_min_invariant (t));
}

/* Similarly, but accept hard registers as inputs to asm statements.  */

bool
is_gimple_asm_val (tree t)
{
  if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
    return true;

  return is_gimple_val (t);
}

/* Return true if T is a GIMPLE minimal lvalue.  */

bool
is_gimple_min_lval (tree t)
{
  if (!(t = CONST_CAST_TREE (strip_invariant_refs (t))))
    return false;
  return (is_gimple_id (t) || TREE_CODE (t) == INDIRECT_REF);
}

/* Return true if T is a typecast operation.  */

bool
is_gimple_cast (tree t)
{
  return (CONVERT_EXPR_P (t)
          || TREE_CODE (t) == FIX_TRUNC_EXPR);
}

/* Return true if T is a valid function operand of a CALL_EXPR.  */

bool
is_gimple_call_addr (tree t)
{
  return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t));
}

/* If T makes a function call, return the corresponding CALL_EXPR operand.
   Otherwise, return NULL_TREE.  */

tree
get_call_expr_in (tree t)
{
  if (TREE_CODE (t) == MODIFY_EXPR)
    t = TREE_OPERAND (t, 1);
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);
  if (TREE_CODE (t) == CALL_EXPR)
    return t;
  return NULL_TREE;
}


/* Given a memory reference expression T, return its base address.
   The base address of a memory reference expression is the main
   object being referenced.  For instance, the base address for
   'array[i].fld[j]' is 'array'.  You can think of this as stripping
   away the offset part from a memory address.

   This function calls handled_component_p to strip away all the inner
   parts of the memory reference until it reaches the base object.  */

tree
get_base_address (tree t)
{
  while (handled_component_p (t))
    t = TREE_OPERAND (t, 0);

  if (SSA_VAR_P (t)
      || TREE_CODE (t) == STRING_CST
      || TREE_CODE (t) == CONSTRUCTOR
      || INDIRECT_REF_P (t))
    return t;
  else
    return NULL_TREE;
}

/* Recompute the TREE_SIDE_EFFECTS flag of expression T from its own
   volatility and the side-effect flags of its operands.  */

void
recalculate_side_effects (tree t)
{
  enum tree_code code = TREE_CODE (t);
  int len = TREE_OPERAND_LENGTH (t);
  int i;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_expression:
      switch (code)
        {
        case INIT_EXPR:
        case MODIFY_EXPR:
        case VA_ARG_EXPR:
        case PREDECREMENT_EXPR:
        case PREINCREMENT_EXPR:
        case POSTDECREMENT_EXPR:
        case POSTINCREMENT_EXPR:
          /* All of these have side-effects, no matter what their
             operands are.  */
          return;

        default:
          break;
        }
      /* Fall through.  */

    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
    case tcc_reference:   /* a reference */
    case tcc_vl_exp:      /* a function call */
      TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
      for (i = 0; i < len; ++i)
        {
          tree op = TREE_OPERAND (t, i);
          if (op && TREE_SIDE_EFFECTS (op))
            TREE_SIDE_EFFECTS (t) = 1;
        }
      break;

    case tcc_constant:
      /* No side-effects.  */
      return;

    default:
      gcc_unreachable ();
    }
}

/* Canonicalize a tree T for use in a COND_EXPR as conditional.  Returns
   a canonicalized tree that is valid for a COND_EXPR, or NULL_TREE if
   we failed to create one.  */

tree
canonicalize_cond_expr_cond (tree t)
{
  /* For (bool)x use x != 0.  */
  if (TREE_CODE (t) == NOP_EXPR
      && TREE_TYPE (t) == boolean_type_node)
    {
      tree top0 = TREE_OPERAND (t, 0);
      t = build2 (NE_EXPR, TREE_TYPE (t),
                  top0, build_int_cst (TREE_TYPE (top0), 0));
    }
  /* For !x use x == 0.  */
  else if (TREE_CODE (t) == TRUTH_NOT_EXPR)
    {
      tree top0 = TREE_OPERAND (t, 0);
      t = build2 (EQ_EXPR, TREE_TYPE (t),
                  top0, build_int_cst (TREE_TYPE (top0), 0));
    }
  /* For cmp ? 1 : 0 use cmp.  */
  else if (TREE_CODE (t) == COND_EXPR
           && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
           && integer_onep (TREE_OPERAND (t, 1))
           && integer_zerop (TREE_OPERAND (t, 2)))
    {
      tree top0 = TREE_OPERAND (t, 0);
      t = build2 (TREE_CODE (top0), TREE_TYPE (t),
                  TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
    }

  if (is_gimple_condexpr (t))
    return t;

  return NULL_TREE;
}

/* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
   the positions marked by the set ARGS_TO_SKIP.  */

gimple
gimple_call_copy_skip_args (gimple stmt, bitmap args_to_skip)
{
  int i;
  tree fn = gimple_call_fn (stmt);
  int nargs = gimple_call_num_args (stmt);
  VEC(tree, heap) *vargs = VEC_alloc (tree, heap, nargs);
  gimple new_stmt;

  for (i = 0; i < nargs; i++)
    if (!bitmap_bit_p (args_to_skip, i))
      VEC_quick_push (tree, vargs, gimple_call_arg (stmt, i));

  new_stmt = gimple_build_call_vec (fn, vargs);
  VEC_free (tree, heap, vargs);
  if (gimple_call_lhs (stmt))
    gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));

  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
  gimple_set_vdef (new_stmt, gimple_vdef (stmt));

  gimple_set_block (new_stmt, gimple_block (stmt));
  if (gimple_has_location (stmt))
    gimple_set_location (new_stmt, gimple_location (stmt));

  /* Carry all the flags to the new GIMPLE_CALL.  */
  gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
  gimple_call_set_tail (new_stmt, gimple_call_tail_p (stmt));
  gimple_call_set_cannot_inline (new_stmt, gimple_call_cannot_inline_p (stmt));
  gimple_call_set_return_slot_opt (new_stmt, gimple_call_return_slot_opt_p (stmt));
  gimple_call_set_from_thunk (new_stmt, gimple_call_from_thunk_p (stmt));
  gimple_call_set_va_arg_pack (new_stmt, gimple_call_va_arg_pack_p (stmt));

  gimple_set_modified (new_stmt, true);

  return new_stmt;
}

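/* A minimal usage sketch (hedged; CALL is a hypothetical GIMPLE_CALL
   and the choice of argument index 1 is illustrative): drop the second
   argument from an existing call while preserving its flags and
   virtual operands.

     bitmap skip = BITMAP_ALLOC (NULL);
     bitmap_set_bit (skip, 1);
     new_call = gimple_call_copy_skip_args (call, skip);
     BITMAP_FREE (skip);
*/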

/* Data structure used to count the number of dereferences to PTR
   inside an expression.  */
struct count_ptr_d
{
  tree ptr;
  unsigned num_stores;
  unsigned num_loads;
};

/* Helper for count_uses_and_derefs.  Called by walk_tree to look for
   (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA.  */

static tree
count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  struct count_ptr_d *count_p = (struct count_ptr_d *) wi_p->info;

  /* Do not walk inside ADDR_EXPR nodes.  In the expression &ptr->fld,
     pointer 'ptr' is *not* dereferenced, it is simply used to compute
     the address of 'fld' as 'ptr + offsetof(fld)'.  */
  if (TREE_CODE (*tp) == ADDR_EXPR)
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (INDIRECT_REF_P (*tp) && TREE_OPERAND (*tp, 0) == count_p->ptr)
    {
      if (wi_p->is_lhs)
        count_p->num_stores++;
      else
        count_p->num_loads++;
    }

  return NULL_TREE;
}

/* Count the number of direct and indirect uses for pointer PTR in
   statement STMT.  The number of direct uses is stored in
   *NUM_USES_P.  Indirect references are counted separately depending
   on whether they are store or load operations.  The counts are
   stored in *NUM_STORES_P and *NUM_LOADS_P.  */

void
count_uses_and_derefs (tree ptr, gimple stmt, unsigned *num_uses_p,
                       unsigned *num_loads_p, unsigned *num_stores_p)
{
  ssa_op_iter i;
  tree use;

  *num_uses_p = 0;
  *num_loads_p = 0;
  *num_stores_p = 0;

  /* Find out the total number of uses of PTR in STMT.  */
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
    if (use == ptr)
      (*num_uses_p)++;

  /* Now count the number of indirect references to PTR.  This is
     truly awful, but we don't have much choice.  There are no parent
     pointers inside INDIRECT_REFs, so an expression like
     '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
     find all the indirect and direct uses of x_1 inside.  The only
     shortcut we can take is the fact that GIMPLE only allows
     INDIRECT_REFs inside the expressions below.  */
  if (is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_RETURN
      || gimple_code (stmt) == GIMPLE_ASM
      || is_gimple_call (stmt))
    {
      struct walk_stmt_info wi;
      struct count_ptr_d count;

      count.ptr = ptr;
      count.num_stores = 0;
      count.num_loads = 0;

      memset (&wi, 0, sizeof (wi));
      wi.info = &count;
      walk_gimple_op (stmt, count_ptr_derefs, &wi);

      *num_stores_p = count.num_stores;
      *num_loads_p = count.num_loads;
    }

  gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p);
}

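/* A worked example (derived from the comment above): for the statement
   '*x_1 = foo (x_1, *x_1)', count_uses_and_derefs on x_1 yields
   *NUM_USES_P == 3 (every appearance of x_1), *NUM_STORES_P == 1 (the
   store through *x_1 on the LHS) and *NUM_LOADS_P == 1 (the load of
   *x_1 passed as the second argument).  */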
/* From a tree operand OP return the base of a load or store operation
   or NULL_TREE if OP is not a load or a store.  */

static tree
get_base_loadstore (tree op)
{
  while (handled_component_p (op))
    op = TREE_OPERAND (op, 0);
  if (DECL_P (op)
      || INDIRECT_REF_P (op)
      || TREE_CODE (op) == TARGET_MEM_REF)
    return op;
  return NULL_TREE;
}

/* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
   VISIT_ADDR if non-NULL on loads, stores and address-taken operands,
   passing the STMT, the base of the operand and DATA to it.  The base
   will be either a decl, an indirect reference (including TARGET_MEM_REF)
   or the argument of an address expression.
   Returns the results of these callbacks or'ed.  */

bool
walk_stmt_load_store_addr_ops (gimple stmt, void *data,
                               bool (*visit_load)(gimple, tree, void *),
                               bool (*visit_store)(gimple, tree, void *),
                               bool (*visit_addr)(gimple, tree, void *))
{
  bool ret = false;
  unsigned i;
  if (gimple_assign_single_p (stmt))
    {
      tree lhs, rhs;
      if (visit_store)
        {
          lhs = get_base_loadstore (gimple_assign_lhs (stmt));
          if (lhs)
            ret |= visit_store (stmt, lhs, data);
        }
      rhs = gimple_assign_rhs1 (stmt);
      if (visit_addr)
        {
          if (TREE_CODE (rhs) == ADDR_EXPR)
            ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
          else if (TREE_CODE (rhs) == TARGET_MEM_REF
                   && TREE_CODE (TMR_BASE (rhs)) == ADDR_EXPR)
            ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (rhs), 0), data);
          else if (TREE_CODE (rhs) == OBJ_TYPE_REF
                   && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
            ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
                                                   0), data);
        }
      if (visit_load)
        {
          rhs = get_base_loadstore (rhs);
          if (rhs)
            ret |= visit_load (stmt, rhs, data);
        }
    }
  else if (visit_addr
           && (is_gimple_assign (stmt)
               || gimple_code (stmt) == GIMPLE_COND
               || gimple_code (stmt) == GIMPLE_CHANGE_DYNAMIC_TYPE))
    {
      for (i = 0; i < gimple_num_ops (stmt); ++i)
        if (gimple_op (stmt, i)
            && TREE_CODE (gimple_op (stmt, i)) == ADDR_EXPR)
          ret |= visit_addr (stmt, TREE_OPERAND (gimple_op (stmt, i), 0), data);
    }
  else if (is_gimple_call (stmt))
    {
      if (visit_store)
        {
          tree lhs = gimple_call_lhs (stmt);
          if (lhs)
            {
              lhs = get_base_loadstore (lhs);
              if (lhs)
                ret |= visit_store (stmt, lhs, data);
            }
        }
      if (visit_load || visit_addr)
        for (i = 0; i < gimple_call_num_args (stmt); ++i)
          {
            tree rhs = gimple_call_arg (stmt, i);
            if (visit_addr
                && TREE_CODE (rhs) == ADDR_EXPR)
              ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
            else if (visit_load)
              {
                rhs = get_base_loadstore (rhs);
                if (rhs)
                  ret |= visit_load (stmt, rhs, data);
              }
          }
      if (visit_addr
          && gimple_call_chain (stmt)
          && TREE_CODE (gimple_call_chain (stmt)) == ADDR_EXPR)
        ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (stmt), 0),
                           data);
    }
  else if (gimple_code (stmt) == GIMPLE_ASM)
    {
      unsigned noutputs;
      const char *constraint;
      const char **oconstraints;
      bool allows_mem, allows_reg, is_inout;
      noutputs = gimple_asm_noutputs (stmt);
      oconstraints = XALLOCAVEC (const char *, noutputs);
      if (visit_store || visit_addr)
        for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
          {
            tree link = gimple_asm_output_op (stmt, i);
            tree op = get_base_loadstore (TREE_VALUE (link));
            if (op && visit_store)
              ret |= visit_store (stmt, op, data);
            if (visit_addr)
              {
                constraint = TREE_STRING_POINTER
                    (TREE_VALUE (TREE_PURPOSE (link)));
                oconstraints[i] = constraint;
                parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
                                         &allows_reg, &is_inout);
                if (op && !allows_reg && allows_mem)
                  ret |= visit_addr (stmt, op, data);
              }
          }
      if (visit_load || visit_addr)
        for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
          {
            tree link = gimple_asm_input_op (stmt, i);
            tree op = TREE_VALUE (link);
            if (visit_addr
                && TREE_CODE (op) == ADDR_EXPR)
              ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
            else if (visit_load || visit_addr)
              {
                op = get_base_loadstore (op);
                if (op)
                  {
                    if (visit_load)
                      ret |= visit_load (stmt, op, data);
                    if (visit_addr)
                      {
                        constraint = TREE_STRING_POINTER
                            (TREE_VALUE (TREE_PURPOSE (link)));
                        parse_input_constraint (&constraint, 0, 0, noutputs,
                                                0, oconstraints,
                                                &allows_mem, &allows_reg);
                        if (!allows_reg && allows_mem)
                          ret |= visit_addr (stmt, op, data);
                      }
                  }
              }
          }
    }
  else if (gimple_code (stmt) == GIMPLE_RETURN)
    {
      tree op = gimple_return_retval (stmt);
      if (op)
        {
          if (visit_addr
              && TREE_CODE (op) == ADDR_EXPR)
            ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
          else if (visit_load)
            {
              op = get_base_loadstore (op);
              if (op)
                ret |= visit_load (stmt, op, data);
            }
        }
    }
  else if (visit_addr
           && gimple_code (stmt) == GIMPLE_PHI)
    {
      for (i = 0; i < gimple_phi_num_args (stmt); ++i)
        {
          tree op = PHI_ARG_DEF (stmt, i);
          if (TREE_CODE (op) == ADDR_EXPR)
            ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
        }
    }

  return ret;
}

/* Like walk_stmt_load_store_addr_ops but with NULL visit_addr.  IPA-CP
   should make a faster clone for this case.  */

bool
walk_stmt_load_store_ops (gimple stmt, void *data,
                          bool (*visit_load)(gimple, tree, void *),
                          bool (*visit_store)(gimple, tree, void *))
{
  return walk_stmt_load_store_addr_ops (stmt, data,
                                        visit_load, visit_store, NULL);
}

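/* A minimal callback sketch (hedged; note_load and the surrounding
   names are hypothetical, only the walker API is from this file):
   collect the base of every load in a statement.

     static bool
     note_load (gimple stmt ATTRIBUTE_UNUSED, tree base, void *data)
     {
       VEC(tree, heap) **loads = (VEC(tree, heap) **) data;
       VEC_safe_push (tree, heap, *loads, base);
       return false;
     }

   and then, for some statement STMT:

     VEC(tree, heap) *loads = NULL;
     walk_stmt_load_store_ops (stmt, &loads, note_load, NULL);
*/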
/* Helper for gimple_ior_addresses_taken.  */

static bool
gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED,
                              tree addr, void *data)
{
  bitmap addresses_taken = (bitmap) data;
  while (handled_component_p (addr))
    addr = TREE_OPERAND (addr, 0);
  if (DECL_P (addr))
    {
      bitmap_set_bit (addresses_taken, DECL_UID (addr));
      return true;
    }
  return false;
}

/* Set the bit for the uid of all decls that have their address taken
   in STMT in the ADDRESSES_TAKEN bitmap.  Returns true if there
   were any in this stmt.  */

bool
gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
{
  return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
                                        gimple_ior_addresses_taken_1);
}

#include "gt-gimple.h"