1 /* Gimple decl, type, and expression support functions.
2
3 Copyright (C) 2007-2017 Free Software Foundation, Inc.
4 Contributed by Aldy Hernandez <aldyh@redhat.com>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "backend.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "stringpool.h"
29 #include "gimple-ssa.h"
30 #include "fold-const.h"
31 #include "tree-eh.h"
32 #include "gimplify.h"
33 #include "stor-layout.h"
34 #include "demangle.h"
35 #include "hash-set.h"
36 #include "rtl.h"
37 #include "tree-pass.h"
38 #include "stringpool.h"
39 #include "attribs.h"
40
41 /* ----- Type related ----- */
42
43 /* Return true if the conversion from INNER_TYPE to OUTER_TYPE is a
44 useless type conversion, otherwise return false.
45
46 This function implicitly defines the middle-end type system. With
47 the notion of 'a < b' meaning that useless_type_conversion_p (a, b)
48 holds and 'a > b' meaning that useless_type_conversion_p (b, a) holds,
49 the following invariants shall be fulfilled:
50
51 1) useless_type_conversion_p is transitive.
52 If a < b and b < c then a < c.
53
54 2) useless_type_conversion_p is not symmetric.
55 From a < b does not follow a > b.
56
57 3) Types define the available set of operations applicable to values.
58 A type conversion is useless if the operations for the target type
59 is a subset of the operations for the source type. For example
60 casts to void* are useless, casts from void* are not (void* can't
61 be dereferenced or offsetted, but copied, hence its set of operations
62 is a strict subset of that of all other data pointer types). Casts
63 to const T* are useless (can't be written to), casts from const T*
64 to T* are not. */
65
bool
useless_type_conversion_p (tree outer_type, tree inner_type)
{
  /* Do the following before stripping toplevel qualifiers.  */
  if (POINTER_TYPE_P (inner_type)
      && POINTER_TYPE_P (outer_type))
    {
      /* Do not lose casts between pointers to different address spaces.  */
      if (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
	  != TYPE_ADDR_SPACE (TREE_TYPE (inner_type)))
	return false;
      /* Do not lose casts to function pointer types.  */
      if ((TREE_CODE (TREE_TYPE (outer_type)) == FUNCTION_TYPE
	   || TREE_CODE (TREE_TYPE (outer_type)) == METHOD_TYPE)
	  && !(TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE
	       || TREE_CODE (TREE_TYPE (inner_type)) == METHOD_TYPE))
	return false;
    }

  /* From now on qualifiers on value types do not matter.  */
  inner_type = TYPE_MAIN_VARIANT (inner_type);
  outer_type = TYPE_MAIN_VARIANT (outer_type);

  /* Identical (main variant) types are trivially compatible.  */
  if (inner_type == outer_type)
    return true;

  /* Changes in machine mode are never useless conversions because the RTL
     middle-end expects explicit conversions between modes.  */
  if (TYPE_MODE (inner_type) != TYPE_MODE (outer_type))
    return false;

  /* If both the inner and outer types are integral types, then the
     conversion is not necessary if they have the same mode and
     signedness and precision, and both or neither are boolean.  */
  if (INTEGRAL_TYPE_P (inner_type)
      && INTEGRAL_TYPE_P (outer_type))
    {
      /* Preserve changes in signedness or precision.  */
      if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
	  || TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
	return false;

      /* Preserve conversions to/from BOOLEAN_TYPE if types are not
	 of precision one.  */
      if (((TREE_CODE (inner_type) == BOOLEAN_TYPE)
	   != (TREE_CODE (outer_type) == BOOLEAN_TYPE))
	  && TYPE_PRECISION (outer_type) != 1)
	return false;

      /* We don't need to preserve changes in the types minimum or
	 maximum value in general as these do not generate code
	 unless the types precisions are different.  */
      return true;
    }

  /* Scalar floating point types with the same mode are compatible.  */
  else if (SCALAR_FLOAT_TYPE_P (inner_type)
	   && SCALAR_FLOAT_TYPE_P (outer_type))
    return true;

  /* Fixed point types with the same mode are compatible.  */
  else if (FIXED_POINT_TYPE_P (inner_type)
	   && FIXED_POINT_TYPE_P (outer_type))
    return TYPE_SATURATING (inner_type) == TYPE_SATURATING (outer_type);

  /* We need to take special care recursing to pointed-to types.  */
  else if (POINTER_TYPE_P (inner_type)
	   && POINTER_TYPE_P (outer_type))
    {
      /* We do not care for const qualification of the pointed-to types
	 as const qualification has no semantic value to the middle-end.  */

      /* Otherwise pointers/references are equivalent.  */
      return true;
    }

  /* Recurse for complex types.  */
  else if (TREE_CODE (inner_type) == COMPLEX_TYPE
	   && TREE_CODE (outer_type) == COMPLEX_TYPE)
    return useless_type_conversion_p (TREE_TYPE (outer_type),
				      TREE_TYPE (inner_type));

  /* Recurse for vector types with the same number of subparts.
     NOTE(review): TYPE_PRECISION is compared here as a proxy for the
     subpart count; confirm this matches how vector types are laid out
     in this tree representation.  */
  else if (TREE_CODE (inner_type) == VECTOR_TYPE
	   && TREE_CODE (outer_type) == VECTOR_TYPE
	   && TYPE_PRECISION (inner_type) == TYPE_PRECISION (outer_type))
    return useless_type_conversion_p (TREE_TYPE (outer_type),
				      TREE_TYPE (inner_type));

  else if (TREE_CODE (inner_type) == ARRAY_TYPE
	   && TREE_CODE (outer_type) == ARRAY_TYPE)
    {
      /* Preserve various attributes.  */
      if (TYPE_REVERSE_STORAGE_ORDER (inner_type)
	  != TYPE_REVERSE_STORAGE_ORDER (outer_type))
	return false;
      if (TYPE_STRING_FLAG (inner_type) != TYPE_STRING_FLAG (outer_type))
	return false;

      /* Conversions from array types with unknown extent to
	 array types with known extent are not useless.  */
      if (!TYPE_DOMAIN (inner_type) && TYPE_DOMAIN (outer_type))
	return false;

      /* Nor are conversions from array types with non-constant size to
	 array types with constant size or to different size.  */
      if (TYPE_SIZE (outer_type)
	  && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST
	  && (!TYPE_SIZE (inner_type)
	      || TREE_CODE (TYPE_SIZE (inner_type)) != INTEGER_CST
	      || !tree_int_cst_equal (TYPE_SIZE (outer_type),
				      TYPE_SIZE (inner_type))))
	return false;

      /* Check conversions between arrays with partially known extents.
	 If the array min/max values are constant they have to match.
	 Otherwise allow conversions to unknown and variable extents.
	 In particular this declares conversions that may change the
	 mode to BLKmode as useless.  */
      if (TYPE_DOMAIN (inner_type)
	  && TYPE_DOMAIN (outer_type)
	  && TYPE_DOMAIN (inner_type) != TYPE_DOMAIN (outer_type))
	{
	  tree inner_min = TYPE_MIN_VALUE (TYPE_DOMAIN (inner_type));
	  tree outer_min = TYPE_MIN_VALUE (TYPE_DOMAIN (outer_type));
	  tree inner_max = TYPE_MAX_VALUE (TYPE_DOMAIN (inner_type));
	  tree outer_max = TYPE_MAX_VALUE (TYPE_DOMAIN (outer_type));

	  /* After gimplification a variable min/max value carries no
	     additional information compared to a NULL value.  All that
	     matters has been lowered to be part of the IL.  */
	  if (inner_min && TREE_CODE (inner_min) != INTEGER_CST)
	    inner_min = NULL_TREE;
	  if (outer_min && TREE_CODE (outer_min) != INTEGER_CST)
	    outer_min = NULL_TREE;
	  if (inner_max && TREE_CODE (inner_max) != INTEGER_CST)
	    inner_max = NULL_TREE;
	  if (outer_max && TREE_CODE (outer_max) != INTEGER_CST)
	    outer_max = NULL_TREE;

	  /* Conversions NULL / variable <- cst are useless, but not
	     the other way around.  */
	  if (outer_min
	      && (!inner_min
		  || !tree_int_cst_equal (inner_min, outer_min)))
	    return false;
	  if (outer_max
	      && (!inner_max
		  || !tree_int_cst_equal (inner_max, outer_max)))
	    return false;
	}

      /* Recurse on the element check.  */
      return useless_type_conversion_p (TREE_TYPE (outer_type),
					TREE_TYPE (inner_type));
    }

  else if ((TREE_CODE (inner_type) == FUNCTION_TYPE
	    || TREE_CODE (inner_type) == METHOD_TYPE)
	   && TREE_CODE (inner_type) == TREE_CODE (outer_type))
    {
      tree outer_parm, inner_parm;

      /* If the return types are not compatible bail out.  */
      if (!useless_type_conversion_p (TREE_TYPE (outer_type),
				      TREE_TYPE (inner_type)))
	return false;

      /* Method types should belong to a compatible base class.  */
      if (TREE_CODE (inner_type) == METHOD_TYPE
	  && !useless_type_conversion_p (TYPE_METHOD_BASETYPE (outer_type),
					 TYPE_METHOD_BASETYPE (inner_type)))
	return false;

      /* A conversion to an unprototyped argument list is ok.  */
      if (!prototype_p (outer_type))
	return true;

      /* If the unqualified argument types are compatible the conversion
	 is useless.  */
      if (TYPE_ARG_TYPES (outer_type) == TYPE_ARG_TYPES (inner_type))
	return true;

      /* Walk both argument lists in lockstep, comparing main variants
	 of each parameter type.  */
      for (outer_parm = TYPE_ARG_TYPES (outer_type),
	   inner_parm = TYPE_ARG_TYPES (inner_type);
	   outer_parm && inner_parm;
	   outer_parm = TREE_CHAIN (outer_parm),
	   inner_parm = TREE_CHAIN (inner_parm))
	if (!useless_type_conversion_p
	       (TYPE_MAIN_VARIANT (TREE_VALUE (outer_parm)),
		TYPE_MAIN_VARIANT (TREE_VALUE (inner_parm))))
	  return false;

      /* If there is a mismatch in the number of arguments the functions
	 are not compatible.  */
      if (outer_parm || inner_parm)
	return false;

      /* Defer to the target if necessary.  */
      if (TYPE_ATTRIBUTES (inner_type) || TYPE_ATTRIBUTES (outer_type))
	return comp_type_attributes (outer_type, inner_type) != 0;

      return true;
    }

  /* For aggregates we rely on TYPE_CANONICAL exclusively and require
     explicit conversions for types involving to be structurally
     compared types.  */
  else if (AGGREGATE_TYPE_P (inner_type)
	   && TREE_CODE (inner_type) == TREE_CODE (outer_type))
    return TYPE_CANONICAL (inner_type)
	   && TYPE_CANONICAL (inner_type) == TYPE_CANONICAL (outer_type);

  else if (TREE_CODE (inner_type) == OFFSET_TYPE
	   && TREE_CODE (outer_type) == OFFSET_TYPE)
    return useless_type_conversion_p (TREE_TYPE (outer_type),
				      TREE_TYPE (inner_type))
	   && useless_type_conversion_p
		(TYPE_OFFSET_BASETYPE (outer_type),
		 TYPE_OFFSET_BASETYPE (inner_type));

  /* Any other pair of types requires an explicit conversion.  */
  return false;
}
289
290
291 /* ----- Decl related ----- */
292
293 /* Set sequence SEQ to be the GIMPLE body for function FN. */
294
295 void
296 gimple_set_body (tree fndecl, gimple_seq seq)
297 {
298 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
299 if (fn == NULL)
300 {
301 /* If FNDECL still does not have a function structure associated
302 with it, then it does not make sense for it to receive a
303 GIMPLE body. */
304 gcc_assert (seq == NULL);
305 }
306 else
307 fn->gimple_body = seq;
308 }
309
310
311 /* Return the body of GIMPLE statements for function FN. After the
312 CFG pass, the function body doesn't exist anymore because it has
313 been split up into basic blocks. In this case, it returns
314 NULL. */
315
316 gimple_seq
317 gimple_body (tree fndecl)
318 {
319 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
320 return fn ? fn->gimple_body : NULL;
321 }
322
323 /* Return true when FNDECL has Gimple body either in unlowered
324 or CFG form. */
325 bool
326 gimple_has_body_p (tree fndecl)
327 {
328 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
329 return (gimple_body (fndecl) || (fn && fn->cfg && !(fn->curr_properties & PROP_rtl)));
330 }
331
/* Return a printable name for symbol DECL, or NULL if DECL has no
   name.  With VERBOSITY >= 2 the demangler is asked for a verbose
   rendering (return type postfix and, for functions, parameters).
   Falls back to the raw assembler name if demangling fails, and to
   DECL_NAME if no assembler name has been set.  */

const char *
gimple_decl_printable_name (tree decl, int verbosity)
{
  if (!DECL_NAME (decl))
    return NULL;

  if (HAS_DECL_ASSEMBLER_NAME_P (decl) && DECL_ASSEMBLER_NAME_SET_P (decl))
    {
      int dmgl_opts = DMGL_NO_OPTS;

      if (verbosity >= 2)
	{
	  dmgl_opts = DMGL_VERBOSE
		      | DMGL_ANSI
		      | DMGL_GNU_V3
		      | DMGL_RET_POSTFIX;
	  /* Only function declarations carry a parameter list worth
	     printing.  */
	  if (TREE_CODE (decl) == FUNCTION_DECL)
	    dmgl_opts |= DMGL_PARAMS;
	}

      const char *mangled_str
	= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME_RAW (decl));
      const char *str = cplus_demangle_v3 (mangled_str, dmgl_opts);
      /* cplus_demangle_v3 returns NULL on failure; use the mangled
	 name itself in that case.  */
      return str ? str : mangled_str;
    }

  return IDENTIFIER_POINTER (DECL_NAME (decl));
}
362
363
364 /* Create a new VAR_DECL and copy information from VAR to it. */
365
366 tree
367 copy_var_decl (tree var, tree name, tree type)
368 {
369 tree copy = build_decl (DECL_SOURCE_LOCATION (var), VAR_DECL, name, type);
370
371 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (var);
372 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (var);
373 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (var);
374 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (var);
375 DECL_IGNORED_P (copy) = DECL_IGNORED_P (var);
376 DECL_CONTEXT (copy) = DECL_CONTEXT (var);
377 TREE_NO_WARNING (copy) = TREE_NO_WARNING (var);
378 TREE_USED (copy) = 1;
379 DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
380 DECL_ATTRIBUTES (copy) = DECL_ATTRIBUTES (var);
381 if (DECL_USER_ALIGN (var))
382 {
383 SET_DECL_ALIGN (copy, DECL_ALIGN (var));
384 DECL_USER_ALIGN (copy) = 1;
385 }
386
387 return copy;
388 }
389
/* Strip off a legitimate source ending from the input string NAME of
   length LEN.  Rather than having to know the names used by all of
   our front ends, we strip off an ending consisting of a period
   followed by up to five characters (like ".cpp"), if one is
   present.  */

static inline void
remove_suffix (char *name, int len)
{
  /* Scan backwards from just before the last character, looking back
     at most five positions and never at position 0; truncate at the
     first period found.  */
  for (int pos = len - 2; pos >= 1 && pos >= len - 6; pos--)
    if (name[pos] == '.')
      {
        name[pos] = '\0';
        break;
      }
}
409
/* Create a new temporary name with PREFIX.  Return an identifier.  */

/* Counter appended to generated temporary names to keep them unique.
   GTY-marked so it survives precompiled-header save/restore.  */
static GTY(()) unsigned int tmp_var_id_num;

tree
create_tmp_var_name (const char *prefix)
{
  char *tmp_name;

  if (prefix)
    {
      /* Work on a scratch copy; the caller's PREFIX is left untouched.  */
      char *preftmp = ASTRDUP (prefix);

      /* Drop a trailing source-file suffix and sanitize characters
	 that are not valid in assembler symbol names.  */
      remove_suffix (preftmp, strlen (preftmp));
      clean_symbol_name (preftmp);

      prefix = preftmp;
    }

  /* Fall back to the prefix "T" when none was supplied.  */
  ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix ? prefix : "T", tmp_var_id_num++);
  return get_identifier (tmp_name);
}
432
433 /* Create a new temporary variable declaration of type TYPE.
434 Do NOT push it into the current binding. */
435
436 tree
437 create_tmp_var_raw (tree type, const char *prefix)
438 {
439 tree tmp_var;
440
441 tmp_var = build_decl (input_location,
442 VAR_DECL, prefix ? create_tmp_var_name (prefix) : NULL,
443 type);
444
445 /* The variable was declared by the compiler. */
446 DECL_ARTIFICIAL (tmp_var) = 1;
447 /* And we don't want debug info for it. */
448 DECL_IGNORED_P (tmp_var) = 1;
449
450 /* Make the variable writable. */
451 TREE_READONLY (tmp_var) = 0;
452
453 DECL_EXTERNAL (tmp_var) = 0;
454 TREE_STATIC (tmp_var) = 0;
455 TREE_USED (tmp_var) = 1;
456
457 return tmp_var;
458 }
459
460 /* Create a new temporary variable declaration of type TYPE. DO push the
461 variable into the current binding. Further, assume that this is called
462 only from gimplification or optimization, at which point the creation of
463 certain types are bugs. */
464
465 tree
466 create_tmp_var (tree type, const char *prefix)
467 {
468 tree tmp_var;
469
470 /* We don't allow types that are addressable (meaning we can't make copies),
471 or incomplete. We also used to reject every variable size objects here,
472 but now support those for which a constant upper bound can be obtained.
473 The processing for variable sizes is performed in gimple_add_tmp_var,
474 point at which it really matters and possibly reached via paths not going
475 through this function, e.g. after direct calls to create_tmp_var_raw. */
476 gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
477
478 tmp_var = create_tmp_var_raw (type, prefix);
479 gimple_add_tmp_var (tmp_var);
480 return tmp_var;
481 }
482
483 /* Create a new temporary variable declaration of type TYPE by calling
484 create_tmp_var and if TYPE is a vector or a complex number, mark the new
485 temporary as gimple register. */
486
487 tree
488 create_tmp_reg (tree type, const char *prefix)
489 {
490 tree tmp;
491
492 tmp = create_tmp_var (type, prefix);
493 if (TREE_CODE (type) == COMPLEX_TYPE
494 || TREE_CODE (type) == VECTOR_TYPE)
495 DECL_GIMPLE_REG_P (tmp) = 1;
496
497 return tmp;
498 }
499
/* Like create_tmp_reg, but register the new temporary with function FN
   (via gimple_add_tmp_var_fn) instead of the current function: create a
   temporary of type TYPE and, if TYPE is a vector or a complex number,
   mark the new temporary as a gimple register.  */

tree
create_tmp_reg_fn (struct function *fn, tree type, const char *prefix)
{
  tree tmp;

  tmp = create_tmp_var_raw (type, prefix);
  gimple_add_tmp_var_fn (fn, tmp);
  if (TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (tmp) = 1;

  return tmp;
}
517
518
519 /* ----- Expression related ----- */
520
521 /* Extract the operands and code for expression EXPR into *SUBCODE_P,
522 *OP1_P, *OP2_P and *OP3_P respectively. */
523
524 void
525 extract_ops_from_tree (tree expr, enum tree_code *subcode_p, tree *op1_p,
526 tree *op2_p, tree *op3_p)
527 {
528 enum gimple_rhs_class grhs_class;
529
530 *subcode_p = TREE_CODE (expr);
531 grhs_class = get_gimple_rhs_class (*subcode_p);
532
533 if (grhs_class == GIMPLE_TERNARY_RHS)
534 {
535 *op1_p = TREE_OPERAND (expr, 0);
536 *op2_p = TREE_OPERAND (expr, 1);
537 *op3_p = TREE_OPERAND (expr, 2);
538 }
539 else if (grhs_class == GIMPLE_BINARY_RHS)
540 {
541 *op1_p = TREE_OPERAND (expr, 0);
542 *op2_p = TREE_OPERAND (expr, 1);
543 *op3_p = NULL_TREE;
544 }
545 else if (grhs_class == GIMPLE_UNARY_RHS)
546 {
547 *op1_p = TREE_OPERAND (expr, 0);
548 *op2_p = NULL_TREE;
549 *op3_p = NULL_TREE;
550 }
551 else if (grhs_class == GIMPLE_SINGLE_RHS)
552 {
553 *op1_p = expr;
554 *op2_p = NULL_TREE;
555 *op3_p = NULL_TREE;
556 }
557 else
558 gcc_unreachable ();
559 }
560
/* Extract operands for a GIMPLE_COND statement out of COND_EXPR tree
   COND.  The comparison code goes in *CODE_P and the two operands in
   *LHS_P and *RHS_P; non-comparison conditions are canonicalized into
   explicit comparisons against zero.  */

void
gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
			       tree *lhs_p, tree *rhs_p)
{
  gcc_assert (COMPARISON_CLASS_P (cond)
	      || TREE_CODE (cond) == TRUTH_NOT_EXPR
	      || is_gimple_min_invariant (cond)
	      || SSA_VAR_P (cond));

  extract_ops_from_tree (cond, code_p, lhs_p, rhs_p);

  /* Canonicalize conditionals of the form 'if (!VAL)' into VAL == 0.  */
  if (*code_p == TRUTH_NOT_EXPR)
    {
      *code_p = EQ_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
  /* Canonicalize conditionals of the form 'if (VAL)' into VAL != 0.  */
  else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
    {
      *code_p = NE_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
}
589
590 /* Return true if T is a valid LHS for a GIMPLE assignment expression. */
591
592 bool
593 is_gimple_lvalue (tree t)
594 {
595 return (is_gimple_addressable (t)
596 || TREE_CODE (t) == WITH_SIZE_EXPR
597 /* These are complex lvalues, but don't have addresses, so they
598 go here. */
599 || TREE_CODE (t) == BIT_FIELD_REF);
600 }
601
602 /* Return true if T is a GIMPLE condition. */
603
604 bool
605 is_gimple_condexpr (tree t)
606 {
607 return (is_gimple_val (t) || (COMPARISON_CLASS_P (t)
608 && !tree_could_throw_p (t)
609 && is_gimple_val (TREE_OPERAND (t, 0))
610 && is_gimple_val (TREE_OPERAND (t, 1))));
611 }
612
613 /* Return true if T is a gimple address. */
614
615 bool
616 is_gimple_address (const_tree t)
617 {
618 tree op;
619
620 if (TREE_CODE (t) != ADDR_EXPR)
621 return false;
622
623 op = TREE_OPERAND (t, 0);
624 while (handled_component_p (op))
625 {
626 if ((TREE_CODE (op) == ARRAY_REF
627 || TREE_CODE (op) == ARRAY_RANGE_REF)
628 && !is_gimple_val (TREE_OPERAND (op, 1)))
629 return false;
630
631 op = TREE_OPERAND (op, 0);
632 }
633
634 if (CONSTANT_CLASS_P (op) || TREE_CODE (op) == MEM_REF)
635 return true;
636
637 switch (TREE_CODE (op))
638 {
639 case PARM_DECL:
640 case RESULT_DECL:
641 case LABEL_DECL:
642 case FUNCTION_DECL:
643 case VAR_DECL:
644 case CONST_DECL:
645 return true;
646
647 default:
648 return false;
649 }
650 }
651
652 /* Return true if T is a gimple invariant address. */
653
654 bool
655 is_gimple_invariant_address (const_tree t)
656 {
657 const_tree op;
658
659 if (TREE_CODE (t) != ADDR_EXPR)
660 return false;
661
662 op = strip_invariant_refs (TREE_OPERAND (t, 0));
663 if (!op)
664 return false;
665
666 if (TREE_CODE (op) == MEM_REF)
667 {
668 const_tree op0 = TREE_OPERAND (op, 0);
669 return (TREE_CODE (op0) == ADDR_EXPR
670 && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
671 || decl_address_invariant_p (TREE_OPERAND (op0, 0))));
672 }
673
674 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
675 }
676
677 /* Return true if T is a gimple invariant address at IPA level
678 (so addresses of variables on stack are not allowed). */
679
680 bool
681 is_gimple_ip_invariant_address (const_tree t)
682 {
683 const_tree op;
684
685 if (TREE_CODE (t) != ADDR_EXPR)
686 return false;
687
688 op = strip_invariant_refs (TREE_OPERAND (t, 0));
689 if (!op)
690 return false;
691
692 if (TREE_CODE (op) == MEM_REF)
693 {
694 const_tree op0 = TREE_OPERAND (op, 0);
695 return (TREE_CODE (op0) == ADDR_EXPR
696 && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
697 || decl_address_ip_invariant_p (TREE_OPERAND (op0, 0))));
698 }
699
700 return CONSTANT_CLASS_P (op) || decl_address_ip_invariant_p (op);
701 }
702
703 /* Return true if T is a GIMPLE minimal invariant. It's a restricted
704 form of function invariant. */
705
706 bool
707 is_gimple_min_invariant (const_tree t)
708 {
709 if (TREE_CODE (t) == ADDR_EXPR)
710 return is_gimple_invariant_address (t);
711
712 return is_gimple_constant (t);
713 }
714
715 /* Return true if T is a GIMPLE interprocedural invariant. It's a restricted
716 form of gimple minimal invariant. */
717
718 bool
719 is_gimple_ip_invariant (const_tree t)
720 {
721 if (TREE_CODE (t) == ADDR_EXPR)
722 return is_gimple_ip_invariant_address (t);
723
724 return is_gimple_constant (t);
725 }
726
/* Return true if T is a non-aggregate register variable, i.e. something
   that can be renamed into SSA form.  The order of the checks below is
   significant: virtual operands and SSA names are decided first.  */

bool
is_gimple_reg (tree t)
{
  /* Virtual operands model memory and are never registers.  */
  if (virtual_operand_p (t))
    return false;

  /* Non-virtual SSA names are registers by construction.  */
  if (TREE_CODE (t) == SSA_NAME)
    return true;

  /* Anything else must at least be a variable of register type.  */
  if (!is_gimple_variable (t))
    return false;

  if (!is_gimple_reg_type (TREE_TYPE (t)))
    return false;

  /* A volatile decl is not acceptable because we can't reuse it as
     needed.  We need to copy it into a temp first.  */
  if (TREE_THIS_VOLATILE (t))
    return false;

  /* We define "registers" as things that can be renamed as needed,
     which with our infrastructure does not apply to memory.  */
  if (needs_to_live_in_memory (t))
    return false;

  /* Hard register variables are an interesting case.  For those that
     are call-clobbered, we don't know where all the calls are, since
     we don't (want to) take into account which operations will turn
     into libcalls at the rtl level.  For those that are call-saved,
     we don't currently model the fact that calls may in fact change
     global hard registers, nor do we examine ASM_CLOBBERS at the tree
     level, and so miss variable changes that might imply.  All around,
     it seems safest to not do too much optimization with these at the
     tree level at all.  We'll have to rely on the rtl optimizers to
     clean this up, as there we've got all the appropriate bits exposed.  */
  if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
    return false;

  /* Complex and vector values must have been put into SSA-like form.
     That is, no assignments to the individual components.  */
  if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
    return DECL_GIMPLE_REG_P (t);

  return true;
}
775
776
777 /* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant. */
778
779 bool
780 is_gimple_val (tree t)
781 {
782 /* Make loads from volatiles and memory vars explicit. */
783 if (is_gimple_variable (t)
784 && is_gimple_reg_type (TREE_TYPE (t))
785 && !is_gimple_reg (t))
786 return false;
787
788 return (is_gimple_variable (t) || is_gimple_min_invariant (t));
789 }
790
791 /* Similarly, but accept hard registers as inputs to asm statements. */
792
793 bool
794 is_gimple_asm_val (tree t)
795 {
796 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
797 return true;
798
799 return is_gimple_val (t);
800 }
801
802 /* Return true if T is a GIMPLE minimal lvalue. */
803
804 bool
805 is_gimple_min_lval (tree t)
806 {
807 if (!(t = CONST_CAST_TREE (strip_invariant_refs (t))))
808 return false;
809 return (is_gimple_id (t) || TREE_CODE (t) == MEM_REF);
810 }
811
812 /* Return true if T is a valid function operand of a CALL_EXPR. */
813
814 bool
815 is_gimple_call_addr (tree t)
816 {
817 return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t));
818 }
819
820 /* Return true if T is a valid address operand of a MEM_REF. */
821
822 bool
823 is_gimple_mem_ref_addr (tree t)
824 {
825 return (is_gimple_reg (t)
826 || TREE_CODE (t) == INTEGER_CST
827 || (TREE_CODE (t) == ADDR_EXPR
828 && (CONSTANT_CLASS_P (TREE_OPERAND (t, 0))
829 || decl_address_invariant_p (TREE_OPERAND (t, 0)))));
830 }
831
/* Hold trees marked addressable during expand.  */

static hash_set<tree> *mark_addressable_queue;

/* Mark X as addressable or queue it up if called during expand.  We
   don't want to apply it immediately during expand because decls are
   made addressable at that point due to RTL-only concerns, such as
   uses of memcpy for block moves, and TREE_ADDRESSABLE changes
   is_gimple_reg, which might make it seem like a variable that used
   to be a gimple_reg shouldn't have been an SSA name.  So we queue up
   this flag setting and only apply it when we're done with GIMPLE and
   only RTL issues matter.  */

static void
mark_addressable_1 (tree x)
{
  if (!currently_expanding_to_rtl)
    {
      /* Outside of expand, apply the flag immediately.  */
      TREE_ADDRESSABLE (x) = 1;
      return;
    }

  /* Lazily allocate the queue; queued entries are applied later by
     flush_mark_addressable_queue.  */
  if (!mark_addressable_queue)
    mark_addressable_queue = new hash_set<tree>();
  mark_addressable_queue->add (x);
}
858
/* Adaptor for mark_addressable_1 for use in hash_set traversal.
   NOTE(review): the bool result appears to be ignored by
   hash_set::traverse — confirm against hash-set.h.  */

bool
mark_addressable_2 (tree const &x, void * ATTRIBUTE_UNUSED = NULL)
{
  mark_addressable_1 (x);
  return false;
}
867
868 /* Mark all queued trees as addressable, and empty the queue. To be
869 called right after clearing CURRENTLY_EXPANDING_TO_RTL. */
870
871 void
872 flush_mark_addressable_queue ()
873 {
874 gcc_assert (!currently_expanding_to_rtl);
875 if (mark_addressable_queue)
876 {
877 mark_addressable_queue->traverse<void*, mark_addressable_2> (NULL);
878 delete mark_addressable_queue;
879 mark_addressable_queue = NULL;
880 }
881 }
882
883 /* Mark X addressable. Unlike the langhook we expect X to be in gimple
884 form and we don't do any syntax checking. */
885
886 void
887 mark_addressable (tree x)
888 {
889 while (handled_component_p (x))
890 x = TREE_OPERAND (x, 0);
891 if (TREE_CODE (x) == MEM_REF
892 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
893 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
894 if (!VAR_P (x)
895 && TREE_CODE (x) != PARM_DECL
896 && TREE_CODE (x) != RESULT_DECL)
897 return;
898 mark_addressable_1 (x);
899
900 /* Also mark the artificial SSA_NAME that points to the partition of X. */
901 if (TREE_CODE (x) == VAR_DECL
902 && !DECL_EXTERNAL (x)
903 && !TREE_STATIC (x)
904 && cfun->gimple_df != NULL
905 && cfun->gimple_df->decls_to_pointers != NULL)
906 {
907 tree *namep = cfun->gimple_df->decls_to_pointers->get (x);
908 if (namep)
909 mark_addressable_1 (*namep);
910 }
911 }
912
913 /* Returns true iff T is a valid RHS for an assignment to a renamed
914 user -- or front-end generated artificial -- variable. */
915
916 bool
917 is_gimple_reg_rhs (tree t)
918 {
919 return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS;
920 }
921
922 #include "gt-gimple-expr.h"