/****************************************************************************
 *                                                                          *
 *                         GNAT COMPILER COMPONENTS                         *
 *                                                                          *
 *                               U T I L S 2                                *
 *                                                                          *
 *                          C Implementation File                           *
 *                                                                          *
 *          Copyright (C) 1992-2020, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software; you can redistribute it and/or modify it under    *
 * terms of the GNU General Public License as published by the Free Soft-   *
 * ware Foundation; either version 3, or (at your option) any later ver-    *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY   *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for more details.  You should have received a copy of the GNU General    *
 * Public License along with GCC; see the file COPYING3.  If not see        *
 * <http://www.gnu.org/licenses/>.                                          *
 *                                                                          *
 * GNAT was originally developed by the GNAT team at New York University.   *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 *                                                                          *
 ****************************************************************************/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "memmodel.h"
#include "tm.h"
#include "vec.h"
#include "alias.h"
#include "tree.h"
#include "inchash.h"
#include "builtins.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "stringpool.h"
#include "varasm.h"
#include "flags.h"
#include "toplev.h"
#include "ggc.h"
#include "tree-inline.h"

#include "ada.h"
#include "types.h"
#include "atree.h"
#include "elists.h"
#include "namet.h"
#include "nlists.h"
#include "snames.h"
#include "stringt.h"
#include "uintp.h"
#include "fe.h"
#include "sinfo.h"
#include "einfo.h"
#include "ada-tree.h"
#include "gigi.h"

/* Return the base type of TYPE.  */

tree
get_base_type (tree type)
{
  if (TREE_CODE (type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (type))
    type = TREE_TYPE (TYPE_FIELDS (type));

  while (TREE_TYPE (type)
         && (TREE_CODE (type) == INTEGER_TYPE
             || TREE_CODE (type) == REAL_TYPE))
    type = TREE_TYPE (type);

  return type;
}
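
/* Illustrative sketch: if gigi elaborates an Ada declaration like

     subtype Small_Int is Integer range 1 .. 10;

   as an INTEGER_TYPE whose TREE_TYPE points to the tree for Integer, the
   loop above walks that chain and returns the tree for Integer.  The exact
   trees depend on how the front-end elaborates the subtype.  */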

/* EXP is a GCC tree representing an address.  See if we can find how strictly
   the object at this address is aligned and, if so, return the alignment of
   the object in bits.  Otherwise return 0.  */

unsigned int
known_alignment (tree exp)
{
  unsigned int this_alignment;
  unsigned int lhs, rhs;

  switch (TREE_CODE (exp))
    {
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Conversions between pointers and integers don't change the alignment
         of the underlying object.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 0));
      break;

    case COMPOUND_EXPR:
      /* The value of a COMPOUND_EXPR is that of its second operand.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 1));
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* If two addresses are added, the alignment of the result is the
         minimum of the two alignments.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));
      this_alignment = MIN (lhs, rhs);
      break;

    case POINTER_PLUS_EXPR:
      /* If this is the pattern built for aligning types, decode it.  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)) == NEGATE_EXPR)
        {
          tree op = TREE_OPERAND (TREE_OPERAND (exp, 1), 1);
          return
            known_alignment (fold_build1 (BIT_NOT_EXPR, TREE_TYPE (op), op));
        }
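
      /* Illustrative sketch of the decoded pattern: to align an address,
         the aligning-type machinery emits ADDR + ((-ADDR) & (ALIGN - 1)),
         whose second operand is the BIT_AND_EXPR matched above.  BIT_NOT of
         the mask ALIGN - 1 is -ALIGN, whose lowest set bit is ALIGN; e.g.
         for ALIGN = 8, ~7 is an INTEGER_CST handled below as giving
         8 * BITS_PER_UNIT = 64-bit alignment.  */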

      /* If we don't know the alignment of the offset, we assume that
         of the base.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));

      if (rhs == 0)
        this_alignment = lhs;
      else
        this_alignment = MIN (lhs, rhs);
      break;

    case COND_EXPR:
      /* If there is a choice between two values, use the smaller one.  */
      lhs = known_alignment (TREE_OPERAND (exp, 1));
      rhs = known_alignment (TREE_OPERAND (exp, 2));
      this_alignment = MIN (lhs, rhs);
      break;

    case INTEGER_CST:
      {
        unsigned HOST_WIDE_INT c = TREE_INT_CST_LOW (exp);
        /* The first part of this represents the lowest bit in the constant,
           but it is originally in bytes, not bits.  */
        this_alignment = (c & -c) * BITS_PER_UNIT;
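        /* Worked example: c = 12 = 0b1100 has lowest set bit 4 since
           12 & -12 = 4, so a pointer with constant byte value 12 is taken
           to be 4-byte, i.e. 32-bit, aligned.  */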
      }
      break;

    case MULT_EXPR:
      /* If we know the alignment of just one side, use it.  Otherwise,
         use the product of the alignments.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));

      if (lhs == 0)
        this_alignment = rhs;
      else if (rhs == 0)
        this_alignment = lhs;
      else
        this_alignment = MIN (lhs * rhs, BIGGEST_ALIGNMENT);
      break;

    case BIT_AND_EXPR:
      /* A bit-and expression is as aligned as the maximum alignment of the
         operands.  We typically get here for a complex lhs and a constant
         negative power of two on the rhs to force an explicit alignment, so
         don't bother looking at the lhs.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 1));
      break;

    case ADDR_EXPR:
      if (DECL_P (TREE_OPERAND (exp, 0)))
        this_alignment = DECL_ALIGN (TREE_OPERAND (exp, 0));
      else
        this_alignment = get_object_alignment (TREE_OPERAND (exp, 0));
      break;

    case CALL_EXPR:
      {
        tree fndecl = get_callee_fndecl (exp);
        if (fndecl == malloc_decl || fndecl == realloc_decl)
          return get_target_system_allocator_alignment () * BITS_PER_UNIT;

        tree t = maybe_inline_call_in_expr (exp);
        if (t)
          return known_alignment (t);
      }

      /* ... fall through ... */

    default:
      /* For other pointer expressions, we assume that the pointed-to object
         is at least as aligned as the pointed-to type.  Beware that we can
         have a dummy type here (e.g. a Taft Amendment type), for which the
         alignment is meaningless and should be ignored.  */
      if (POINTER_TYPE_P (TREE_TYPE (exp))
          && !TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp)))
          && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (exp))))
        this_alignment = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
      else
        this_alignment = 0;
      break;
    }

  return this_alignment;
}

/* We have a comparison or assignment operation on two types, T1 and T2, which
   are either both array types or both record types.  T1 is assumed to be for
   the left-hand side operand, and T2 for the right-hand side.  Return the
   type that both operands should be converted to for the operation, if any.
   Otherwise return zero.  */

static tree
find_common_type (tree t1, tree t2)
{
  /* ??? As of today, various constructs lead to here with types of different
     sizes even when both constant (e.g. tagged types, packable vs regular
     component types, padded vs unpadded types, ...).  While some of these
     would better be handled upstream (types should be made consistent before
     calling into build_binary_op), some others are really expected and we
     have to be careful.  */

  const bool variable_record_on_lhs
    = (TREE_CODE (t1) == RECORD_TYPE
       && TREE_CODE (t2) == RECORD_TYPE
       && get_variant_part (t1)
       && !get_variant_part (t2));

  const bool variable_array_on_lhs
    = (TREE_CODE (t1) == ARRAY_TYPE
       && TREE_CODE (t2) == ARRAY_TYPE
       && !TREE_CONSTANT (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)))
       && TREE_CONSTANT (TYPE_MIN_VALUE (TYPE_DOMAIN (t2))));

  /* We must avoid writing more than what the target can hold if this is for
     an assignment, and the case of tagged types is handled in build_binary_op,
     so we use the lhs type if it is known to be smaller or of constant size
     and the rhs type is not, whatever the modes.  We also force t1 in case of
     constant size equality to minimize occurrences of view conversions on the
     lhs of an assignment, except for the case of types with a variable part
     on the lhs but not on the rhs, to make the conversion simpler.  */
  if (TREE_CONSTANT (TYPE_SIZE (t1))
      && (!TREE_CONSTANT (TYPE_SIZE (t2))
          || tree_int_cst_lt (TYPE_SIZE (t1), TYPE_SIZE (t2))
          || (TYPE_SIZE (t1) == TYPE_SIZE (t2)
              && !variable_record_on_lhs
              && !variable_array_on_lhs)))
    return t1;

  /* Otherwise, if the lhs type is non-BLKmode, use it, except for the case of
     a non-BLKmode rhs and array types with a variable part on the lhs but not
     on the rhs, to make sure the conversion is preserved during gimplification.
     Note that we know that we will not have any alignment problems since, if
     we did, the non-BLKmode type could not have been used.  */
  if (TYPE_MODE (t1) != BLKmode
      && (TYPE_MODE (t2) == BLKmode || !variable_array_on_lhs))
    return t1;

  /* If the rhs type is of constant size, use it whatever the modes.  At
     this point it is known to be smaller, or of constant size and the
     lhs type is not.  */
  if (TREE_CONSTANT (TYPE_SIZE (t2)))
    return t2;

  /* Otherwise, if the rhs type is non-BLKmode, use it.  */
  if (TYPE_MODE (t2) != BLKmode)
    return t2;

  /* In this case, both types have variable size and BLKmode.  It's
     probably best to leave the "type mismatch" because changing it
     could cause a bad self-referential reference.  */
  return NULL_TREE;
}

/* Return an expression tree representing an equality comparison of A1 and A2,
   two objects of type ARRAY_TYPE.  The result should be of type RESULT_TYPE.

   Two arrays are equal in one of two ways: (1) if both have zero length in
   some dimension (not necessarily the same dimension) or (2) if the lengths
   in each dimension are equal and the data is equal.  We perform the length
   tests in as efficient a manner as possible.  */

static tree
compare_arrays (location_t loc, tree result_type, tree a1, tree a2)
{
  tree result = convert (result_type, boolean_true_node);
  tree a1_is_null = convert (result_type, boolean_false_node);
  tree a2_is_null = convert (result_type, boolean_false_node);
  tree t1 = TREE_TYPE (a1);
  tree t2 = TREE_TYPE (a2);
  bool a1_side_effects_p = TREE_SIDE_EFFECTS (a1);
  bool a2_side_effects_p = TREE_SIDE_EFFECTS (a2);
  bool length_zero_p = false;

  /* If the operands have side-effects, they need to be evaluated only once
     in spite of the multiple references in the comparison.  */
  if (a1_side_effects_p)
    a1 = gnat_protect_expr (a1);

  if (a2_side_effects_p)
    a2 = gnat_protect_expr (a2);

  /* Process each dimension separately and compare the lengths.  If any
     dimension has a length known to be zero, set LENGTH_ZERO_P to true
     in order to suppress the comparison of the data at the end.  */
  while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
    {
      tree dom1 = TYPE_DOMAIN (t1);
      tree dom2 = TYPE_DOMAIN (t2);
      tree length1 = size_binop (PLUS_EXPR,
                                 size_binop (MINUS_EXPR,
                                             TYPE_MAX_VALUE (dom1),
                                             TYPE_MIN_VALUE (dom1)),
                                 size_one_node);
      tree length2 = size_binop (PLUS_EXPR,
                                 size_binop (MINUS_EXPR,
                                             TYPE_MAX_VALUE (dom2),
                                             TYPE_MIN_VALUE (dom2)),
                                 size_one_node);
      tree ind1 = TYPE_INDEX_TYPE (dom1);
      tree ind2 = TYPE_INDEX_TYPE (dom2);
      tree base_type = maybe_character_type (get_base_type (ind1));
      tree lb1 = convert (base_type, TYPE_MIN_VALUE (ind1));
      tree ub1 = convert (base_type, TYPE_MAX_VALUE (ind1));
      tree lb2 = convert (base_type, TYPE_MIN_VALUE (ind2));
      tree ub2 = convert (base_type, TYPE_MAX_VALUE (ind2));
      tree comparison, this_a1_is_null, this_a2_is_null;

      /* If the length of the first array is a constant and that of the second
         array is not, swap our operands to have the constant second.  */
      if (TREE_CODE (length1) == INTEGER_CST
          && TREE_CODE (length2) != INTEGER_CST)
        {
          tree tem;
          bool btem;

          tem = a1, a1 = a2, a2 = tem;
          tem = t1, t1 = t2, t2 = tem;
          tem = lb1, lb1 = lb2, lb2 = tem;
          tem = ub1, ub1 = ub2, ub2 = tem;
          tem = length1, length1 = length2, length2 = tem;
          tem = a1_is_null, a1_is_null = a2_is_null, a2_is_null = tem;
          btem = a1_side_effects_p, a1_side_effects_p = a2_side_effects_p,
          a2_side_effects_p = btem;
        }

      /* If the length of the second array is the constant zero, we can just
         use the original stored bounds for the first array and see whether
         last < first holds.  */
      if (integer_zerop (length2))
        {
          length_zero_p = true;

          lb1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (lb1, a1);
          ub1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (ub1, a1);

          comparison = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);
          if (EXPR_P (comparison))
            SET_EXPR_LOCATION (comparison, loc);

          this_a1_is_null = comparison;
          this_a2_is_null = convert (result_type, boolean_true_node);
        }

      /* Otherwise, if the length is some other constant value, we know that
         this dimension in the second array cannot be superflat, so we can
         just use its length computed from the actual stored bounds.  */
      else if (TREE_CODE (length2) == INTEGER_CST)
        {
          /* Note that we know that LB2 and UB2 are constant and hence
             cannot contain a PLACEHOLDER_EXPR.  */
          lb1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (lb1, a1);
          ub1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (ub1, a1);

          comparison
            = fold_build2_loc (loc, EQ_EXPR, result_type,
                               build_binary_op (MINUS_EXPR, base_type,
                                                ub1, lb1),
                               build_binary_op (MINUS_EXPR, base_type,
                                                ub2, lb2));
          if (EXPR_P (comparison))
            SET_EXPR_LOCATION (comparison, loc);

          this_a1_is_null
            = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);

          this_a2_is_null = convert (result_type, boolean_false_node);
        }

      /* Otherwise, compare the computed lengths.  */
      else
        {
          length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
          length2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length2, a2);

          comparison
            = fold_build2_loc (loc, EQ_EXPR, result_type, length1, length2);
          if (EXPR_P (comparison))
            SET_EXPR_LOCATION (comparison, loc);

          lb1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (lb1, a1);
          ub1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (ub1, a1);

          this_a1_is_null
            = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);

          lb2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (lb2, a2);
          ub2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (ub2, a2);

          this_a2_is_null
            = fold_build2_loc (loc, LT_EXPR, result_type, ub2, lb2);
        }

      /* Append expressions for this dimension to the final expressions.  */
      result = build_binary_op (TRUTH_ANDIF_EXPR, result_type,
                                result, comparison);

      a1_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                    this_a1_is_null, a1_is_null);

      a2_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                    this_a2_is_null, a2_is_null);

      t1 = TREE_TYPE (t1);
      t2 = TREE_TYPE (t2);
    }

  /* Unless the length of some dimension is known to be zero, compare the
     data in the array.  */
  if (!length_zero_p)
    {
      tree type = find_common_type (TREE_TYPE (a1), TREE_TYPE (a2));
      tree comparison;

      if (type)
        {
          a1 = convert (type, a1),
          a2 = convert (type, a2);
        }

      comparison = fold_build2_loc (loc, EQ_EXPR, result_type, a1, a2);

      result
        = build_binary_op (TRUTH_ANDIF_EXPR, result_type, result, comparison);
    }

  /* The result is also true if both sizes are zero.  */
  result = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                            build_binary_op (TRUTH_ANDIF_EXPR, result_type,
                                             a1_is_null, a2_is_null),
                            result);

  /* If the operands have side-effects, they need to be evaluated before
     doing the tests above since the place they otherwise would end up
     being evaluated at run time could be wrong.  */
  if (a1_side_effects_p)
    result = build2 (COMPOUND_EXPR, result_type, a1, result);

  if (a2_side_effects_p)
    result = build2 (COMPOUND_EXPR, result_type, a2, result);

  return result;
}
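
/* Illustrative sketch: for one-dimensional arrays A1 and A2, the tree built
   above amounts to

     (LENGTH1 == LENGTH2 && A1 == A2) || (A1_IS_NULL && A2_IS_NULL)

   with the comparison of the data suppressed altogether when some length
   is known to be zero at compile time.  */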

/* Return an expression tree representing an equality comparison of P1 and P2,
   two objects of fat pointer type.  The result should be of type RESULT_TYPE.

   Two fat pointers are equal in one of two ways: (1) if both have a null
   pointer to the array or (2) if they contain the same couple of pointers.
   We perform the comparison in as efficient a manner as possible.  */

static tree
compare_fat_pointers (location_t loc, tree result_type, tree p1, tree p2)
{
  tree p1_array, p2_array, p1_bounds, p2_bounds, same_array, same_bounds;
  tree p1_array_is_null, p2_array_is_null;

  /* If either operand has side-effects, they have to be evaluated only once
     in spite of the multiple references to the operand in the comparison.  */
  p1 = gnat_protect_expr (p1);
  p2 = gnat_protect_expr (p2);

  /* The constant folder doesn't fold fat pointer types so we do it here.  */
  if (TREE_CODE (p1) == CONSTRUCTOR)
    p1_array = CONSTRUCTOR_ELT (p1, 0)->value;
  else
    p1_array = build_component_ref (p1, TYPE_FIELDS (TREE_TYPE (p1)), true);

  p1_array_is_null
    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array,
                       fold_convert_loc (loc, TREE_TYPE (p1_array),
                                         null_pointer_node));

  if (TREE_CODE (p2) == CONSTRUCTOR)
    p2_array = CONSTRUCTOR_ELT (p2, 0)->value;
  else
    p2_array = build_component_ref (p2, TYPE_FIELDS (TREE_TYPE (p2)), true);

  p2_array_is_null
    = fold_build2_loc (loc, EQ_EXPR, result_type, p2_array,
                       fold_convert_loc (loc, TREE_TYPE (p2_array),
                                         null_pointer_node));

  /* If one of the pointers to the array is null, just compare the other.  */
  if (integer_zerop (p1_array))
    return p2_array_is_null;
  else if (integer_zerop (p2_array))
    return p1_array_is_null;

  /* Otherwise, do the fully-fledged comparison.  */
  same_array
    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array, p2_array);

  if (TREE_CODE (p1) == CONSTRUCTOR)
    p1_bounds = CONSTRUCTOR_ELT (p1, 1)->value;
  else
    p1_bounds
      = build_component_ref (p1, DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p1))),
                             true);

  if (TREE_CODE (p2) == CONSTRUCTOR)
    p2_bounds = CONSTRUCTOR_ELT (p2, 1)->value;
  else
    p2_bounds
      = build_component_ref (p2, DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p2))),
                             true);

  same_bounds
    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_bounds, p2_bounds);

  /* P1_ARRAY == P2_ARRAY && (P1_ARRAY == NULL || P1_BOUNDS == P2_BOUNDS).  */
  return build_binary_op (TRUTH_ANDIF_EXPR, result_type, same_array,
                          build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                           p1_array_is_null, same_bounds));
}

/* Compute the result of applying OP_CODE to LHS and RHS, where both are of
   type TYPE.  We know that TYPE is a modular type with a nonbinary
   modulus.  */

static tree
nonbinary_modular_operation (enum tree_code op_code, tree type, tree lhs,
                             tree rhs)
{
  tree modulus = TYPE_MODULUS (type);
  unsigned int needed_precision = tree_floor_log2 (modulus) + 1;
  unsigned int precision;
  bool unsignedp = true;
  tree op_type = type;
  tree result;

  /* If this is an addition of a constant, convert it to a subtraction
     of a constant since we can do that faster.  */
  if (op_code == PLUS_EXPR && TREE_CODE (rhs) == INTEGER_CST)
    {
      rhs = fold_build2 (MINUS_EXPR, type, modulus, rhs);
      op_code = MINUS_EXPR;
    }

  /* For the logical operations, we only need PRECISION bits.  For addition
     and subtraction, we need one more, and for multiplication we need twice
     as many.  But we never want to make a size smaller than our size.  */
  if (op_code == PLUS_EXPR || op_code == MINUS_EXPR)
    needed_precision += 1;
  else if (op_code == MULT_EXPR)
    needed_precision *= 2;

  precision = MAX (needed_precision, TYPE_PRECISION (op_type));

  /* Unsigned will do for everything but subtraction.  */
  if (op_code == MINUS_EXPR)
    unsignedp = false;

  /* If our type is the wrong signedness or isn't wide enough, make a new
     type and convert both our operands to it.  */
  if (TYPE_PRECISION (op_type) < precision
      || TYPE_UNSIGNED (op_type) != unsignedp)
    {
      /* Copy the type so we ensure it can be modified to make it modular.  */
      op_type = copy_type (gnat_type_for_size (precision, unsignedp));
      modulus = convert (op_type, modulus);
      SET_TYPE_MODULUS (op_type, modulus);
      TYPE_MODULAR_P (op_type) = 1;
      lhs = convert (op_type, lhs);
      rhs = convert (op_type, rhs);
    }

  /* Do the operation, then we'll fix it up.  */
  result = fold_build2 (op_code, op_type, lhs, rhs);

  /* For multiplication, we have no choice but to do a full modulus
     operation.  However, we want to do this in the narrowest
     possible size.  */
  if (op_code == MULT_EXPR)
    {
      /* Copy the type so we ensure it can be modified to make it modular.  */
      tree div_type = copy_type (gnat_type_for_size (needed_precision, 1));
      modulus = convert (div_type, modulus);
      SET_TYPE_MODULUS (div_type, modulus);
      TYPE_MODULAR_P (div_type) = 1;
      result = convert (op_type,
                        fold_build2 (TRUNC_MOD_EXPR, div_type,
                                     convert (div_type, result), modulus));
    }

  /* For subtraction, add the modulus back if we are negative.  */
  else if (op_code == MINUS_EXPR)
    {
      result = gnat_protect_expr (result);
      result = fold_build3 (COND_EXPR, op_type,
                            fold_build2 (LT_EXPR, boolean_type_node, result,
                                         build_int_cst (op_type, 0)),
                            fold_build2 (PLUS_EXPR, op_type, result, modulus),
                            result);
    }

  /* For the other operations, subtract the modulus if we are >= it.  */
  else
    {
      result = gnat_protect_expr (result);
      result = fold_build3 (COND_EXPR, op_type,
                            fold_build2 (GE_EXPR, boolean_type_node,
                                         result, modulus),
                            fold_build2 (MINUS_EXPR, op_type,
                                         result, modulus),
                            result);
    }

  return convert (type, result);
}
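
/* Worked example (illustrative): for an Ada type declared as mod 7,
   tree_floor_log2 (7) is 2, so NEEDED_PRECISION starts at 3 bits.  An
   addition needs 4 bits and a multiplication 6 bits.  Thus 5 + 6 = 11 is
   computed in 4 bits and the conditional subtraction of 7 yields 4, while
   5 * 6 = 30 goes through the TRUNC_MOD_EXPR and yields 2.  */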

/* This page contains routines that implement the Ada semantics with regard
   to atomic objects.  They are fully piggybacked on the middle-end support
   for atomic loads and stores.

   *** Memory barriers and volatile objects ***

   We implement the weakened form of the C.6(16) clause that was introduced
   in Ada 2012 (AI05-117).  Earlier forms of this clause wouldn't have been
   implementable without significant performance hits on modern platforms.

   We also take advantage of the requirements imposed on shared variables by
   9.10 (conditions for sequential actions) to have non-erroneous execution
   and consider that C.6(16) and C.6(17) only prescribe a uniform order of
   volatile updates with regard to sequential actions, i.e. with regard to
   reads or updates of atomic objects.

   As such, an update of an atomic object by a task requires that all earlier
   accesses to volatile objects have completed.  Similarly, later accesses to
   volatile objects cannot be reordered before the update of the atomic object.
   So, memory barriers both before and after the atomic update are needed.

   For a read of an atomic object, to avoid seeing writes of volatile objects
   by a task earlier than by the other tasks, a memory barrier is needed before
   the atomic read.  Finally, to avoid reordering later reads or updates of
   volatile objects to before the atomic read, a barrier is needed after the
   atomic read.

   So, memory barriers are needed before and after atomic reads and updates.
   And, in order to simplify the implementation, we use full memory barriers
   in all cases, i.e. we enforce sequential consistency for atomic accesses.  */
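
/* Concretely, sequential consistency is obtained below by expanding atomic
   accesses into calls to the __atomic_load_N / __atomic_store_N builtins
   with the MEMMODEL_SEQ_CST memory model, which the middle-end implements
   with the required barriers on each target.  */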

/* Return the size of TYPE, which must be a positive power of 2.  */

static unsigned int
resolve_atomic_size (tree type)
{
  unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE_UNIT (type));

  if (size == 1 || size == 2 || size == 4 || size == 8 || size == 16)
    return size;

  /* We shouldn't reach here without having already detected that the size
     isn't compatible with an atomic access.  */
  gcc_assert (Serious_Errors_Detected);

  return 0;
}

/* Build an atomic load for the underlying atomic object in SRC.  SYNC is
   true if the load requires synchronization.  */

tree
build_atomic_load (tree src, bool sync)
{
  tree ptr_type
    = build_pointer_type
      (build_qualified_type (void_type_node,
                             TYPE_QUAL_ATOMIC | TYPE_QUAL_VOLATILE));
  tree mem_model
    = build_int_cst (integer_type_node,
                     sync ? MEMMODEL_SEQ_CST : MEMMODEL_RELAXED);
  tree orig_src = src;
  tree t, addr, val;
  unsigned int size;
  int fncode;

  /* Remove conversions to get the address of the underlying object.  */
  src = remove_conversions (src, false);
  size = resolve_atomic_size (TREE_TYPE (src));
  if (size == 0)
    return orig_src;

  fncode = (int) BUILT_IN_ATOMIC_LOAD_N + exact_log2 (size) + 1;
  t = builtin_decl_implicit ((enum built_in_function) fncode);

  addr = build_unary_op (ADDR_EXPR, ptr_type, src);
  val = build_call_expr (t, 2, addr, mem_model);

  /* First reinterpret the loaded bits in the original type of the load,
     then convert to the expected result type.  */
  t = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (src), val);
  return convert (TREE_TYPE (orig_src), t);
}
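
/* Illustrative sketch: for a 4-byte atomic object X and SYNC set, the tree
   built above is equivalent to the C expression

     (TYPE_OF_X) __atomic_load_4 ((volatile void *) &X, __ATOMIC_SEQ_CST)

   with the loaded bits first reinterpreted in the type of X by means of a
   VIEW_CONVERT_EXPR.  */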

/* Build an atomic store from SRC to the underlying atomic object in DEST.
   SYNC is true if the store requires synchronization.  */

tree
build_atomic_store (tree dest, tree src, bool sync)
{
  tree ptr_type
    = build_pointer_type
      (build_qualified_type (void_type_node,
                             TYPE_QUAL_ATOMIC | TYPE_QUAL_VOLATILE));
  tree mem_model
    = build_int_cst (integer_type_node,
                     sync ? MEMMODEL_SEQ_CST : MEMMODEL_RELAXED);
  tree orig_dest = dest;
  tree t, int_type, addr;
  unsigned int size;
  int fncode;

  /* Remove conversions to get the address of the underlying object.  */
  dest = remove_conversions (dest, false);
  size = resolve_atomic_size (TREE_TYPE (dest));
  if (size == 0)
    return build_binary_op (MODIFY_EXPR, NULL_TREE, orig_dest, src);

  fncode = (int) BUILT_IN_ATOMIC_STORE_N + exact_log2 (size) + 1;
  t = builtin_decl_implicit ((enum built_in_function) fncode);
  int_type = gnat_type_for_size (BITS_PER_UNIT * size, 1);

  /* First convert the bits to be stored to the original type of the store,
     then reinterpret them in the effective type.  But if the original type
     is a padded type with the same size, convert to the inner type instead,
     as we don't want to artificially introduce a CONSTRUCTOR here.  */
  if (TYPE_IS_PADDING_P (TREE_TYPE (dest))
      && TYPE_SIZE (TREE_TYPE (dest))
         == TYPE_SIZE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (dest)))))
    src = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (dest))), src);
  else
    src = convert (TREE_TYPE (dest), src);
  src = fold_build1 (VIEW_CONVERT_EXPR, int_type, src);
  addr = build_unary_op (ADDR_EXPR, ptr_type, dest);

  return build_call_expr (t, 3, addr, src, mem_model);
}

/* Build a load-modify-store sequence from SRC to DEST.  GNAT_NODE is used for
   the location of the sequence.  Note that, even though the load and the store
   are both atomic, the sequence itself is not atomic.  */

tree
build_load_modify_store (tree dest, tree src, Node_Id gnat_node)
{
  /* We will be modifying DEST below so we build a copy.  */
  dest = copy_node (dest);
  tree ref = dest;

  while (handled_component_p (ref))
    {
      /* The load should already have been generated during the translation
         of the GNAT destination tree; find it out in the GNU tree.  */
      if (TREE_CODE (TREE_OPERAND (ref, 0)) == VIEW_CONVERT_EXPR)
        {
          tree op = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
          if (TREE_CODE (op) == CALL_EXPR && call_is_atomic_load (op))
            {
              tree type = TREE_TYPE (TREE_OPERAND (ref, 0));
              tree t = CALL_EXPR_ARG (op, 0);
              tree obj, temp, stmt;

              /* Find out the loaded object.  */
              if (TREE_CODE (t) == NOP_EXPR)
                t = TREE_OPERAND (t, 0);
              if (TREE_CODE (t) == ADDR_EXPR)
                obj = TREE_OPERAND (t, 0);
              else
                obj = build1 (INDIRECT_REF, type, t);

              /* Drop atomic and volatile qualifiers for the temporary.  */
              type = TYPE_MAIN_VARIANT (type);

              /* And drop BLKmode, if need be, to put it into a register.  */
              if (TYPE_MODE (type) == BLKmode)
                {
                  unsigned int size = tree_to_uhwi (TYPE_SIZE (type));
                  type = copy_type (type);
                  machine_mode mode = int_mode_for_size (size, 0).else_blk ();
                  SET_TYPE_MODE (type, mode);
                }

              /* Create the temporary by inserting a SAVE_EXPR.  */
              temp = build1 (SAVE_EXPR, type,
                             build1 (VIEW_CONVERT_EXPR, type, op));
              TREE_OPERAND (ref, 0) = temp;

              start_stmt_group ();

              /* Build the modify of the temporary.  */
              stmt = build_binary_op (MODIFY_EXPR, NULL_TREE, dest, src);
              add_stmt_with_node (stmt, gnat_node);

              /* Build the store to the object.  */
              stmt = build_atomic_store (obj, temp, false);
              add_stmt_with_node (stmt, gnat_node);

              return end_stmt_group ();
            }
        }

      TREE_OPERAND (ref, 0) = copy_node (TREE_OPERAND (ref, 0));
      ref = TREE_OPERAND (ref, 0);
    }

  /* Something went wrong earlier if we have not found the atomic load.  */
  gcc_unreachable ();
}
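
/* Illustrative sketch: for an assignment Obj.F := Val where Obj is atomic,
   the statement group built above amounts to

     temp = VIEW_CONVERT (atomic load of Obj);
     temp.F = Val;
     atomic store of temp into Obj;

   so the component update happens in a register temporary between the two
   atomic accesses, which is why the sequence as a whole is not atomic.  */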

/* Make a binary operation of kind OP_CODE.  RESULT_TYPE is the type
   desired for the result.  Usually the operation is to be performed
   in that type.  For INIT_EXPR and MODIFY_EXPR, RESULT_TYPE must be
   NULL_TREE.  For ARRAY_REF, RESULT_TYPE may be NULL_TREE, in which
   case the type to be used will be derived from the operands.
   Don't fold the result if NO_FOLD is true.

   This function is very much unlike the ones for C and C++ since we
   have already done any type conversion and matching required.  All we
   have to do here is validate the work done by SEM and handle subtypes.  */

tree
build_binary_op (enum tree_code op_code, tree result_type,
                 tree left_operand, tree right_operand,
                 bool no_fold)
{
  tree left_type = TREE_TYPE (left_operand);
  tree right_type = TREE_TYPE (right_operand);
  tree left_base_type = get_base_type (left_type);
  tree right_base_type = get_base_type (right_type);
  tree operation_type = result_type;
  tree best_type = NULL_TREE;
  tree modulus, result;
  bool has_side_effects = false;

  if (operation_type
      && TREE_CODE (operation_type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (operation_type))
    operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));

  if (operation_type && TYPE_IS_EXTRA_SUBTYPE_P (operation_type))
    operation_type = get_base_type (operation_type);

  modulus = (operation_type
             && TREE_CODE (operation_type) == INTEGER_TYPE
             && TYPE_MODULAR_P (operation_type)
             ? TYPE_MODULUS (operation_type) : NULL_TREE);

  switch (op_code)
    {
    case INIT_EXPR:
    case MODIFY_EXPR:
      gcc_checking_assert (!result_type);

      /* If there were integral or pointer conversions on the LHS, remove
         them; we'll be putting them back below if needed.  Likewise for
         conversions between record types, except for justified modular types.
         But don't do this if the right operand is not BLKmode (for packed
         arrays) unless we are not changing the mode.  */
      while ((CONVERT_EXPR_P (left_operand)
              || TREE_CODE (left_operand) == VIEW_CONVERT_EXPR)
             && (((INTEGRAL_TYPE_P (left_type)
                   || POINTER_TYPE_P (left_type))
                  && (INTEGRAL_TYPE_P (operand_type (left_operand))
                      || POINTER_TYPE_P (operand_type (left_operand))))
                 || (TREE_CODE (left_type) == RECORD_TYPE
                     && !TYPE_JUSTIFIED_MODULAR_P (left_type)
                     && TREE_CODE (operand_type (left_operand)) == RECORD_TYPE
                     && (TYPE_MODE (right_type) == BLKmode
                         || TYPE_MODE (left_type)
                            == TYPE_MODE (operand_type (left_operand))))))
        {
          left_operand = TREE_OPERAND (left_operand, 0);
          left_type = TREE_TYPE (left_operand);
        }

      /* If a class-wide type may be involved, force use of the RHS type.  */
      if ((TREE_CODE (right_type) == RECORD_TYPE
           || TREE_CODE (right_type) == UNION_TYPE)
          && TYPE_ALIGN_OK (right_type))
        operation_type = right_type;

      /* If we are copying between padded objects with compatible types, use
         the padded view of the objects; this is very likely more efficient.
         Likewise for a padded object that is assigned a constructor, if we
         can convert the constructor to the inner type, to avoid putting a
         VIEW_CONVERT_EXPR on the LHS.  But don't do so if we wouldn't have
         actually copied anything.  */
      else if (TYPE_IS_PADDING_P (left_type)
               && TREE_CONSTANT (TYPE_SIZE (left_type))
               && ((TREE_CODE (right_operand) == COMPONENT_REF
                    && TYPE_MAIN_VARIANT (left_type)
                       == TYPE_MAIN_VARIANT (operand_type (right_operand)))
                   || (TREE_CODE (right_operand) == CONSTRUCTOR
                       && !CONTAINS_PLACEHOLDER_P
                           (DECL_SIZE (TYPE_FIELDS (left_type)))))
               && !integer_zerop (TYPE_SIZE (right_type)))
        {
          /* We make an exception for a BLKmode type padding a non-BLKmode
             inner type and do the conversion of the LHS right away, since
             unchecked_convert wouldn't do it properly.  */
          if (TYPE_MODE (left_type) == BLKmode
              && TYPE_MODE (right_type) != BLKmode
              && TREE_CODE (right_operand) != CONSTRUCTOR)
            {
              operation_type = right_type;
              left_operand = convert (operation_type, left_operand);
              left_type = operation_type;
            }
          else
            operation_type = left_type;
        }

      /* If we have a call to a function that returns with variable size, use
         the RHS type in case we want to use the return slot optimization.  */
      else if (TREE_CODE (right_operand) == CALL_EXPR
               && return_type_with_variable_size_p (right_type))
        operation_type = right_type;

      /* Find the best type to use for copying between aggregate types.  */
      else if (((TREE_CODE (left_type) == ARRAY_TYPE
                 && TREE_CODE (right_type) == ARRAY_TYPE)
                || (TREE_CODE (left_type) == RECORD_TYPE
                    && TREE_CODE (right_type) == RECORD_TYPE))
               && (best_type = find_common_type (left_type, right_type)))
        operation_type = best_type;

      /* Otherwise use the LHS type.  */
      else
        operation_type = left_type;

      /* Ensure everything on the LHS is valid.  If we have a field reference,
         strip anything that get_inner_reference can handle.  Then remove any
         conversions between types having the same code and mode.  And mark
         VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE.  When done, we must have
         either an INDIRECT_REF, a NULL_EXPR, a SAVE_EXPR or a DECL node.  */
      result = left_operand;
      while (true)
        {
          tree restype = TREE_TYPE (result);

          if (TREE_CODE (result) == COMPONENT_REF
              || TREE_CODE (result) == ARRAY_REF
              || TREE_CODE (result) == ARRAY_RANGE_REF)
            while (handled_component_p (result))
              result = TREE_OPERAND (result, 0);

          else if (TREE_CODE (result) == REALPART_EXPR
                   || TREE_CODE (result) == IMAGPART_EXPR
                   || (CONVERT_EXPR_P (result)
                       && (((TREE_CODE (restype)
                             == TREE_CODE (operand_type (result))
                             && TYPE_MODE (restype)
                                == TYPE_MODE (operand_type (result))))
                           || TYPE_ALIGN_OK (restype))))
            result = TREE_OPERAND (result, 0);

          else if (TREE_CODE (result) == VIEW_CONVERT_EXPR)
            {
              TREE_ADDRESSABLE (result) = 1;
              result = TREE_OPERAND (result, 0);
            }

          else
            break;
        }

      gcc_assert (TREE_CODE (result) == INDIRECT_REF
                  || TREE_CODE (result) == NULL_EXPR
                  || TREE_CODE (result) == SAVE_EXPR
                  || DECL_P (result));

      /* Convert the right operand to the operation type unless it is
         either already of the correct type or if the type involves a
         placeholder, since the RHS may not have the same record type.  */
      if (operation_type != right_type
          && !CONTAINS_PLACEHOLDER_P (TYPE_SIZE (operation_type)))
        {
          right_operand = convert (operation_type, right_operand);
          right_type = operation_type;
        }

      /* If the left operand is not of the same type as the operation
         type, wrap it up in a VIEW_CONVERT_EXPR.  */
      if (left_type != operation_type)
        left_operand = unchecked_convert (operation_type, left_operand, false);

      has_side_effects = true;
      modulus = NULL_TREE;
      break;

    case ARRAY_REF:
      if (!operation_type)
        operation_type = TREE_TYPE (left_type);

      /* ... fall through ... */

    case ARRAY_RANGE_REF:
      /* First look through conversion between type variants.  Note that
         this changes neither the operation type nor the type domain.  */
      if (TREE_CODE (left_operand) == VIEW_CONVERT_EXPR
          && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (left_operand, 0)))
             == TYPE_MAIN_VARIANT (left_type))
        {
          left_operand = TREE_OPERAND (left_operand, 0);
          left_type = TREE_TYPE (left_operand);
        }

      /* For a range, make sure the element type is consistent.  */
      if (op_code == ARRAY_RANGE_REF
          && TREE_TYPE (operation_type) != TREE_TYPE (left_type))
        {
          operation_type
            = build_nonshared_array_type (TREE_TYPE (left_type),
                                          TYPE_DOMAIN (operation_type));
          /* Declare it now since it will never be declared otherwise.  This
             is necessary to ensure that its subtrees are properly marked.  */
          create_type_decl (TYPE_NAME (operation_type), operation_type, true,
                            false, Empty);
        }

      /* Then convert the right operand to its base type.  This will prevent
         unneeded sign conversions when sizetype is wider than integer.  */
      right_operand = convert (right_base_type, right_operand);
      right_operand = convert_to_index_type (right_operand);
      modulus = NULL_TREE;
      break;

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      gcc_checking_assert
        (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
      operation_type = left_base_type;
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (operation_type, right_operand);
      break;

    case GE_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case LT_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
      gcc_checking_assert
        (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
      /* If either operand is a NULL_EXPR, just return a new one.  */
      if (TREE_CODE (left_operand) == NULL_EXPR)
        return build2 (op_code, result_type,
                       build1 (NULL_EXPR, integer_type_node,
                               TREE_OPERAND (left_operand, 0)),
                       integer_zero_node);

      else if (TREE_CODE (right_operand) == NULL_EXPR)
        return build2 (op_code, result_type,
                       build1 (NULL_EXPR, integer_type_node,
                               TREE_OPERAND (right_operand, 0)),
                       integer_zero_node);

      /* If either object is of a justified modular type, get the
         fields from within.  */
      if (TREE_CODE (left_type) == RECORD_TYPE
          && TYPE_JUSTIFIED_MODULAR_P (left_type))
        {
          left_operand = convert (TREE_TYPE (TYPE_FIELDS (left_type)),
                                  left_operand);
          left_type = TREE_TYPE (left_operand);
          left_base_type = get_base_type (left_type);
        }

      if (TREE_CODE (right_type) == RECORD_TYPE
          && TYPE_JUSTIFIED_MODULAR_P (right_type))
        {
          right_operand = convert (TREE_TYPE (TYPE_FIELDS (right_type)),
                                   right_operand);
          right_type = TREE_TYPE (right_operand);
          right_base_type = get_base_type (right_type);
        }

      /* If both objects are arrays, compare them specially.  */
      if ((TREE_CODE (left_type) == ARRAY_TYPE
           || (TREE_CODE (left_type) == INTEGER_TYPE
               && TYPE_HAS_ACTUAL_BOUNDS_P (left_type)))
          && (TREE_CODE (right_type) == ARRAY_TYPE
              || (TREE_CODE (right_type) == INTEGER_TYPE
                  && TYPE_HAS_ACTUAL_BOUNDS_P (right_type))))
        {
          result = compare_arrays (input_location,
                                   result_type, left_operand, right_operand);
          if (op_code == NE_EXPR)
            result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
          else
            gcc_assert (op_code == EQ_EXPR);

          return result;
        }

      /* Otherwise, the base types must be the same, unless they are both fat
         pointer types or record types.  In the latter case, use the best type
         and convert both operands to that type.  */
      if (left_base_type != right_base_type)
        {
          if (TYPE_IS_FAT_POINTER_P (left_base_type)
              && TYPE_IS_FAT_POINTER_P (right_base_type))
            {
              gcc_assert (TYPE_MAIN_VARIANT (left_base_type)
                          == TYPE_MAIN_VARIANT (right_base_type));
              best_type = left_base_type;
            }

          else if (TREE_CODE (left_base_type) == RECORD_TYPE
                   && TREE_CODE (right_base_type) == RECORD_TYPE)
            {
              /* The only way this is permitted is if both types have the same
                 name.  In that case, one of them must not be self-referential.
                 Use it as the best type.  Even better with a fixed size.  */
              gcc_assert (TYPE_NAME (left_base_type)
                          && TYPE_NAME (left_base_type)
                             == TYPE_NAME (right_base_type));

              if (TREE_CONSTANT (TYPE_SIZE (left_base_type)))
                best_type = left_base_type;
              else if (TREE_CONSTANT (TYPE_SIZE (right_base_type)))
                best_type = right_base_type;
              else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (left_base_type)))
                best_type = left_base_type;
              else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (right_base_type)))
                best_type = right_base_type;
              else
                gcc_unreachable ();
            }

          else if (POINTER_TYPE_P (left_base_type)
                   && POINTER_TYPE_P (right_base_type))
            {
              gcc_assert (TREE_TYPE (left_base_type)
                          == TREE_TYPE (right_base_type));
              best_type = left_base_type;
            }
          else
            gcc_unreachable ();

          left_operand = convert (best_type, left_operand);
          right_operand = convert (best_type, right_operand);
        }
      else
        {
          left_operand = convert (left_base_type, left_operand);
          right_operand = convert (right_base_type, right_operand);
        }

      /* If both objects are fat pointers, compare them specially.  */
      if (TYPE_IS_FAT_POINTER_P (left_base_type))
        {
          result
            = compare_fat_pointers (input_location,
                                    result_type, left_operand, right_operand);
          if (op_code == NE_EXPR)
            result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
          else
            gcc_assert (op_code == EQ_EXPR);

          return result;
        }

      modulus = NULL_TREE;
      break;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* The RHS of a shift can be any type.  Also, ignore any modulus
         (we used to abort, but this is needed for unchecked conversion
         to modular types).  Otherwise, processing is the same as normal.  */
      gcc_assert (operation_type == left_base_type);
      modulus = NULL_TREE;
      left_operand = convert (operation_type, left_operand);
      break;

    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      /* For binary modulus, if the inputs are in range, so are the
         outputs.  */
      if (modulus && integer_pow2p (modulus))
        modulus = NULL_TREE;
      goto common;

    case COMPLEX_EXPR:
      gcc_assert (TREE_TYPE (result_type) == left_base_type
                  && TREE_TYPE (result_type) == right_base_type);
      left_operand = convert (left_base_type, left_operand);
      right_operand = convert (right_base_type, right_operand);
      break;

    case TRUNC_DIV_EXPR:  case TRUNC_MOD_EXPR:
    case CEIL_DIV_EXPR:   case CEIL_MOD_EXPR:
    case FLOOR_DIV_EXPR:  case FLOOR_MOD_EXPR:
    case ROUND_DIV_EXPR:  case ROUND_MOD_EXPR:
      /* These always produce results lower than either operand.  */
      modulus = NULL_TREE;
      goto common;

    case POINTER_PLUS_EXPR:
      gcc_assert (operation_type == left_base_type
                  && sizetype == right_base_type);
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (sizetype, right_operand);
      break;

    case PLUS_NOMOD_EXPR:
    case MINUS_NOMOD_EXPR:
      if (op_code == PLUS_NOMOD_EXPR)
        op_code = PLUS_EXPR;
      else
        op_code = MINUS_EXPR;
      modulus = NULL_TREE;

      /* ... fall through ... */

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* Avoid doing arithmetics in ENUMERAL_TYPE or BOOLEAN_TYPE like the
         other compilers.  Contrary to C, Ada doesn't allow arithmetics in
         these types but can generate addition/subtraction for Succ/Pred.  */
      if (operation_type
          && (TREE_CODE (operation_type) == ENUMERAL_TYPE
              || TREE_CODE (operation_type) == BOOLEAN_TYPE))
        operation_type = left_base_type = right_base_type
          = gnat_type_for_mode (TYPE_MODE (operation_type),
                                TYPE_UNSIGNED (operation_type));

      /* ... fall through ... */

    default:
    common:
      /* The result type should be the same as the base types of both
         operands (and they should be the same).  Convert everything to
         the result type.  */
      gcc_assert (operation_type == left_base_type
                  && left_base_type == right_base_type);
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (operation_type, right_operand);
    }

  if (modulus && !integer_pow2p (modulus))
    {
      result = nonbinary_modular_operation (op_code, operation_type,
                                            left_operand, right_operand);
      modulus = NULL_TREE;
    }
  /* If either operand is a NULL_EXPR, just return a new one.  */
  else if (TREE_CODE (left_operand) == NULL_EXPR)
    return build1 (NULL_EXPR, operation_type, TREE_OPERAND (left_operand, 0));
  else if (TREE_CODE (right_operand) == NULL_EXPR)
    return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
  else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
    {
      result = build4 (op_code, operation_type, left_operand, right_operand,
                       NULL_TREE, NULL_TREE);
      if (!no_fold)
        result = fold (result);
    }
  else if (op_code == INIT_EXPR || op_code == MODIFY_EXPR)
    result = build2 (op_code, void_type_node, left_operand, right_operand);
  else if (no_fold)
    result = build2 (op_code, operation_type, left_operand, right_operand);
  else
    result
      = fold_build2 (op_code, operation_type, left_operand, right_operand);

  if (TREE_CONSTANT (result))
    ;
  else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
    {
      if (TYPE_VOLATILE (operation_type))
        TREE_THIS_VOLATILE (result) = 1;
    }
  else
    TREE_CONSTANT (result)
      |= (TREE_CONSTANT (left_operand) && TREE_CONSTANT (right_operand));

  TREE_SIDE_EFFECTS (result) |= has_side_effects;

  /* If we are working with modular types, perform the MOD operation
     if something above hasn't eliminated the need for it.  */
  if (modulus)
    {
      modulus = convert (operation_type, modulus);
      if (no_fold)
        result = build2 (FLOOR_MOD_EXPR, operation_type, result, modulus);
      else
        result = fold_build2 (FLOOR_MOD_EXPR, operation_type, result, modulus);
    }

  if (result_type && result_type != operation_type)
    result = convert (result_type, result);

  return result;
}
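
/* Worked example (illustrative): with a nonbinary modulus, e.g. mod 10,
   an addition is delegated to nonbinary_modular_operation above, so
   7 + 5 yields 2.  With a binary modulus that does not fill the precision
   of the operation type, the final FLOOR_MOD_EXPR does the reduction,
   e.g. 7 + 13 computed modulo 16 yields 4.  */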

/* Similar, but for unary operations.  */

tree
build_unary_op (enum tree_code op_code, tree result_type, tree operand)
{
  tree type = TREE_TYPE (operand);
  tree base_type = get_base_type (type);
  tree operation_type = result_type;
  tree result;

  if (operation_type
      && TREE_CODE (operation_type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (operation_type))
    operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));

  if (operation_type
      && TREE_CODE (operation_type) == INTEGER_TYPE
      && TYPE_EXTRA_SUBTYPE_P (operation_type))
    operation_type = get_base_type (operation_type);

  switch (op_code)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      if (!operation_type)
        result_type = operation_type = TREE_TYPE (type);
      else
        gcc_assert (result_type == TREE_TYPE (type));

      result = fold_build1 (op_code, operation_type, operand);
      break;

    case TRUTH_NOT_EXPR:
      gcc_checking_assert
        (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
      result = invert_truthvalue_loc (EXPR_LOCATION (operand), operand);
      /* When not optimizing, fold the result as invert_truthvalue_loc
         doesn't fold the result of comparisons.  This is intended to undo
         the trick used for boolean rvalues in gnat_to_gnu.  */
      if (!optimize)
        result = fold (result);
      break;

    case ATTR_ADDR_EXPR:
    case ADDR_EXPR:
      switch (TREE_CODE (operand))
        {
        case INDIRECT_REF:
        case UNCONSTRAINED_ARRAY_REF:
          result = TREE_OPERAND (operand, 0);

          /* Make sure the type here is a pointer, not a reference.
             GCC wants pointer types for function addresses.  */
          if (!result_type)
            result_type = build_pointer_type (type);

          /* If the underlying object can alias everything, propagate the
             property since we are effectively retrieving the object.  */
          if (POINTER_TYPE_P (TREE_TYPE (result))
              && TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (result)))
            {
              if (TREE_CODE (result_type) == POINTER_TYPE
                  && !TYPE_REF_CAN_ALIAS_ALL (result_type))
                result_type
                  = build_pointer_type_for_mode (TREE_TYPE (result_type),
                                                 TYPE_MODE (result_type),
                                                 true);
              else if (TREE_CODE (result_type) == REFERENCE_TYPE
                       && !TYPE_REF_CAN_ALIAS_ALL (result_type))
                result_type
                  = build_reference_type_for_mode (TREE_TYPE (result_type),
                                                   TYPE_MODE (result_type),
                                                   true);
            }
          break;

        case NULL_EXPR:
          result = operand;
          TREE_TYPE (result) = type = build_pointer_type (type);
          break;

        case COMPOUND_EXPR:
          /* Fold a compound expression if it has unconstrained array type
             since the middle-end cannot handle it.  But we don't do it in
             the general case because it may introduce aliasing issues if the
             first operand is an indirect assignment and the second operand
             the corresponding address, e.g. for an allocator.  However do
             it for a return value to expose it for later recognition.  */
          if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE
              || (TREE_CODE (TREE_OPERAND (operand, 1)) == VAR_DECL
                  && DECL_RETURN_VALUE_P (TREE_OPERAND (operand, 1))))
            {
              result = build_unary_op (ADDR_EXPR, result_type,
                                       TREE_OPERAND (operand, 1));
              result = build2 (COMPOUND_EXPR, TREE_TYPE (result),
                               TREE_OPERAND (operand, 0), result);
              break;
            }
          goto common;

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
        case COMPONENT_REF:
        case BIT_FIELD_REF:
          /* If this is for 'Address, find the address of the prefix and add
             the offset to the field.  Otherwise, do this the normal way.  */
          if (op_code == ATTR_ADDR_EXPR)
            {
              poly_int64 bitsize;
              poly_int64 bitpos;
              tree offset, inner;
              machine_mode mode;
              int unsignedp, reversep, volatilep;

              inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
                                           &mode, &unsignedp, &reversep,
                                           &volatilep);

              /* If INNER is a padding type whose field has a self-referential
                 size, convert to that inner type.  We know the offset is zero
                 and we need to have that type visible.  */
              if (type_is_padding_self_referential (TREE_TYPE (inner)))
                inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),
                                 inner);

              /* Compute the offset as a byte offset from INNER.  */
              if (!offset)
                offset = size_zero_node;

              offset
                = size_binop (PLUS_EXPR, offset,
                              size_int (bits_to_bytes_round_down (bitpos)));
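              /* E.g. a component at bit position 72 within INNER adds
                 72 / 8 = 9 bytes to the offset (illustrative).  */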
a1ab4c31 1459
2117b9bb
EB
1460 /* Take the address of INNER, convert it to a pointer to our type
1461 and add the offset. */
1462 inner = build_unary_op (ADDR_EXPR,
1463 build_pointer_type (TREE_TYPE (operand)),
1464 inner);
1465 result = build_binary_op (POINTER_PLUS_EXPR, TREE_TYPE (inner),
a1ab4c31 1466 inner, offset);
a1ab4c31
AC
1467 break;
1468 }
1469 goto common;
1470
1471 case CONSTRUCTOR:
1472 /* If this is just a constructor for a padded record, we can
1473 just take the address of the single field and convert it to
1474 a pointer to our type. */
315cff15 1475 if (TYPE_IS_PADDING_P (type))
a1ab4c31 1476 {
2117b9bb
EB
1477 result
1478 = build_unary_op (ADDR_EXPR,
1479 build_pointer_type (TREE_TYPE (operand)),
1480 CONSTRUCTOR_ELT (operand, 0)->value);
a1ab4c31
AC
1481 break;
1482 }
a1ab4c31
AC
1483 goto common;
1484
1485 case NOP_EXPR:
1486 if (AGGREGATE_TYPE_P (type)
1487 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand, 0))))
1488 return build_unary_op (ADDR_EXPR, result_type,
1489 TREE_OPERAND (operand, 0));
1490
9c453de7 1491 /* ... fallthru ... */
a1ab4c31
AC
1492
1493 case VIEW_CONVERT_EXPR:
1494 /* If this is just a variant conversion, or if the conversion doesn't
1495 change the mode, get the result type from this type and go down.
1496 This is needed for conversions of CONST_DECLs, to eventually get
1497 to the address of their CORRESPONDING_VARs. */
1498 if ((TYPE_MAIN_VARIANT (type)
1499 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (operand, 0))))
1500 || (TYPE_MODE (type) != BLKmode
1501 && (TYPE_MODE (type)
1502 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand, 0))))))
1503 return build_unary_op (ADDR_EXPR,
1504 (result_type ? result_type
1505 : build_pointer_type (type)),
1506 TREE_OPERAND (operand, 0));
1507 goto common;
1508
1509 case CONST_DECL:
1510 operand = DECL_CONST_CORRESPONDING_VAR (operand);
1511
9c453de7 1512 /* ... fall through ... */
a1ab4c31
AC
1513
1514 default:
1515 common:
1516
184d436a
EB
1517 /* If we are taking the address of a padded record whose field
1518 contains a template, take the address of the field. */
315cff15 1519 if (TYPE_IS_PADDING_P (type)
a1ab4c31
AC
1520 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
1521 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
1522 {
1523 type = TREE_TYPE (TYPE_FIELDS (type));
1524 operand = convert (type, operand);
1525 }
1526
a1ab4c31 1527 gnat_mark_addressable (operand);
1fc24649 1528 result = build_fold_addr_expr (operand);
a1ab4c31
AC
1529 }
1530
1531 TREE_CONSTANT (result) = staticp (operand) || TREE_CONSTANT (operand);
1532 break;
1533
1534 case INDIRECT_REF:
a1c7d797 1535 {
722356ce
EB
1536 tree t = remove_conversions (operand, false);
1537 bool can_never_be_null = DECL_P (t) && DECL_CAN_NEVER_BE_NULL_P (t);
a1c7d797 1538
184d436a
EB
1539 /* If TYPE is a thin pointer, either first retrieve the base if this
1540 is an expression with an offset built for the initialization of an
1541 object with an unconstrained nominal subtype, or else convert to
1542 the fat pointer. */
1543 if (TYPE_IS_THIN_POINTER_P (type))
a1c7d797 1544 {
184d436a
EB
1545 tree rec_type = TREE_TYPE (type);
1546
1547 if (TREE_CODE (operand) == POINTER_PLUS_EXPR
2b45154d
EB
1548 && TREE_OPERAND (operand, 1)
1549 == byte_position (DECL_CHAIN (TYPE_FIELDS (rec_type)))
184d436a
EB
1550 && TREE_CODE (TREE_OPERAND (operand, 0)) == NOP_EXPR)
1551 {
1552 operand = TREE_OPERAND (TREE_OPERAND (operand, 0), 0);
1553 type = TREE_TYPE (operand);
1554 }
1555 else if (TYPE_UNCONSTRAINED_ARRAY (rec_type))
1556 {
1557 operand
1558 = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (rec_type)),
1559 operand);
1560 type = TREE_TYPE (operand);
1561 }
a1c7d797 1562 }
a1ab4c31 1563
a1c7d797
EB
1564 /* If we want to refer to an unconstrained array, use the appropriate
1565 expression. But this will never survive down to the back-end. */
1566 if (TYPE_IS_FAT_POINTER_P (type))
1567 {
1568 result = build1 (UNCONSTRAINED_ARRAY_REF,
1569 TYPE_UNCONSTRAINED_ARRAY (type), operand);
1570 TREE_READONLY (result)
1571 = TYPE_READONLY (TYPE_UNCONSTRAINED_ARRAY (type));
1572 }
1fc24649 1573
a1c7d797
EB
1574 /* If we are dereferencing an ADDR_EXPR, return its operand. */
1575 else if (TREE_CODE (operand) == ADDR_EXPR)
1576 result = TREE_OPERAND (operand, 0);
a1ab4c31 1577
a1c7d797
EB
1578 /* Otherwise, build and fold the indirect reference. */
1579 else
1580 {
1581 result = build_fold_indirect_ref (operand);
1582 TREE_READONLY (result) = TYPE_READONLY (TREE_TYPE (type));
1583 }
a1ab4c31 1584
a1c7d797
EB
1585 if (!TYPE_IS_FAT_POINTER_P (type) && TYPE_VOLATILE (TREE_TYPE (type)))
1586 {
1587 TREE_SIDE_EFFECTS (result) = 1;
1588 if (TREE_CODE (result) == INDIRECT_REF)
1589 TREE_THIS_VOLATILE (result) = TYPE_VOLATILE (TREE_TYPE (result));
1590 }
1591
1592 if ((TREE_CODE (result) == INDIRECT_REF
1593 || TREE_CODE (result) == UNCONSTRAINED_ARRAY_REF)
1594 && can_never_be_null)
1595 TREE_THIS_NOTRAP (result) = 1;
1596
1597 break;
1598 }
a1ab4c31
AC
1599
1600 case NEGATE_EXPR:
1601 case BIT_NOT_EXPR:
1602 {
1603 tree modulus = ((operation_type
1604 && TREE_CODE (operation_type) == INTEGER_TYPE
1605 && TYPE_MODULAR_P (operation_type))
1606 ? TYPE_MODULUS (operation_type) : NULL_TREE);
1607 int mod_pow2 = modulus && integer_pow2p (modulus);
1608
1609 /* If this is a modular type, there are various possibilities
1610 depending on the operation and whether the modulus is a
1611 power of two or not. */
1612
1613 if (modulus)
1614 {
1615 gcc_assert (operation_type == base_type);
1616 operand = convert (operation_type, operand);
1617
1618 /* For the negate case with a binary modulus, the
1619 straightforward code is fastest; the TRUNC_MOD_EXPR below
1620 amounts to an AND operation. */
1621 if (op_code == NEGATE_EXPR && mod_pow2)
1622 result = fold_build2 (TRUNC_MOD_EXPR, operation_type,
1623 fold_build1 (NEGATE_EXPR, operation_type,
1624 operand),
1625 modulus);
1626
1627 /* For the nonbinary negate case, return zero for a zero operand,
1628 else return the modulus minus the operand. If the modulus
1629 is a power of two minus one, we can do the subtraction
1630 as an XOR since it is equivalent and faster on most machines. */
1631 else if (op_code == NEGATE_EXPR && !mod_pow2)
1632 {
1633 if (integer_pow2p (fold_build2 (PLUS_EXPR, operation_type,
1634 modulus,
9a1bdc31
EB
1635 build_int_cst (operation_type,
1636 1))))
a1ab4c31
AC
1637 result = fold_build2 (BIT_XOR_EXPR, operation_type,
1638 operand, modulus);
1639 else
1640 result = fold_build2 (MINUS_EXPR, operation_type,
1641 modulus, operand);
1642
1643 result = fold_build3 (COND_EXPR, operation_type,
1644 fold_build2 (NE_EXPR,
1139f2e8 1645 boolean_type_node,
a1ab4c31 1646 operand,
9a1bdc31
EB
1647 build_int_cst
1648 (operation_type, 0)),
a1ab4c31
AC
1649 result, operand);
1650 }
1651 else
1652 {
1653 /* For the NOT cases, we need a constant equal to
1654 the modulus minus one. For a binary modulus, we
1655 XOR the operand against that constant; for a nonbinary
1656 modulus, we subtract the operand from it. */
1657
1658 tree cnst = fold_build2 (MINUS_EXPR, operation_type, modulus,
9a1bdc31 1659 build_int_cst (operation_type, 1));
a1ab4c31
AC
1660
1661 if (mod_pow2)
1662 result = fold_build2 (BIT_XOR_EXPR, operation_type,
1663 operand, cnst);
1664 else
1665 result = fold_build2 (MINUS_EXPR, operation_type,
1666 cnst, operand);
1667 }
1668
1669 break;
1670 }
1671 }
1672
9c453de7 1673 /* ... fall through ... */
a1ab4c31
AC
1674
1675 default:
1676 gcc_assert (operation_type == base_type);
1677 result = fold_build1 (op_code, operation_type,
1678 convert (operation_type, operand));
1679 }
1680
a1ab4c31
AC
1681 if (result_type && TREE_TYPE (result) != result_type)
1682 result = convert (result_type, result);
1683
1684 return result;
1685}
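/* As a hedged illustration of the modular cases handled above, assuming
   an unsigned operand X of an Ada modular type:

     negate, modulus 8 (binary)        : (-X) rem 8, i.e. an AND with 7
     negate, modulus 7 (= 2**3 - 1)    : X == 0 ? 0 : X ^ 7
                                         (since 7 - X == 7 ^ X here)
     negate, modulus 5 (nonbinary)     : X == 0 ? 0 : 5 - X
     not,    modulus 8 (binary)        : X ^ 7       (cnst = 8 - 1)
     not,    modulus 5 (nonbinary)     : 4 - X       (cnst = 5 - 1)

   For instance, negating 2 with modulus 5 yields 3, and "not 1" with
   modulus 5 yields 3 as well.  */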
ce2d0ce2 1686
a1ab4c31
AC
1687/* Similar, but for COND_EXPR. */
1688
1689tree
1690build_cond_expr (tree result_type, tree condition_operand,
1691 tree true_operand, tree false_operand)
1692{
a1ab4c31 1693 bool addr_p = false;
1275de7d 1694 tree result;
a1ab4c31 1695
1275de7d
EB
1696 /* The front-end verified that the result, true and false operands
1697 have the same base type. Convert everything to the result type. */
1698 true_operand = convert (result_type, true_operand);
a1ab4c31
AC
1699 false_operand = convert (result_type, false_operand);
1700
87fa3d34
EB
1701 /* If the result type is unconstrained, take the address of the operands and
1702 then dereference the result. Likewise if the result type is passed by
a0b8b1b7 1703 reference, because creating a temporary of this type is not allowed. */
a1ab4c31 1704 if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
a0b8b1b7 1705 || TYPE_IS_BY_REFERENCE_P (result_type)
cb3d597d 1706 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
a1ab4c31 1707 {
a1ab4c31
AC
1708 result_type = build_pointer_type (result_type);
1709 true_operand = build_unary_op (ADDR_EXPR, result_type, true_operand);
1710 false_operand = build_unary_op (ADDR_EXPR, result_type, false_operand);
1275de7d 1711 addr_p = true;
a1ab4c31
AC
1712 }
1713
1714 result = fold_build3 (COND_EXPR, result_type, condition_operand,
1715 true_operand, false_operand);
1716
1275de7d
EB
1717 /* If we have a common SAVE_EXPR (possibly surrounded by arithmetic)
1718 in both arms, make sure it gets evaluated by moving it ahead of the
1719 conditional expression. This is necessary because it is evaluated
1720 in only one place at run time and would otherwise be uninitialized
1721 in one of the arms. */
1722 true_operand = skip_simple_arithmetic (true_operand);
a1ab4c31
AC
1723 false_operand = skip_simple_arithmetic (false_operand);
1724
1275de7d 1725 if (true_operand == false_operand && TREE_CODE (true_operand) == SAVE_EXPR)
a1ab4c31
AC
1726 result = build2 (COMPOUND_EXPR, result_type, true_operand, result);
1727
1275de7d 1728 if (addr_p)
a1ab4c31
AC
1729 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1730
1731 return result;
1732}
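/* A small sketch of the SAVE_EXPR hoisting done above, assuming S is a
   SAVE_EXPR shared by both arms once simple arithmetic is skipped: the
   expression

     cond ? S + 1 : S - 1

   becomes

     S, (cond ? S + 1 : S - 1)

   so that S is evaluated unconditionally before the branch is taken and
   cannot be left uninitialized in one of the arms.  */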
1733
39ab2e8f
RK
1734/* Similar, but for COMPOUND_EXPR. */
1735
1736tree
1737build_compound_expr (tree result_type, tree stmt_operand, tree expr_operand)
1738{
1739 bool addr_p = false;
1740 tree result;
1741
1742 /* If the result type is unconstrained, take the address of the operand and
1743 then dereference the result. Likewise if the result type is passed by
1744 reference, but this is natively handled in the gimplifier. */
1745 if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1746 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
1747 {
1748 result_type = build_pointer_type (result_type);
1749 expr_operand = build_unary_op (ADDR_EXPR, result_type, expr_operand);
1750 addr_p = true;
1751 }
1752
1753 result = fold_build2 (COMPOUND_EXPR, result_type, stmt_operand,
1754 expr_operand);
1755
1756 if (addr_p)
1757 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1758
1759 return result;
1760}
ce2d0ce2 1761
dddf8120
EB
1762/* Conveniently construct a function call expression. FNDECL names the
1763 function to be called, N is the number of arguments, and the "..."
1764 parameters are the argument expressions. Unlike build_call_expr
1765 this doesn't fold the call, hence it will always return a CALL_EXPR. */
a1ab4c31
AC
1766
1767tree
dddf8120 1768build_call_n_expr (tree fndecl, int n, ...)
a1ab4c31 1769{
dddf8120
EB
1770 va_list ap;
1771 tree fntype = TREE_TYPE (fndecl);
1772 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
1773
1774 va_start (ap, n);
1775 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
1776 va_end (ap);
1777 return fn;
a1ab4c31 1778}
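/* For example, later in this file the default allocator is invoked as
   build_call_n_expr (malloc_decl, 1, size), which yields an unfolded
   CALL_EXPR to the __gnat_malloc runtime routine; see maybe_wrap_malloc
   below.  */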
ce2d0ce2 1779
9a1bdc31
EB
1780/* Build a goto to GNAT_LABEL for a raise, with an optional call to Local_Raise.
1781 MSG gives the exception's identity for the call to Local_Raise, if any. */
1782
1783static tree
8f8f531f 1784build_goto_raise (Entity_Id gnat_label, int msg)
9a1bdc31 1785{
8f8f531f
PMR
1786 tree gnu_label = gnat_to_gnu_entity (gnat_label, NULL_TREE, false);
1787 tree gnu_result = build1 (GOTO_EXPR, void_type_node, gnu_label);
9a1bdc31
EB
1788 Entity_Id local_raise = Get_Local_Raise_Call_Entity ();
1789
1790 /* If Local_Raise is present, build Local_Raise (Exception'Identity). */
1791 if (Present (local_raise))
1792 {
afc737f0
EB
1793 tree gnu_local_raise
1794 = gnat_to_gnu_entity (local_raise, NULL_TREE, false);
9a1bdc31 1795 tree gnu_exception_entity
afc737f0 1796 = gnat_to_gnu_entity (Get_RT_Exception_Entity (msg), NULL_TREE, false);
9a1bdc31
EB
1797 tree gnu_call
1798 = build_call_n_expr (gnu_local_raise, 1,
1799 build_unary_op (ADDR_EXPR, NULL_TREE,
1800 gnu_exception_entity));
1801 gnu_result
1802 = build2 (COMPOUND_EXPR, void_type_node, gnu_call, gnu_result);
1803 }
1804
8f8f531f 1805 TREE_USED (gnu_label) = 1;
9a1bdc31
EB
1806 return gnu_result;
1807}
1808
ba464315
EB
1809/* Expand the SLOC of GNAT_NODE, if present, into tree location information
1810 pointed to by FILENAME, LINE and COL. Fall back to the current location
1811 if GNAT_NODE is absent or has no SLOC. */
a1ab4c31 1812
ba464315
EB
1813static void
1814expand_sloc (Node_Id gnat_node, tree *filename, tree *line, tree *col)
1815{
1816 const char *str;
1817 int line_number, column_number;
1818
1819 if (Debug_Flag_NN || Exception_Locations_Suppressed)
1820 {
1821 str = "";
1822 line_number = 0;
1823 column_number = 0;
1824 }
1825 else if (Present (gnat_node) && Sloc (gnat_node) != No_Location)
1826 {
1827 str = Get_Name_String
1828 (Debug_Source_Name (Get_Source_File_Index (Sloc (gnat_node))));
1829 line_number = Get_Logical_Line_Number (Sloc (gnat_node));
1830 column_number = Get_Column_Number (Sloc (gnat_node));
1831 }
1832 else
1833 {
1834 str = lbasename (LOCATION_FILE (input_location));
1835 line_number = LOCATION_LINE (input_location);
1836 column_number = LOCATION_COLUMN (input_location);
1837 }
a1ab4c31 1838
ba464315
EB
1839 const int len = strlen (str);
1840 *filename = build_string (len, str);
825da0d2 1841 TREE_TYPE (*filename) = build_array_type (char_type_node,
ba464315
EB
1842 build_index_type (size_int (len)));
1843 *line = build_int_cst (NULL_TREE, line_number);
1844 if (col)
1845 *col = build_int_cst (NULL_TREE, column_number);
1846}
1847
1848/* Build a call to a function that raises an exception and passes file name
1849 and line number, if requested. MSG says which exception function to call.
1850 GNAT_NODE is the node conveying the source location for which the error
1851 should be signaled, or Empty in which case the error is signaled for the
1852 current location. KIND says which kind of exception node this is for,
1853 among N_Raise_{Constraint,Storage,Program}_Error. */
a1ab4c31
AC
1854
1855tree
1856build_call_raise (int msg, Node_Id gnat_node, char kind)
1857{
8f8f531f 1858 Entity_Id gnat_label = get_exception_label (kind);
a1ab4c31 1859 tree fndecl = gnat_raise_decls[msg];
ba464315 1860 tree filename, line;
a1ab4c31
AC
1861
1862 /* If this is to be done as a goto, handle that case. */
8f8f531f
PMR
1863 if (Present (gnat_label))
1864 return build_goto_raise (gnat_label, msg);
a1ab4c31 1865
ba464315 1866 expand_sloc (gnat_node, &filename, &line, NULL);
a1ab4c31
AC
1867
1868 return
dddf8120 1869 build_call_n_expr (fndecl, 2,
6936c61a 1870 build1 (ADDR_EXPR,
825da0d2 1871 build_pointer_type (char_type_node),
a1ab4c31 1872 filename),
ba464315 1873 line);
a1ab4c31 1874}
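/* As a sketch, assuming a source location of foo.adb:12, the call built
   above is equivalent to FNDECL ("foo.adb", 12), where FNDECL is the
   entry of gnat_raise_decls selected by MSG and the file name is passed
   by address.  */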
437f8c1e 1875
ba464315
EB
1876/* Similar to build_call_raise, with extra information about the column
1877 where the check failed. */
437f8c1e
AC
1878
1879tree
9a1bdc31 1880build_call_raise_column (int msg, Node_Id gnat_node, char kind)
437f8c1e 1881{
8f8f531f 1882 Entity_Id gnat_label = get_exception_label (kind);
437f8c1e 1883 tree fndecl = gnat_raise_decls_ext[msg];
ba464315 1884 tree filename, line, col;
437f8c1e 1885
9a1bdc31 1886 /* If this is to be done as a goto, handle that case. */
8f8f531f
PMR
1887 if (Present (gnat_label))
1888 return build_goto_raise (gnat_label, msg);
9a1bdc31 1889
ba464315 1890 expand_sloc (gnat_node, &filename, &line, &col);
437f8c1e 1891
dddf8120 1892 return
ba464315 1893 build_call_n_expr (fndecl, 3,
dddf8120 1894 build1 (ADDR_EXPR,
825da0d2 1895 build_pointer_type (char_type_node),
dddf8120 1896 filename),
ba464315 1897 line, col);
437f8c1e
AC
1898}
1899
ba464315
EB
1900/* Similar to build_call_raise_column, for an index or range check exception,
1901 with extra information of the form "INDEX out of range FIRST..LAST". */
437f8c1e
AC
1902
1903tree
9a1bdc31 1904build_call_raise_range (int msg, Node_Id gnat_node, char kind,
ba464315 1905 tree index, tree first, tree last)
437f8c1e 1906{
8f8f531f 1907 Entity_Id gnat_label = get_exception_label (kind);
437f8c1e 1908 tree fndecl = gnat_raise_decls_ext[msg];
ba464315 1909 tree filename, line, col;
437f8c1e 1910
9a1bdc31 1911 /* If this is to be done as a goto, handle that case. */
8f8f531f
PMR
1912 if (Present (gnat_label))
1913 return build_goto_raise (gnat_label, msg);
9a1bdc31 1914
ba464315 1915 expand_sloc (gnat_node, &filename, &line, &col);
437f8c1e 1916
dddf8120 1917 return
ba464315 1918 build_call_n_expr (fndecl, 6,
dddf8120 1919 build1 (ADDR_EXPR,
825da0d2 1920 build_pointer_type (char_type_node),
dddf8120 1921 filename),
ba464315
EB
1922 line, col,
1923 convert (integer_type_node, index),
1924 convert (integer_type_node, first),
1925 convert (integer_type_node, last));
437f8c1e 1926}
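/* Likewise, as a sketch: for a failed index check on I with bounds
   1 .. 10 at foo.adb:12:5, the call built above is equivalent to
   FNDECL ("foo.adb", 12, 5, I, 1, 10), everything past the file name
   being passed as integers.  */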
ce2d0ce2 1927
a1ab4c31
AC
1928/* qsort comparer for the bit positions of two constructor elements
1929 for record components. */
1930
1931static int
1932compare_elmt_bitpos (const PTR rt1, const PTR rt2)
1933{
bb5faf73
JW
1934 const constructor_elt * const elmt1 = (const constructor_elt *) rt1;
1935 const constructor_elt * const elmt2 = (const constructor_elt *) rt2;
0e228dd9
NF
1936 const_tree const field1 = elmt1->index;
1937 const_tree const field2 = elmt2->index;
a1ab4c31
AC
1938 const int ret
1939 = tree_int_cst_compare (bit_position (field1), bit_position (field2));
1940
1941 return ret ? ret : (int) (DECL_UID (field1) - DECL_UID (field2));
1942}
1943
0e228dd9 1944/* Return a CONSTRUCTOR of TYPE whose elements are V. */
a1ab4c31
AC
1945
1946tree
9771b263 1947gnat_build_constructor (tree type, vec<constructor_elt, va_gc> *v)
a1ab4c31 1948{
a1ab4c31 1949 bool allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
88293f03 1950 bool read_only = true;
a1ab4c31 1951 bool side_effects = false;
0e228dd9
NF
1952 tree result, obj, val;
1953 unsigned int n_elmts;
a1ab4c31
AC
1954
1955 /* Scan the elements to see if they are all constant or if any has side
1956 effects, to let us set global flags on the resulting constructor. Count
1957 the elements along the way for possible sorting purposes below. */
0e228dd9 1958 FOR_EACH_CONSTRUCTOR_ELT (v, n_elmts, obj, val)
a1ab4c31 1959 {
324c9b02 1960 /* The predicate must be in keeping with output_constructor. */
cce30ea8 1961 if ((!TREE_CONSTANT (val) && !TREE_STATIC (val))
a1ab4c31 1962 || (TREE_CODE (type) == RECORD_TYPE
324c9b02
OH
1963 && CONSTRUCTOR_BITFIELD_P (obj)
1964 && !initializer_constant_valid_for_bitfield_p (val))
ee45a32d
EB
1965 || !initializer_constant_valid_p (val,
1966 TREE_TYPE (val),
1967 TYPE_REVERSE_STORAGE_ORDER (type)))
a1ab4c31
AC
1968 allconstant = false;
1969
88293f03
EB
1970 if (!TREE_READONLY (val))
1971 read_only = false;
1972
324c9b02 1973 if (TREE_SIDE_EFFECTS (val))
a1ab4c31 1974 side_effects = true;
a1ab4c31
AC
1975 }
1976
1977 /* For record types with constant components only, sort the field
1978 list by increasing bit position. This is necessary to ensure that
1979 the constructor can be output as static data. */
1980 if (allconstant && TREE_CODE (type) == RECORD_TYPE && n_elmts > 1)
9771b263 1981 v->qsort (compare_elmt_bitpos);
a1ab4c31 1982
0e228dd9 1983 result = build_constructor (type, v);
1448093c 1984 CONSTRUCTOR_NO_CLEARING (result) = 1;
a1ab4c31
AC
1985 TREE_CONSTANT (result) = TREE_STATIC (result) = allconstant;
1986 TREE_SIDE_EFFECTS (result) = side_effects;
88293f03 1987 TREE_READONLY (result) = TYPE_READONLY (type) || read_only || allconstant;
a1ab4c31
AC
1988 return result;
1989}
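/* For instance, a constructor whose elements were accumulated out of
   order, say { (F2, 1), (F1, 0) } with F1 at bit 0 and F2 at bit 32,
   is reordered to { (F1, 0), (F2, 1) } when all elements are constant,
   so that it can be emitted as static data.  */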
ce2d0ce2 1990
64235766
EB
1991/* Return a COMPONENT_REF to access FIELD in RECORD, or NULL_TREE if the field
1992 is not found in the record. Don't fold the result if NO_FOLD is true. */
a1ab4c31 1993
64235766
EB
1994static tree
1995build_simple_component_ref (tree record, tree field, bool no_fold)
a1ab4c31 1996{
64235766
EB
1997 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (record));
1998 tree ref;
a1ab4c31 1999
bb24f343
EB
2000 /* The failure of this assertion will very likely come from a missing
2001 insertion of an explicit dereference. */
64235766 2002 gcc_assert (RECORD_OR_UNION_TYPE_P (type) && COMPLETE_TYPE_P (type));
a1ab4c31 2003
64235766
EB
2004 /* Try to fold a conversion from another record or union type unless the type
2005 contains a placeholder as it might be needed for a later substitution. */
2006 if (TREE_CODE (record) == VIEW_CONVERT_EXPR
2007 && RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (record, 0)))
2008 && !type_contains_placeholder_p (type))
2009 {
2010 tree op = TREE_OPERAND (record, 0);
2011
2012 /* If this is an unpadding operation, convert the underlying object to
2013 the unpadded type directly. */
2014 if (TYPE_IS_PADDING_P (type) && field == TYPE_FIELDS (type))
2015 return convert (TREE_TYPE (field), op);
2016
2017 /* Otherwise try to access FIELD directly in the underlying type, but
2018 make sure that the form of the reference doesn't change too much;
2019 this can happen for an unconstrained bit-packed array type whose
2020 constrained form can be an integer type. */
2021 ref = build_simple_component_ref (op, field, no_fold);
2022 if (ref && TREE_CODE (TREE_TYPE (ref)) == TREE_CODE (TREE_TYPE (field)))
2023 return ref;
2024 }
a1ab4c31 2025
50a6af05
EB
2026 /* If this field is not in the specified record, see if we can find a field
2027 in the specified record whose original field is the same as this one. */
64235766 2028 if (DECL_CONTEXT (field) != type)
a1ab4c31
AC
2029 {
2030 tree new_field;
2031
d4aef883 2032 /* First loop through normal components. */
64235766 2033 for (new_field = TYPE_FIELDS (type);
42acad07 2034 new_field;
910ad8de 2035 new_field = DECL_CHAIN (new_field))
cb3d597d 2036 if (SAME_FIELD_P (field, new_field))
a1ab4c31
AC
2037 break;
2038
d4aef883 2039 /* Next, loop through DECL_INTERNAL_P components if we haven't found the
42acad07
EB
2040 component in the first search. Doing this search in two steps is
2041 required to avoid hidden homonymous fields in the _Parent field. */
a1ab4c31 2042 if (!new_field)
64235766 2043 for (new_field = TYPE_FIELDS (type);
42acad07 2044 new_field;
910ad8de 2045 new_field = DECL_CHAIN (new_field))
64235766
EB
2046 if (DECL_INTERNAL_P (new_field)
2047 && RECORD_OR_UNION_TYPE_P (TREE_TYPE (new_field)))
a1ab4c31
AC
2048 {
2049 tree field_ref
64235766
EB
2050 = build_simple_component_ref (record, new_field, no_fold);
2051 ref = build_simple_component_ref (field_ref, field, no_fold);
a1ab4c31
AC
2052 if (ref)
2053 return ref;
2054 }
2055
2056 field = new_field;
2057 }
2058
2059 if (!field)
2060 return NULL_TREE;
2061
42acad07
EB
2062 /* If the field's offset has overflowed, do not try to access it, as doing
2063 so may trigger sanity checks deeper in the back-end. Note that we don't
2064 need to warn since this will be done on trying to declare the object. */
a1ab4c31
AC
2065 if (TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST
2066 && TREE_OVERFLOW (DECL_FIELD_OFFSET (field)))
2067 return NULL_TREE;
2068
64235766 2069 ref = build3 (COMPONENT_REF, TREE_TYPE (field), record, field, NULL_TREE);
a1ab4c31 2070
64235766 2071 if (TREE_READONLY (record)
42acad07 2072 || TREE_READONLY (field)
64235766 2073 || TYPE_READONLY (type))
a1ab4c31 2074 TREE_READONLY (ref) = 1;
42acad07 2075
64235766 2076 if (TREE_THIS_VOLATILE (record)
42acad07 2077 || TREE_THIS_VOLATILE (field)
64235766 2078 || TYPE_VOLATILE (type))
a1ab4c31
AC
2079 TREE_THIS_VOLATILE (ref) = 1;
2080
64235766 2081 if (no_fold)
a1ab4c31
AC
2082 return ref;
2083
2084 /* The generic folder may punt in this case because the inner array type
2085 can be self-referential, but folding is in fact not problematic. */
64235766
EB
2086 if (TREE_CODE (record) == CONSTRUCTOR
2087 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (record)))
a1ab4c31 2088 {
64235766
EB
2089 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (record);
2090 unsigned HOST_WIDE_INT idx;
2091 tree index, value;
2092 FOR_EACH_CONSTRUCTOR_ELT (elts, idx, index, value)
2093 if (index == field)
2094 return value;
a1ab4c31
AC
2095 return ref;
2096 }
2097
42acad07 2098 return fold (ref);
a1ab4c31 2099}
73a1a803 2100
64235766
EB
2101/* Likewise, but return NULL_EXPR and generate a Constraint_Error if the
2102 field is not found in the record. */
a1ab4c31
AC
2103
2104tree
64235766 2105build_component_ref (tree record, tree field, bool no_fold)
a1ab4c31 2106{
64235766 2107 tree ref = build_simple_component_ref (record, field, no_fold);
a1ab4c31
AC
2108 if (ref)
2109 return ref;
2110
64235766 2111 /* Assume this is an invalid user field so raise Constraint_Error. */
a1ab4c31
AC
2112 return build1 (NULL_EXPR, TREE_TYPE (field),
2113 build_call_raise (CE_Discriminant_Check_Failed, Empty,
2114 N_Raise_Constraint_Error));
2115}
ce2d0ce2 2116
ff346f70
OH
2117/* Helper for build_call_alloc_dealloc, with arguments to be interpreted
2118 identically. Process the case where a GNAT_PROC to call is provided. */
a1ab4c31 2119
ff346f70
OH
2120static inline tree
2121build_call_alloc_dealloc_proc (tree gnu_obj, tree gnu_size, tree gnu_type,
2122 Entity_Id gnat_proc, Entity_Id gnat_pool)
a1ab4c31 2123{
ff346f70 2124 tree gnu_proc = gnat_to_gnu (gnat_proc);
ff346f70 2125 tree gnu_call;
a1ab4c31 2126
7b50c4a3
AC
2127 /* A storage pool's underlying type is a record type (for both predefined
2128 storage pools and GNAT simple storage pools). The secondary stack uses
2129 the same mechanism, but its pool object (SS_Pool) is an integer. */
2130 if (Is_Record_Type (Underlying_Type (Etype (gnat_pool))))
a1ab4c31 2131 {
ff346f70
OH
2132 /* The size is the third parameter; the alignment parameter
2133 has the same type. */
2134 Entity_Id gnat_size_type
2135 = Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc))));
2136 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
2137
2138 tree gnu_pool = gnat_to_gnu (gnat_pool);
2139 tree gnu_pool_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_pool);
2140 tree gnu_align = size_int (TYPE_ALIGN (gnu_type) / BITS_PER_UNIT);
2141
2142 gnu_size = convert (gnu_size_type, gnu_size);
2143 gnu_align = convert (gnu_size_type, gnu_align);
2144
2145 /* The first arg is always the address of the storage pool; next
2146 comes the address of the object, for a deallocator, then the
2147 size and alignment. */
2148 if (gnu_obj)
dddf8120
EB
2149 gnu_call = build_call_n_expr (gnu_proc, 4, gnu_pool_addr, gnu_obj,
2150 gnu_size, gnu_align);
ff346f70 2151 else
dddf8120
EB
2152 gnu_call = build_call_n_expr (gnu_proc, 3, gnu_pool_addr,
2153 gnu_size, gnu_align);
ff346f70 2154 }
a1ab4c31 2155
ff346f70
OH
2156 /* Secondary stack case. */
2157 else
2158 {
2159 /* The size is the second parameter. */
2160 Entity_Id gnat_size_type
2161 = Etype (Next_Formal (First_Formal (gnat_proc)));
2162 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
2163
2164 gnu_size = convert (gnu_size_type, gnu_size);
2165
2166 /* The first arg is the address of the object, for a deallocator,
2167 then the size. */
2168 if (gnu_obj)
dddf8120 2169 gnu_call = build_call_n_expr (gnu_proc, 2, gnu_obj, gnu_size);
a1ab4c31 2170 else
dddf8120 2171 gnu_call = build_call_n_expr (gnu_proc, 1, gnu_size);
a1ab4c31
AC
2172 }
2173
ff346f70
OH
2174 return gnu_call;
2175}
2176
2177/* Helper for build_call_alloc_dealloc, to build and return an allocator for
2178 DATA_SIZE bytes aimed at containing a DATA_TYPE object, using the default
2179 __gnat_malloc allocator. Honor DATA_TYPE alignments greater than what the
2180 latter offers. */
2181
2182static inline tree
2183maybe_wrap_malloc (tree data_size, tree data_type, Node_Id gnat_node)
2184{
2185 /* When the DATA_TYPE alignment is stricter than what malloc offers
2186 (super-aligned case), we allocate an "aligning" wrapper type and return
2187 the address of its single data field with the malloc's return value
2188 stored just in front. */
2189
2190 unsigned int data_align = TYPE_ALIGN (data_type);
c5ecd6b7
AC
2191 unsigned int system_allocator_alignment
2192 = get_target_system_allocator_alignment () * BITS_PER_UNIT;
ff346f70
OH
2193
2194 tree aligning_type
c5ecd6b7 2195 = ((data_align > system_allocator_alignment)
ff346f70 2196 ? make_aligning_type (data_type, data_align, data_size,
c5ecd6b7 2197 system_allocator_alignment,
0746af5e
EB
2198 POINTER_SIZE / BITS_PER_UNIT,
2199 gnat_node)
ff346f70 2200 : NULL_TREE);
a1ab4c31 2201
ff346f70
OH
2202 tree size_to_malloc
2203 = aligning_type ? TYPE_SIZE_UNIT (aligning_type) : data_size;
6f61bd41 2204
1eb58520 2205 tree malloc_ptr = build_call_n_expr (malloc_decl, 1, size_to_malloc);
ff346f70
OH
2206
2207 if (aligning_type)
2208 {
2209 /* Latch malloc's return value and get a pointer to the aligning field
2210 first. */
7d7a1fe8 2211 tree storage_ptr = gnat_protect_expr (malloc_ptr);
ff346f70
OH
2212
2213 tree aligning_record_addr
2214 = convert (build_pointer_type (aligning_type), storage_ptr);
2215
2216 tree aligning_record
2217 = build_unary_op (INDIRECT_REF, NULL_TREE, aligning_record_addr);
2218
2219 tree aligning_field
64235766
EB
2220 = build_component_ref (aligning_record, TYPE_FIELDS (aligning_type),
2221 false);
ff346f70
OH
2222
2223 tree aligning_field_addr
2224 = build_unary_op (ADDR_EXPR, NULL_TREE, aligning_field);
2225
2226 /* Then arrange to store the allocator's return value ahead
2227 and return. */
2228 tree storage_ptr_slot_addr
1366ba41
EB
2229 = build_binary_op (POINTER_PLUS_EXPR, ptr_type_node,
2230 convert (ptr_type_node, aligning_field_addr),
437926c0
JJ
2231 size_int (-(HOST_WIDE_INT) POINTER_SIZE
2232 / BITS_PER_UNIT));
ff346f70
OH
2233
2234 tree storage_ptr_slot
2235 = build_unary_op (INDIRECT_REF, NULL_TREE,
1366ba41 2236 convert (build_pointer_type (ptr_type_node),
ff346f70
OH
2237 storage_ptr_slot_addr));
2238
2239 return
2240 build2 (COMPOUND_EXPR, TREE_TYPE (aligning_field_addr),
0d24bf76 2241 build_binary_op (INIT_EXPR, NULL_TREE,
ff346f70
OH
2242 storage_ptr_slot, storage_ptr),
2243 aligning_field_addr);
2244 }
2245 else
2246 return malloc_ptr;
2247}
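/* Memory layout produced by maybe_wrap_malloc in the super-aligned case
   (a sketch, not to scale):

     malloc result ---> +---------------------------+
                        :   padding as needed       :
                        +---------------------------+
                        |   saved malloc pointer    |  one pointer wide
     returned value --> +---------------------------+  DATA_ALIGN boundary
                        |   data                    |
                        +---------------------------+

   maybe_wrap_free below walks back POINTER_SIZE / BITS_PER_UNIT bytes
   from the data pointer to retrieve the saved pointer and free it.  */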
2248
2249/* Helper for build_call_alloc_dealloc, to release a DATA_TYPE object
2250 designated by DATA_PTR using the __gnat_free entry point. */
2251
2252static inline tree
2253maybe_wrap_free (tree data_ptr, tree data_type)
2254{
2255 /* In the regular alignment case, we pass the data pointer straight to free.
2256 In the super-aligned case, we need to retrieve the initial allocator
2257 return value, stored in front of the data block at allocation time. */
2258
2259 unsigned int data_align = TYPE_ALIGN (data_type);
c5ecd6b7
AC
2260 unsigned int system_allocator_alignment
2261 = get_target_system_allocator_alignment () * BITS_PER_UNIT;
6f61bd41 2262
ff346f70
OH
2263 tree free_ptr;
2264
c5ecd6b7 2265 if (data_align > system_allocator_alignment)
ff346f70
OH
2266 {
2267 /* DATA_FRONT_PTR (void *)
2268 = (void *)DATA_PTR - (void *)sizeof (void *)) */
2269 tree data_front_ptr
2270 = build_binary_op
1366ba41
EB
2271 (POINTER_PLUS_EXPR, ptr_type_node,
2272 convert (ptr_type_node, data_ptr),
437926c0 2273 size_int (-(HOST_WIDE_INT) POINTER_SIZE / BITS_PER_UNIT));
ff346f70
OH
2274
2275 /* FREE_PTR (void *) = *(void **)DATA_FRONT_PTR */
2276 free_ptr
2277 = build_unary_op
2278 (INDIRECT_REF, NULL_TREE,
1366ba41 2279 convert (build_pointer_type (ptr_type_node), data_front_ptr));
ff346f70
OH
2280 }
2281 else
2282 free_ptr = data_ptr;
2283
dddf8120 2284 return build_call_n_expr (free_decl, 1, free_ptr);
ff346f70
OH
2285}
2286
2287/* Build a GCC tree to call an allocation or deallocation function.
2288 If GNU_OBJ is nonzero, it is an object to deallocate. Otherwise,
2289 generate an allocator.
2290
2291 GNU_SIZE is the number of bytes to allocate and GNU_TYPE is the contained
2292 object type, used to determine the to-be-honored address alignment.
2293 GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the storage
2294 pool to use. If not present, malloc and free are used. GNAT_NODE is used
2295 to provide an error location for restriction violation messages. */
2296
2297tree
2298build_call_alloc_dealloc (tree gnu_obj, tree gnu_size, tree gnu_type,
2299 Entity_Id gnat_proc, Entity_Id gnat_pool,
2300 Node_Id gnat_node)
2301{
ff346f70
OH
2302 /* Explicit proc to call? This one is assumed to deal with the type
2303 alignment constraints. */
2304 if (Present (gnat_proc))
2305 return build_call_alloc_dealloc_proc (gnu_obj, gnu_size, gnu_type,
2306 gnat_proc, gnat_pool);
2307
2308 /* Otherwise, object to "free" or "malloc" with possible special processing
2309 for alignments stricter than what the default allocator honors. */
2310 else if (gnu_obj)
2311 return maybe_wrap_free (gnu_obj, gnu_type);
2312 else
2313 {
2314 /* Assert that we no longer can be called with this special pool. */
2315 gcc_assert (gnat_pool != -1);
2316
2317 /* Check that we aren't violating the associated restriction. */
2318 if (!(Nkind (gnat_node) == N_Allocator && Comes_From_Source (gnat_node)))
9733088f
TG
2319 {
2320 Check_No_Implicit_Heap_Alloc (gnat_node);
2321 if (Has_Task (Etype (gnat_node)))
2322 Check_No_Implicit_Task_Alloc (gnat_node);
2323 if (Has_Protected (Etype (gnat_node)))
2324 Check_No_Implicit_Protected_Alloc (gnat_node);
2325 }
ff346f70
OH
2326 return maybe_wrap_malloc (gnu_size, gnu_type, gnat_node);
2327 }
a1ab4c31 2328}
ce2d0ce2 2329
bdbebf66 2330/* Build a GCC tree that corresponds to allocating an object of TYPE whose
a1ab4c31 2331 initial value is INIT, if INIT is nonzero. Convert the expression to
bdbebf66 2332 RESULT_TYPE, which must be some pointer type, and return the result.
6f61bd41 2333
a1ab4c31
AC
2334 GNAT_PROC and GNAT_POOL optionally give the procedure to call and
2335 the storage pool to use. GNAT_NODE is used to provide an error
6f61bd41 2336 location for restriction violation messages. If IGNORE_INIT_TYPE is
a1ab4c31
AC
2337 true, ignore the type of INIT for the purpose of determining the size;
2338 this will cause the maximum size to be allocated if TYPE is of
2339 self-referential size. */
2340
2341tree
2342build_allocator (tree type, tree init, tree result_type, Entity_Id gnat_proc,
2343 Entity_Id gnat_pool, Node_Id gnat_node, bool ignore_init_type)
2344{
bdbebf66 2345 tree size, storage, storage_deref, storage_init;
a1ab4c31
AC
2346
2347 /* If the initializer, if present, is a NULL_EXPR, just return a new one. */
2348 if (init && TREE_CODE (init) == NULL_EXPR)
2349 return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));
2350
f4657d60
EB
2351 /* If we are just annotating types, also return a NULL_EXPR. */
2352 else if (type_annotate_only)
2353 return build1 (NULL_EXPR, result_type,
2354 build_call_raise (CE_Range_Check_Failed, gnat_node,
2355 N_Raise_Constraint_Error));
2356
7e169899
EB
2357 /* If the initializer, if present, is a COND_EXPR, deal with each branch. */
2358 else if (init && TREE_CODE (init) == COND_EXPR)
2359 return build3 (COND_EXPR, result_type, TREE_OPERAND (init, 0),
2360 build_allocator (type, TREE_OPERAND (init, 1), result_type,
2361 gnat_proc, gnat_pool, gnat_node,
2362 ignore_init_type),
2363 build_allocator (type, TREE_OPERAND (init, 2), result_type,
2364 gnat_proc, gnat_pool, gnat_node,
2365 ignore_init_type));
2366
a1ab4c31
AC
2367 /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
2368 sizes of the object and its template. Allocate the whole thing and
2369 fill in the parts that are known. */
315cff15 2370 else if (TYPE_IS_FAT_OR_THIN_POINTER_P (result_type))
a1ab4c31
AC
2371 {
2372 tree storage_type
2373 = build_unc_object_type_from_ptr (result_type, type,
928dfa4b 2374 get_identifier ("ALLOC"), false);
a1ab4c31
AC
2375 tree template_type = TREE_TYPE (TYPE_FIELDS (storage_type));
2376 tree storage_ptr_type = build_pointer_type (storage_type);
a1ab4c31
AC
2377
2378 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (storage_type),
2379 init);
2380
bdbebf66 2381 /* If the size overflows, pass -1 so Storage_Error will be raised. */
ce3da0d0 2382 if (TREE_CODE (size) == INTEGER_CST && !valid_constant_size_p (size))
f54ee980 2383 size = size_int (-1);
a1ab4c31 2384
ff346f70 2385 storage = build_call_alloc_dealloc (NULL_TREE, size, storage_type,
a1ab4c31 2386 gnat_proc, gnat_pool, gnat_node);
7d7a1fe8 2387 storage = convert (storage_ptr_type, gnat_protect_expr (storage));
bdbebf66
EB
2388 storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
2389 TREE_THIS_NOTRAP (storage_deref) = 1;
a1ab4c31 2390
8ffac116
EB
2391 /* If there is an initializing expression, then make a constructor for
2392 the entire object including the bounds and copy it into the object.
2393 If there is no initializing expression, just set the bounds. */
a1ab4c31
AC
2394 if (init)
2395 {
9771b263
DN
2396 vec<constructor_elt, va_gc> *v;
2397 vec_alloc (v, 2);
0e228dd9
NF
2398
2399 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (storage_type),
2400 build_template (template_type, type, init));
910ad8de 2401 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (storage_type)),
0e228dd9 2402 init);
bdbebf66 2403 storage_init
0d24bf76 2404 = build_binary_op (INIT_EXPR, NULL_TREE, storage_deref,
bdbebf66 2405 gnat_build_constructor (storage_type, v));
a1ab4c31
AC
2406 }
2407 else
bdbebf66 2408 storage_init
0d24bf76 2409 = build_binary_op (INIT_EXPR, NULL_TREE,
64235766 2410 build_component_ref (storage_deref,
bdbebf66
EB
2411 TYPE_FIELDS (storage_type),
2412 false),
2413 build_template (template_type, type, NULL_TREE));
2414
2415 return build2 (COMPOUND_EXPR, result_type,
2416 storage_init, convert (result_type, storage));
a1ab4c31
AC
2417 }
2418
bdbebf66
EB
2419 size = TYPE_SIZE_UNIT (type);
2420
a1ab4c31
AC
2421 /* If we have an initializing expression, see if its size is simpler
2422 than the size from the type. */
2423 if (!ignore_init_type && init && TYPE_SIZE_UNIT (TREE_TYPE (init))
2424 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init))) == INTEGER_CST
2425 || CONTAINS_PLACEHOLDER_P (size)))
2426 size = TYPE_SIZE_UNIT (TREE_TYPE (init));
2427
2428 /* If the size is still self-referential, reference the initializing
683ccd05
EB
2429 expression, if it is present. If not, this must have been a call
2430 to allocate a library-level object, in which case we just use the
2431 maximum size. */
2432 if (!ignore_init_type && init)
2433 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, init);
2434 else if (CONTAINS_PLACEHOLDER_P (size))
2435 size = max_size (size, true);
a1ab4c31 2436
bdbebf66 2437 /* If the size overflows, pass -1 so Storage_Error will be raised. */
ce3da0d0 2438 if (TREE_CODE (size) == INTEGER_CST && !valid_constant_size_p (size))
f54ee980 2439 size = size_int (-1);
a1ab4c31 2440
bdbebf66
EB
2441 storage = convert (result_type,
2442 build_call_alloc_dealloc (NULL_TREE, size, type,
2443 gnat_proc, gnat_pool,
2444 gnat_node));
a1ab4c31 2445
ced57283
EB
2446 /* If we have an initial value, protect the new address, assign the value
2447 and return the address with a COMPOUND_EXPR. */
a1ab4c31
AC
2448 if (init)
2449 {
bdbebf66
EB
2450 storage = gnat_protect_expr (storage);
2451 storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
2452 TREE_THIS_NOTRAP (storage_deref) = 1;
2453 storage_init
0d24bf76 2454 = build_binary_op (INIT_EXPR, NULL_TREE, storage_deref, init);
bdbebf66 2455 return build2 (COMPOUND_EXPR, result_type, storage_init, storage);
a1ab4c31
AC
2456 }
2457
bdbebf66 2458 return storage;
a1ab4c31 2459}
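/* By way of example, allocating an unconstrained array with initializer
   INIT through a fat pointer RESULT_TYPE yields, in sketch form:

     (STORAGE = alloc (size of template + size of data),
      *STORAGE = { template, INIT },
      (RESULT_TYPE) STORAGE)

   i.e. a COMPOUND_EXPR whose value is the converted storage pointer.  */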
ce2d0ce2 2460
ced57283 2461/* Indicate that we need to take the address of T and that it therefore
7c775aca 2462 should not be allocated in a register. Return true if successful. */
a1ab4c31
AC
2463
2464bool
ced57283 2465gnat_mark_addressable (tree t)
a1ab4c31 2466{
ced57283
EB
2467 while (true)
2468 switch (TREE_CODE (t))
a1ab4c31
AC
2469 {
2470 case ADDR_EXPR:
2471 case COMPONENT_REF:
2472 case ARRAY_REF:
2473 case ARRAY_RANGE_REF:
2474 case REALPART_EXPR:
2475 case IMAGPART_EXPR:
2476 case VIEW_CONVERT_EXPR:
2477 case NON_LVALUE_EXPR:
2478 CASE_CONVERT:
ced57283 2479 t = TREE_OPERAND (t, 0);
a1ab4c31
AC
2480 break;
2481
0b3467c4
EB
2482 case COMPOUND_EXPR:
2483 t = TREE_OPERAND (t, 1);
2484 break;
2485
a1ab4c31 2486 case CONSTRUCTOR:
ced57283 2487 TREE_ADDRESSABLE (t) = 1;
a1ab4c31
AC
2488 return true;
2489
2490 case VAR_DECL:
2491 case PARM_DECL:
2492 case RESULT_DECL:
ced57283 2493 TREE_ADDRESSABLE (t) = 1;
a1ab4c31
AC
2494 return true;
2495
2496 case FUNCTION_DECL:
ced57283 2497 TREE_ADDRESSABLE (t) = 1;
a1ab4c31
AC
2498 return true;
2499
2500 case CONST_DECL:
ced57283
EB
2501 return DECL_CONST_CORRESPONDING_VAR (t)
2502 && gnat_mark_addressable (DECL_CONST_CORRESPONDING_VAR (t));
2503
a1ab4c31
AC
2504 default:
2505 return true;
2506 }
2507}
ce2d0ce2 2508
7194767c
EB
2509/* Return true if EXP is a stable expression for the purpose of the functions
2510 below and, therefore, can be returned unmodified by them. We accept things
2511 that are actual constants or that have already been handled. */
2512
2513static bool
2514gnat_stable_expr_p (tree exp)
2515{
2516 enum tree_code code = TREE_CODE (exp);
2517 return TREE_CONSTANT (exp) || code == NULL_EXPR || code == SAVE_EXPR;
2518}
2519
7d7a1fe8
EB
2520/* Save EXP for later use or reuse. This is equivalent to save_expr in tree.c
2521 but we know how to handle our own nodes. */
2522
2523tree
2524gnat_save_expr (tree exp)
2525{
2526 tree type = TREE_TYPE (exp);
2527 enum tree_code code = TREE_CODE (exp);
2528
7194767c 2529 if (gnat_stable_expr_p (exp))
7d7a1fe8
EB
2530 return exp;
2531
2532 if (code == UNCONSTRAINED_ARRAY_REF)
2533 {
2534 tree t = build1 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)));
2535 TREE_READONLY (t) = TYPE_READONLY (type);
2536 return t;
2537 }
2538
2539 /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
2540 This may be more efficient, but will also allow us to more easily find
2541 the match for the PLACEHOLDER_EXPR. */
2542 if (code == COMPONENT_REF
2543 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2544 return build3 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)),
552cc590 2545 TREE_OPERAND (exp, 1), NULL_TREE);
7d7a1fe8
EB
2546
2547 return save_expr (exp);
2548}
2549
2550/* Protect EXP for immediate reuse. This is a variant of gnat_save_expr that
2551 is optimized under the assumption that EXP's value doesn't change before
2552 its subsequent reuse(s) except through its potential reevaluation. */
2553
2554tree
2555gnat_protect_expr (tree exp)
2556{
2557 tree type = TREE_TYPE (exp);
2558 enum tree_code code = TREE_CODE (exp);
2559
7194767c 2560 if (gnat_stable_expr_p (exp))
7d7a1fe8
EB
2561 return exp;
2562
308e6f3a 2563 /* If EXP has no side effects, we theoretically don't need to do anything.
7d7a1fe8
EB
2564 However, we may be recursively passed more and more complex expressions
2565 involving checks which will be reused multiple times and eventually be
2566 unshared for gimplification; in order to avoid a complexity explosion
2567 at that point, we protect any expressions more complex than a simple
2568 arithmetic expression. */
58c8f770
EB
2569 if (!TREE_SIDE_EFFECTS (exp))
2570 {
2571 tree inner = skip_simple_arithmetic (exp);
2572 if (!EXPR_P (inner) || REFERENCE_CLASS_P (inner))
2573 return exp;
2574 }
7d7a1fe8
EB
2575
2576 /* If this is a conversion, protect what's inside the conversion. */
2577 if (code == NON_LVALUE_EXPR
2578 || CONVERT_EXPR_CODE_P (code)
2579 || code == VIEW_CONVERT_EXPR)
2580 return build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
2581
2582 /* If we're indirectly referencing something, we only need to protect the
2583 address since the data itself can't change in these situations. */
2584 if (code == INDIRECT_REF || code == UNCONSTRAINED_ARRAY_REF)
2585 {
2586 tree t = build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
2587 TREE_READONLY (t) = TYPE_READONLY (type);
2588 return t;
2589 }
2590
96826e28
EB
2591 /* Likewise if we're indirectly referencing part of something. */
2592 if (code == COMPONENT_REF
2593 && TREE_CODE (TREE_OPERAND (exp, 0)) == INDIRECT_REF)
2594 return build3 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)),
2595 TREE_OPERAND (exp, 1), NULL_TREE);
2596
7d7a1fe8
EB
2597 /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
2598 This may be more efficient, but will also allow us to more easily find
2599 the match for the PLACEHOLDER_EXPR. */
2600 if (code == COMPONENT_REF
2601 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2602 return build3 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)),
552cc590 2603 TREE_OPERAND (exp, 1), NULL_TREE);
7d7a1fe8 2604
bf17fe3f
EB
2605 /* If this is a fat pointer or a scalar, just make a SAVE_EXPR. Likewise
2606 for a CALL_EXPR as large objects are returned via invisible reference
2607 in most ABIs so the temporary will directly be filled by the callee. */
7d7a1fe8 2608 if (TYPE_IS_FAT_POINTER_P (type)
bf17fe3f 2609 || !AGGREGATE_TYPE_P (type)
7d7a1fe8
EB
2610 || code == CALL_EXPR)
2611 return save_expr (exp);
2612
2613 /* Otherwise, take a reference: protect the address and dereference it. */
2614 return
2615 build_unary_op (INDIRECT_REF, type,
96826e28 2616 save_expr (build_unary_op (ADDR_EXPR, NULL_TREE, exp)));
7d7a1fe8
EB
2617}
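/* For instance, protecting *F (X), where the pointer comes from a call,
   only requires saving the address: the result is *SAVE_EXPR (F (X)),
   since the pointed-to data cannot change in this situation. By
   contrast, a CALL_EXPR of aggregate type is wrapped in a SAVE_EXPR
   directly, because large results are returned via invisible reference
   in most ABIs and the temporary is filled by the callee.  */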
2618
2619/* This is equivalent to stabilize_reference_1 in tree.c but we take an extra
2620 argument to force evaluation of everything. */
2621
2622static tree
fc7a823e 2623gnat_stabilize_reference_1 (tree e, void *data)
7d7a1fe8 2624{
241125b2 2625 const bool force = *(bool *)data;
7d7a1fe8
EB
2626 enum tree_code code = TREE_CODE (e);
2627 tree type = TREE_TYPE (e);
2628 tree result;
2629
7194767c 2630 if (gnat_stable_expr_p (e))
7d7a1fe8
EB
2631 return e;
2632
2633 switch (TREE_CODE_CLASS (code))
2634 {
2635 case tcc_exceptional:
2636 case tcc_declaration:
2637 case tcc_comparison:
2638 case tcc_expression:
2639 case tcc_reference:
2640 case tcc_vl_exp:
2641 /* If this is a COMPONENT_REF of a fat pointer, save the entire
2642 fat pointer. This may be more efficient, but will also allow
2643 us to more easily find the match for the PLACEHOLDER_EXPR. */
2644 if (code == COMPONENT_REF
2645 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (e, 0))))
2646 result
2647 = build3 (code, type,
fc7a823e 2648 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data),
552cc590 2649 TREE_OPERAND (e, 1), NULL_TREE);
7d7a1fe8
EB
2650 /* If the expression has side-effects, then encase it in a SAVE_EXPR
2651 so that it will only be evaluated once. */
2652 /* The tcc_reference and tcc_comparison classes could be handled as
2653 below, but it is generally faster to only evaluate them once. */
2654 else if (TREE_SIDE_EFFECTS (e) || force)
2655 return save_expr (e);
2656 else
2657 return e;
2658 break;
2659
2660 case tcc_binary:
2661 /* Recursively stabilize each operand. */
2662 result
2663 = build2 (code, type,
fc7a823e
EB
2664 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data),
2665 gnat_stabilize_reference_1 (TREE_OPERAND (e, 1), data));
7d7a1fe8
EB
2666 break;
2667
2668 case tcc_unary:
2669 /* Recursively stabilize each operand. */
2670 result
2671 = build1 (code, type,
fc7a823e 2672 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data));
7d7a1fe8
EB
2673 break;
2674
2675 default:
2676 gcc_unreachable ();
2677 }
2678
7d7a1fe8
EB
2679 TREE_READONLY (result) = TREE_READONLY (e);
2680 TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (e);
2681 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
2682
2683 return result;
2684}
2685
2686/* This is equivalent to stabilize_reference in tree.c but we know how to
2687 handle our own nodes and we take extra arguments. FORCE says whether to
fc7a823e
EB
2688 force evaluation of everything in REF. INIT is set to the first arm of
2689 a COMPOUND_EXPR present in REF, if any. */
7d7a1fe8
EB
2690
2691tree
fc7a823e 2692gnat_stabilize_reference (tree ref, bool force, tree *init)
241125b2 2693{
fc7a823e
EB
2694 return
2695 gnat_rewrite_reference (ref, gnat_stabilize_reference_1, &force, init);
241125b2
EB
2696}
2697
2698/* Rewrite reference REF and call FUNC on each expression within REF in the
fc7a823e
EB
2699 process. DATA is passed unmodified to FUNC. INIT is set to the first
2700 arm of a COMPOUND_EXPR present in REF, if any. */
241125b2
EB
2701
2702tree
fc7a823e 2703gnat_rewrite_reference (tree ref, rewrite_fn func, void *data, tree *init)
7d7a1fe8
EB
2704{
2705 tree type = TREE_TYPE (ref);
2706 enum tree_code code = TREE_CODE (ref);
2707 tree result;
2708
7d7a1fe8
EB
2709 switch (code)
2710 {
2711 case CONST_DECL:
2712 case VAR_DECL:
2713 case PARM_DECL:
2714 case RESULT_DECL:
2715 /* No action is needed in this case. */
2716 return ref;
2717
7d7a1fe8
EB
2718 CASE_CONVERT:
2719 case FLOAT_EXPR:
2720 case FIX_TRUNC_EXPR:
60424a41
EB
2721 case REALPART_EXPR:
2722 case IMAGPART_EXPR:
7d7a1fe8
EB
2723 case VIEW_CONVERT_EXPR:
2724 result
2725 = build1 (code, type,
241125b2 2726 gnat_rewrite_reference (TREE_OPERAND (ref, 0), func, data,
fc7a823e 2727 init));
7d7a1fe8
EB
2728 break;
2729
2730 case INDIRECT_REF:
2731 case UNCONSTRAINED_ARRAY_REF:
fc7a823e 2732 result = build1 (code, type, func (TREE_OPERAND (ref, 0), data));
7d7a1fe8
EB
2733 break;
2734
2735 case COMPONENT_REF:
ea292448 2736 result = build3 (COMPONENT_REF, type,
241125b2 2737 gnat_rewrite_reference (TREE_OPERAND (ref, 0), func,
fc7a823e 2738 data, init),
ea292448 2739 TREE_OPERAND (ref, 1), NULL_TREE);
7d7a1fe8
EB
2740 break;
2741
2742 case BIT_FIELD_REF:
2743 result = build3 (BIT_FIELD_REF, type,
241125b2 2744 gnat_rewrite_reference (TREE_OPERAND (ref, 0), func,
fc7a823e 2745 data, init),
ea814c66 2746 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
ee45a32d 2747 REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
7d7a1fe8
EB
2748 break;
2749
2750 case ARRAY_REF:
2751 case ARRAY_RANGE_REF:
7194767c
EB
2752 result
2753 = build4 (code, type,
241125b2 2754 gnat_rewrite_reference (TREE_OPERAND (ref, 0), func, data,
fc7a823e
EB
2755 init),
2756 func (TREE_OPERAND (ref, 1), data),
552cc590 2757 TREE_OPERAND (ref, 2), NULL_TREE);
7d7a1fe8
EB
2758 break;
2759
fc7a823e 2760 case COMPOUND_EXPR:
7c775aca 2761 gcc_assert (!*init);
fc7a823e
EB
2762 *init = TREE_OPERAND (ref, 0);
2763 /* We expect only the pattern built in Call_to_gnu. */
92d5f5ab
EB
2764 gcc_assert (DECL_P (TREE_OPERAND (ref, 1))
2765 || (TREE_CODE (TREE_OPERAND (ref, 1)) == COMPONENT_REF
2766 && DECL_P (TREE_OPERAND (TREE_OPERAND (ref, 1), 0))));
fc7a823e
EB
2767 return TREE_OPERAND (ref, 1);
2768
7d7a1fe8 2769 case CALL_EXPR:
7194767c
EB
2770 {
2771 /* This can only be an atomic load. */
2772 gcc_assert (call_is_atomic_load (ref));
2773
2774 /* An atomic load is an INDIRECT_REF of its first argument. */
2775 tree t = CALL_EXPR_ARG (ref, 0);
2776 if (TREE_CODE (t) == NOP_EXPR)
2777 t = TREE_OPERAND (t, 0);
2778 if (TREE_CODE (t) == ADDR_EXPR)
2779 t = build1 (ADDR_EXPR, TREE_TYPE (t),
241125b2 2780 gnat_rewrite_reference (TREE_OPERAND (t, 0), func, data,
fc7a823e 2781 init));
7194767c 2782 else
fc7a823e 2783 t = func (t, data);
7194767c 2784 t = fold_convert (TREE_TYPE (CALL_EXPR_ARG (ref, 0)), t);
0b3467c4 2785
7194767c
EB
2786 result = build_call_expr (TREE_OPERAND (CALL_EXPR_FN (ref), 0), 2,
2787 t, CALL_EXPR_ARG (ref, 1));
2788 }
7d7a1fe8
EB
2789 break;
2790
2791 case ERROR_MARK:
d747d005
EB
2792 case NULL_EXPR:
2793 return ref;
7d7a1fe8 2794
7d7a1fe8 2795 default:
7194767c 2796 gcc_unreachable ();
7d7a1fe8
EB
2797 }
2798
2799 /* TREE_THIS_VOLATILE and TREE_SIDE_EFFECTS set on the initial expression
2800 may not be sustained across some paths, such as the way via build1 for
2801 INDIRECT_REF. We reset those flags here in the general case, which is
2802 consistent with the GCC version of this routine.
2803
2804 Special care should be taken regarding TREE_SIDE_EFFECTS, because some
2805 paths introduce side-effects where there was none initially (e.g. if a
2806 SAVE_EXPR is built) and we also want to keep track of that. */
2807 TREE_READONLY (result) = TREE_READONLY (ref);
2808 TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (ref);
2809 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
2810
a1c7d797
EB
2811 if (code == INDIRECT_REF
2812 || code == UNCONSTRAINED_ARRAY_REF
2813 || code == ARRAY_REF
2814 || code == ARRAY_RANGE_REF)
3bfc61cf
EB
2815 TREE_THIS_NOTRAP (result) = TREE_THIS_NOTRAP (ref);
2816
7d7a1fe8
EB
2817 return result;
2818}
15bf7d19 2819
ea292448
EB
2820/* This is equivalent to get_inner_reference in expr.c but it returns the
2821 ultimate containing object only if the reference (lvalue) is constant,
2822 i.e. if it doesn't depend on the context in which it is evaluated. */
2823
2824tree
2825get_inner_constant_reference (tree exp)
2826{
2827 while (true)
2828 {
2829 switch (TREE_CODE (exp))
2830 {
2831 case BIT_FIELD_REF:
2832 break;
2833
2834 case COMPONENT_REF:
ea292448
EB
2835 if (!TREE_CONSTANT (DECL_FIELD_OFFSET (TREE_OPERAND (exp, 1))))
2836 return NULL_TREE;
2837 break;
2838
2839 case ARRAY_REF:
2840 case ARRAY_RANGE_REF:
2841 {
552cc590 2842 if (TREE_OPERAND (exp, 2))
ea292448
EB
2843 return NULL_TREE;
2844
2845 tree array_type = TREE_TYPE (TREE_OPERAND (exp, 0));
2846 if (!TREE_CONSTANT (TREE_OPERAND (exp, 1))
2847 || !TREE_CONSTANT (TYPE_MIN_VALUE (TYPE_DOMAIN (array_type)))
2848 || !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (array_type))))
2849 return NULL_TREE;
2850 }
2851 break;
2852
2853 case REALPART_EXPR:
2854 case IMAGPART_EXPR:
2855 case VIEW_CONVERT_EXPR:
2856 break;
2857
2858 default:
2859 goto done;
2860 }
2861
2862 exp = TREE_OPERAND (exp, 0);
2863 }
2864
2865done:
2866 return exp;
2867}
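/* E.g. for EXP = R.F[2], the object R is returned provided that the
   offset of F, the bounds of the array type and its element size are
   all constant; otherwise NULL_TREE is returned.  */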
2868
933a7325
EB
2869/* Return true if EXPR is the addition or the subtraction of a constant and,
2870 if so, set *ADD to the addend, *CST to the constant and *MINUS_P to true
2871 if this is a subtraction. */
2872
2873bool
2874is_simple_additive_expression (tree expr, tree *add, tree *cst, bool *minus_p)
2875{
2876 /* Skip overflow checks. */
2877 if (TREE_CODE (expr) == COND_EXPR
2878 && TREE_CODE (COND_EXPR_THEN (expr)) == COMPOUND_EXPR
2879 && TREE_CODE (TREE_OPERAND (COND_EXPR_THEN (expr), 0)) == CALL_EXPR
2880 && get_callee_fndecl (TREE_OPERAND (COND_EXPR_THEN (expr), 0))
2881 == gnat_raise_decls[CE_Overflow_Check_Failed])
2882 expr = COND_EXPR_ELSE (expr);
2883
2884 if (TREE_CODE (expr) == PLUS_EXPR)
2885 {
2886 if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
2887 {
2888 *add = TREE_OPERAND (expr, 1);
2889 *cst = TREE_OPERAND (expr, 0);
2890 *minus_p = false;
2891 return true;
2892 }
2893 else if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
2894 {
2895 *add = TREE_OPERAND (expr, 0);
2896 *cst = TREE_OPERAND (expr, 1);
2897 *minus_p = false;
2898 return true;
2899 }
2900 }
2901 else if (TREE_CODE (expr) == MINUS_EXPR)
2902 {
2903 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
2904 {
2905 *add = TREE_OPERAND (expr, 0);
2906 *cst = TREE_OPERAND (expr, 1);
2907 *minus_p = true;
2908 return true;
2909 }
2910 }
2911
2912 return false;
2913}
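/* For example, given EXPR = X + 3, this sets *ADD = X, *CST = 3 and
   *MINUS_P = false; given EXPR = X - 3, it also sets *MINUS_P = true.
   An overflow-checked addition of the form

     (overflow ? raise Constraint_Error : X + 3)

   is looked through, so the same decomposition applies to it.  */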
2914
15bf7d19
EB
2915/* If EXPR is an expression that is invariant in the current function, in the
2916 sense that it can be evaluated anywhere in the function and any number of
2917 times, return EXPR or an equivalent expression. Otherwise return NULL. */
2918
2919tree
2920gnat_invariant_expr (tree expr)
2921{
1e3cabd4 2922 tree type = TREE_TYPE (expr);
933a7325
EB
2923 tree add, cst;
2924 bool minus_p;
15bf7d19 2925
722356ce 2926 expr = remove_conversions (expr, false);
15bf7d19 2927
64235766 2928 /* Look through temporaries created to capture values. */
15bf7d19
EB
2929 while ((TREE_CODE (expr) == CONST_DECL
2930 || (TREE_CODE (expr) == VAR_DECL && TREE_READONLY (expr)))
2931 && decl_function_context (expr) == current_function_decl
2932 && DECL_INITIAL (expr))
90b4c164
EB
2933 {
2934 expr = DECL_INITIAL (expr);
2935 /* Look into CONSTRUCTORs built to initialize padded types. */
ad00a297 2936 expr = maybe_padded_object (expr);
90b4c164
EB
2937 expr = remove_conversions (expr, false);
2938 }
15bf7d19 2939
61e0b233
EB
2940 /* We are only interested in scalar types at the moment and, even if we may
2941 have gone through padding types in the above loop, we must be back to a
2942 scalar value at this point. */
2943 if (AGGREGATE_TYPE_P (TREE_TYPE (expr)))
2944 return NULL_TREE;
2945
15bf7d19
EB
2946 if (TREE_CONSTANT (expr))
2947 return fold_convert (type, expr);
2948
64235766 2949 /* Deal with addition or subtraction of constants. */
933a7325 2950 if (is_simple_additive_expression (expr, &add, &cst, &minus_p))
64235766 2951 {
933a7325
EB
2952 add = gnat_invariant_expr (add);
2953 if (add)
f8125f0c 2954 return
933a7325
EB
2955 fold_build2 (minus_p ? MINUS_EXPR : PLUS_EXPR, type,
2956 fold_convert (type, add), fold_convert (type, cst));
64235766
EB
2957 else
2958 return NULL_TREE;
2959 }
2960
2961 bool invariant_p = false;
2962 tree t = expr;
15bf7d19
EB
2963
2964 while (true)
2965 {
2966 switch (TREE_CODE (t))
2967 {
2968 case COMPONENT_REF:
64235766 2969 invariant_p |= DECL_INVARIANT_P (TREE_OPERAND (t, 1));
15bf7d19
EB
2970 break;
2971
2972 case ARRAY_REF:
2973 case ARRAY_RANGE_REF:
552cc590 2974 if (!TREE_CONSTANT (TREE_OPERAND (t, 1)) || TREE_OPERAND (t, 2))
15bf7d19
EB
2975 return NULL_TREE;
2976 break;
2977
2978 case BIT_FIELD_REF:
15bf7d19
EB
2979 case REALPART_EXPR:
2980 case IMAGPART_EXPR:
64235766
EB
2981 case VIEW_CONVERT_EXPR:
2982 CASE_CONVERT:
15bf7d19
EB
2983 break;
2984
2985 case INDIRECT_REF:
64235766 2986 if ((!invariant_p && !TREE_READONLY (t)) || TREE_SIDE_EFFECTS (t))
15bf7d19 2987 return NULL_TREE;
64235766 2988 invariant_p = false;
15bf7d19
EB
2989 break;
2990
2991 default:
2992 goto object;
2993 }
2994
2995 t = TREE_OPERAND (t, 0);
2996 }
2997
2998object:
2999 if (TREE_SIDE_EFFECTS (t))
3000 return NULL_TREE;
3001
3002 if (TREE_CODE (t) == CONST_DECL
3003 && (DECL_EXTERNAL (t)
3004 || decl_function_context (t) != current_function_decl))
3005 return fold_convert (type, expr);
3006
64235766 3007 if (!invariant_p && !TREE_READONLY (t))
15bf7d19
EB
3008 return NULL_TREE;
3009
90b4c164 3010 if (TREE_CODE (t) == PARM_DECL)
15bf7d19
EB
3011 return fold_convert (type, expr);
3012
3013 if (TREE_CODE (t) == VAR_DECL
3014 && (DECL_EXTERNAL (t)
3015 || decl_function_context (t) != current_function_decl))
3016 return fold_convert (type, expr);
3017
3018 return NULL_TREE;
3019}
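/* As an informal example, a reference OBJ.D, where D is a discriminant
   with DECL_INVARIANT_P set and OBJ is a parameter of the current
   function, is returned (converted to TYPE) as invariant, whereas an
   ARRAY_REF with a non-constant index is rejected by returning
   NULL_TREE.  */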