/****************************************************************************
 *                                                                          *
 *                         GNAT COMPILER COMPONENTS                         *
 *                                                                          *
 *                               U T I L S 2                                *
 *                                                                          *
 *                          C Implementation File                           *
 *                                                                          *
 *          Copyright (C) 1992-2023, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software;  you can  redistribute it  and/or modify it under *
 * terms of the  GNU General Public License as published  by the Free Soft- *
 * ware  Foundation;  either version 3,  or (at your option) any later ver- *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY;  without even the  implied warranty of MERCHANTABILITY *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for  more details.  You should have received a copy of the GNU General   *
 * Public License along with GCC; see the file COPYING3.  If not see        *
 * <http://www.gnu.org/licenses/>.                                          *
 *                                                                          *
 * GNAT was originally developed  by the GNAT team at  New York University. *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 *                                                                          *
 ****************************************************************************/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "memmodel.h"
#include "tm.h"
#include "vec.h"
#include "alias.h"
#include "tree.h"
#include "inchash.h"
#include "builtins.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "stringpool.h"
#include "varasm.h"
#include "flags.h"
#include "toplev.h"
#include "ggc.h"
#include "tree-inline.h"

#include "ada.h"
#include "types.h"
#include "atree.h"
#include "elists.h"
#include "namet.h"
#include "nlists.h"
#include "snames.h"
#include "stringt.h"
#include "uintp.h"
#include "fe.h"
#include "sinfo.h"
#include "einfo.h"
#include "ada-tree.h"
#include "gigi.h"

/* Return the base type of TYPE.  */

tree
get_base_type (tree type)
{
  if (TREE_CODE (type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (type))
    type = TREE_TYPE (TYPE_FIELDS (type));

  while (TREE_TYPE (type)
         && (TREE_CODE (type) == INTEGER_TYPE
             || SCALAR_FLOAT_TYPE_P (type)))
    type = TREE_TYPE (type);

  return type;
}
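
/* E.g. for an integer subtype S of Natural, itself a subtype of Integer,
   gigi links each subtype to its parent via TREE_TYPE, so the loop above
   climbs S -> Natural -> Integer and returns the unconstrained base type.
   This is only an illustrative reading of the loop, not an extra case.  */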

/* EXP is a GCC tree representing an address.  See if we can find how strictly
   the object at this address is aligned and, if so, return the alignment of
   the object in bits.  Otherwise return 0.  */

unsigned int
known_alignment (tree exp)
{
  unsigned int this_alignment;
  unsigned int lhs, rhs;

  switch (TREE_CODE (exp))
    {
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Conversions between pointers and integers don't change the alignment
         of the underlying object.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 0));
      break;

    case COMPOUND_EXPR:
      /* The value of a COMPOUND_EXPR is that of its second operand.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 1));
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* If two addresses are added, the alignment of the result is the
         minimum of the two alignments.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));
      this_alignment = MIN (lhs, rhs);
      break;

    case POINTER_PLUS_EXPR:
      /* If this is the pattern built for aligning types, decode it.  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)) == NEGATE_EXPR)
        {
          tree op = TREE_OPERAND (TREE_OPERAND (exp, 1), 1);
          return
            known_alignment (fold_build1 (BIT_NOT_EXPR, TREE_TYPE (op), op));
        }

      /* If we don't know the alignment of the offset, we assume that
         of the base.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));

      if (rhs == 0)
        this_alignment = lhs;
      else
        this_alignment = MIN (lhs, rhs);
      break;

    case COND_EXPR:
      /* If there is a choice between two values, use the smaller one.  */
      lhs = known_alignment (TREE_OPERAND (exp, 1));
      rhs = known_alignment (TREE_OPERAND (exp, 2));
      this_alignment = MIN (lhs, rhs);
      break;

    case INTEGER_CST:
      {
        unsigned HOST_WIDE_INT c = TREE_INT_CST_LOW (exp);
        /* The first part of this represents the lowest bit in the constant,
           but it is originally in bytes, not bits.  */
        this_alignment = (c & -c) * BITS_PER_UNIT;
      }
      break;

    case MULT_EXPR:
      /* If we know the alignment of just one side, use it.  Otherwise,
         use the product of the alignments.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));

      if (lhs == 0)
        this_alignment = rhs;
      else if (rhs == 0)
        this_alignment = lhs;
      else
        this_alignment = MIN (lhs * rhs, BIGGEST_ALIGNMENT);
      break;

    case BIT_AND_EXPR:
      /* A bit-and expression is as aligned as the maximum alignment of the
         operands.  We typically get here for a complex lhs and a constant
         negative power of two on the rhs to force an explicit alignment, so
         don't bother looking at the lhs.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 1));
      break;

    case ADDR_EXPR:
      if (DECL_P (TREE_OPERAND (exp, 0)))
        this_alignment = DECL_ALIGN (TREE_OPERAND (exp, 0));
      else
        this_alignment = get_object_alignment (TREE_OPERAND (exp, 0));
      break;

    case CALL_EXPR:
      {
        tree fndecl = get_callee_fndecl (exp);
        if (fndecl == malloc_decl || fndecl == realloc_decl)
          return get_target_system_allocator_alignment () * BITS_PER_UNIT;

        tree t = maybe_inline_call_in_expr (exp);
        if (t)
          return known_alignment (t);
      }

      /* ... fall through ... */

    default:
      /* For other pointer expressions, we assume that the pointed-to object
         is at least as aligned as the pointed-to type.  Beware that we can
         have a dummy type here (e.g. a Taft Amendment type), for which the
         alignment is meaningless and should be ignored.  */
      if (POINTER_TYPE_P (TREE_TYPE (exp))
          && !TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp)))
          && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (exp))))
        this_alignment = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
      else
        this_alignment = 0;
      break;
    }

  return this_alignment;
}
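
/* A note on the POINTER_PLUS_EXPR case above: the aligning pattern matched
   there is of the form

     addr + (-addr & (align - 1))

   with ALIGN a power of 2 expressed in bytes, which rounds ADDR up to the
   next multiple of ALIGN.  Decoding it amounts to taking the alignment of
   ~(align - 1), whose lowest set bit is precisely ALIGN; the INTEGER_CST
   case then scales that byte value by BITS_PER_UNIT.  */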

/* We have a comparison or assignment operation on two types, T1 and T2, which
   are either both array types or both record types.  T1 is assumed to be for
   the left hand side operand, and T2 for the right hand side.  Return the
   type that both operands should be converted to for the operation, if any.
   Otherwise return zero.  */

static tree
find_common_type (tree t1, tree t2)
{
  /* ??? As of today, various constructs lead to here with types of different
     sizes even when both constants (e.g. tagged types, packable vs regular
     component types, padded vs unpadded types, ...).  While some of these
     would better be handled upstream (types should be made consistent before
     calling into build_binary_op), some others are really expected and we
     have to be careful.  */

  const bool variable_record_on_lhs
    = (TREE_CODE (t1) == RECORD_TYPE
       && TREE_CODE (t2) == RECORD_TYPE
       && get_variant_part (t1)
       && !get_variant_part (t2));

  const bool variable_array_on_lhs
    = (TREE_CODE (t1) == ARRAY_TYPE
       && TREE_CODE (t2) == ARRAY_TYPE
       && !TREE_CONSTANT (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)))
       && TREE_CONSTANT (TYPE_MIN_VALUE (TYPE_DOMAIN (t2))));

  /* We must avoid writing more than what the target can hold if this is for
     an assignment, and the case of tagged types is handled in build_binary_op,
     so we use the lhs type if it is known to be smaller or of constant size
     and the rhs type is not, whatever the modes.  We also force t1 in case of
     constant size equality to minimize occurrences of view conversions on the
     lhs of an assignment, except for the case of types with a variable part
     on the lhs but not on the rhs, to make the conversion simpler.  */
  if (TREE_CONSTANT (TYPE_SIZE (t1))
      && (!TREE_CONSTANT (TYPE_SIZE (t2))
          || tree_int_cst_lt (TYPE_SIZE (t1), TYPE_SIZE (t2))
          || (TYPE_SIZE (t1) == TYPE_SIZE (t2)
              && !variable_record_on_lhs
              && !variable_array_on_lhs)))
    return t1;

  /* Otherwise, if the lhs type is non-BLKmode, use it, except for the case of
     a non-BLKmode rhs and array types with a variable part on the lhs but not
     on the rhs, to make sure the conversion is preserved during gimplification.
     Note that we know that we will not have any alignment problems since, if
     we did, the non-BLKmode type could not have been used.  */
  if (TYPE_MODE (t1) != BLKmode
      && (TYPE_MODE (t2) == BLKmode || !variable_array_on_lhs))
    return t1;

  /* If the rhs type is of constant size, use it whatever the modes.  At
     this point it is known to be smaller, or of constant size and the
     lhs type is not.  */
  if (TREE_CONSTANT (TYPE_SIZE (t2)))
    return t2;

  /* Otherwise, if the rhs type is non-BLKmode, use it.  */
  if (TYPE_MODE (t2) != BLKmode)
    return t2;

  /* In this case, both types have variable size and BLKmode.  It's
     probably best to leave the "type mismatch" because changing it
     could cause a bad self-referential reference.  */
  return NULL_TREE;
}
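
/* For example, for an assignment whose lhs type has variable size and whose
   rhs type has constant size, the tests above end up returning T2: copying
   with the constant-size view guarantees the move never writes more than
   that known amount.  This is an illustrative reading of the tests, under
   the assumption that the lhs type is BLKmode.  */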
ce2d0ce2 274
ced57283
EB
275/* Return an expression tree representing an equality comparison of A1 and A2,
276 two objects of type ARRAY_TYPE. The result should be of type RESULT_TYPE.
a1ab4c31 277
ced57283
EB
278 Two arrays are equal in one of two ways: (1) if both have zero length in
279 some dimension (not necessarily the same dimension) or (2) if the lengths
280 in each dimension are equal and the data is equal. We perform the length
281 tests in as efficient a manner as possible. */
a1ab4c31
AC
282
283static tree
6532e8a0 284compare_arrays (location_t loc, tree result_type, tree a1, tree a2)
a1ab4c31 285{
1139f2e8
EB
286 tree result = convert (result_type, boolean_true_node);
287 tree a1_is_null = convert (result_type, boolean_false_node);
288 tree a2_is_null = convert (result_type, boolean_false_node);
a1ab4c31
AC
289 tree t1 = TREE_TYPE (a1);
290 tree t2 = TREE_TYPE (a2);
ced57283
EB
291 bool a1_side_effects_p = TREE_SIDE_EFFECTS (a1);
292 bool a2_side_effects_p = TREE_SIDE_EFFECTS (a2);
a1ab4c31
AC
293 bool length_zero_p = false;
294
881cdd61
EB
295 /* If the operands have side-effects, they need to be evaluated only once
296 in spite of the multiple references in the comparison. */
ced57283 297 if (a1_side_effects_p)
7d7a1fe8 298 a1 = gnat_protect_expr (a1);
ced57283
EB
299
300 if (a2_side_effects_p)
7d7a1fe8 301 a2 = gnat_protect_expr (a2);
ced57283 302
a1ab4c31 303 /* Process each dimension separately and compare the lengths. If any
1081f5a7
EB
304 dimension has a length known to be zero, set LENGTH_ZERO_P to true
305 in order to suppress the comparison of the data at the end. */
a1ab4c31
AC
306 while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
307 {
683ccd05
EB
308 tree dom1 = TYPE_DOMAIN (t1);
309 tree dom2 = TYPE_DOMAIN (t2);
310 tree length1 = size_binop (PLUS_EXPR,
311 size_binop (MINUS_EXPR,
312 TYPE_MAX_VALUE (dom1),
313 TYPE_MIN_VALUE (dom1)),
1081f5a7 314 size_one_node);
683ccd05
EB
315 tree length2 = size_binop (PLUS_EXPR,
316 size_binop (MINUS_EXPR,
317 TYPE_MAX_VALUE (dom2),
318 TYPE_MIN_VALUE (dom2)),
1081f5a7 319 size_one_node);
683ccd05
EB
320 tree ind1 = TYPE_INDEX_TYPE (dom1);
321 tree ind2 = TYPE_INDEX_TYPE (dom2);
322 tree base_type = maybe_character_type (get_base_type (ind1));
323 tree lb1 = convert (base_type, TYPE_MIN_VALUE (ind1));
324 tree ub1 = convert (base_type, TYPE_MAX_VALUE (ind1));
325 tree lb2 = convert (base_type, TYPE_MIN_VALUE (ind2));
326 tree ub2 = convert (base_type, TYPE_MAX_VALUE (ind2));
a1ab4c31
AC
327 tree comparison, this_a1_is_null, this_a2_is_null;
328
683ccd05
EB
329 /* If the length of the first array is a constant and that of the second
330 array is not, swap our operands to have the constant second. */
331 if (TREE_CODE (length1) == INTEGER_CST
332 && TREE_CODE (length2) != INTEGER_CST)
a1ab4c31 333 {
1081f5a7
EB
334 tree tem;
335 bool btem;
336
a1ab4c31
AC
337 tem = a1, a1 = a2, a2 = tem;
338 tem = t1, t1 = t2, t2 = tem;
339 tem = lb1, lb1 = lb2, lb2 = tem;
340 tem = ub1, ub1 = ub2, ub2 = tem;
341 tem = length1, length1 = length2, length2 = tem;
342 tem = a1_is_null, a1_is_null = a2_is_null, a2_is_null = tem;
ced57283
EB
343 btem = a1_side_effects_p, a1_side_effects_p = a2_side_effects_p,
344 a2_side_effects_p = btem;
a1ab4c31
AC
345 }
346
1081f5a7
EB
347 /* If the length of the second array is the constant zero, we can just
348 use the original stored bounds for the first array and see whether
349 last < first holds. */
350 if (integer_zerop (length2))
a1ab4c31 351 {
1081f5a7 352 length_zero_p = true;
a1ab4c31 353
683ccd05
EB
354 lb1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (lb1, a1);
355 ub1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (ub1, a1);
1081f5a7 356
6532e8a0 357 comparison = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);
1fc24649 358 if (EXPR_P (comparison))
6532e8a0 359 SET_EXPR_LOCATION (comparison, loc);
1fc24649 360
a1ab4c31 361 this_a1_is_null = comparison;
1139f2e8 362 this_a2_is_null = convert (result_type, boolean_true_node);
a1ab4c31
AC
363 }
364
1081f5a7
EB
365 /* Otherwise, if the length is some other constant value, we know that
366 this dimension in the second array cannot be superflat, so we can
367 just use its length computed from the actual stored bounds. */
a1ab4c31
AC
368 else if (TREE_CODE (length2) == INTEGER_CST)
369 {
683ccd05 370 /* Note that we know that LB2 and UB2 are constant and hence
1fc24649 371 cannot contain a PLACEHOLDER_EXPR. */
683ccd05
EB
372 lb1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (lb1, a1);
373 ub1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (ub1, a1);
a1ab4c31
AC
374
375 comparison
6532e8a0 376 = fold_build2_loc (loc, EQ_EXPR, result_type,
683ccd05
EB
377 build_binary_op (MINUS_EXPR, base_type,
378 ub1, lb1),
379 build_binary_op (MINUS_EXPR, base_type,
380 ub2, lb2));
1fc24649 381 if (EXPR_P (comparison))
6532e8a0 382 SET_EXPR_LOCATION (comparison, loc);
1fc24649 383
6532e8a0
EB
384 this_a1_is_null
385 = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);
1081f5a7 386
1139f2e8 387 this_a2_is_null = convert (result_type, boolean_false_node);
a1ab4c31
AC
388 }
389
1081f5a7 390 /* Otherwise, compare the computed lengths. */
a1ab4c31
AC
391 else
392 {
393 length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
394 length2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length2, a2);
395
396 comparison
6532e8a0 397 = fold_build2_loc (loc, EQ_EXPR, result_type, length1, length2);
683ccd05
EB
398 if (EXPR_P (comparison))
399 SET_EXPR_LOCATION (comparison, loc);
a1ab4c31 400
683ccd05
EB
401 lb1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (lb1, a1);
402 ub1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (ub1, a1);
403
404 this_a1_is_null
405 = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);
406
407 lb2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (lb2, a2);
408 ub2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (ub2, a2);
409
410 this_a2_is_null
411 = fold_build2_loc (loc, LT_EXPR, result_type, ub2, lb2);
a1ab4c31
AC
412 }
413
1081f5a7 414 /* Append expressions for this dimension to the final expressions. */
a1ab4c31
AC
415 result = build_binary_op (TRUTH_ANDIF_EXPR, result_type,
416 result, comparison);
417
418 a1_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
419 this_a1_is_null, a1_is_null);
1081f5a7 420
a1ab4c31
AC
421 a2_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
422 this_a2_is_null, a2_is_null);
423
424 t1 = TREE_TYPE (t1);
425 t2 = TREE_TYPE (t2);
426 }
427
1081f5a7 428 /* Unless the length of some dimension is known to be zero, compare the
a1ab4c31
AC
429 data in the array. */
430 if (!length_zero_p)
431 {
432 tree type = find_common_type (TREE_TYPE (a1), TREE_TYPE (a2));
1fc24649 433 tree comparison;
a1ab4c31
AC
434
435 if (type)
ced57283
EB
436 {
437 a1 = convert (type, a1),
438 a2 = convert (type, a2);
439 }
a1ab4c31 440
6532e8a0 441 comparison = fold_build2_loc (loc, EQ_EXPR, result_type, a1, a2);
1fc24649
EB
442
443 result
444 = build_binary_op (TRUTH_ANDIF_EXPR, result_type, result, comparison);
a1ab4c31
AC
445 }
446
447 /* The result is also true if both sizes are zero. */
448 result = build_binary_op (TRUTH_ORIF_EXPR, result_type,
449 build_binary_op (TRUTH_ANDIF_EXPR, result_type,
450 a1_is_null, a2_is_null),
451 result);
452
881cdd61
EB
453 /* If the operands have side-effects, they need to be evaluated before
454 doing the tests above since the place they otherwise would end up
455 being evaluated at run time could be wrong. */
ced57283 456 if (a1_side_effects_p)
a1ab4c31
AC
457 result = build2 (COMPOUND_EXPR, result_type, a1, result);
458
ced57283 459 if (a2_side_effects_p)
a1ab4c31
AC
460 result = build2 (COMPOUND_EXPR, result_type, a2, result);
461
462 return result;
463}
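
/* As an illustration, for an Ada comparison S1 = S2 (1 .. 3) with S1 and S2
   of type String, the second dimension-handling branch above applies since
   the length 3 is a nonzero constant: a single length test built from the
   actual stored bounds of S1 guards the comparison of the data, and the
   whole expression is further guarded so that two null arrays compare
   equal.  This is a sketch of the generated shape, not a literal dump.  */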

/* Return an expression tree representing an equality comparison of P1 and P2,
   two objects of fat pointer type.  The result should be of type RESULT_TYPE.

   Two fat pointers are equal in one of two ways: (1) if both have a null
   pointer to the array or (2) if they contain the same pair of pointers.
   We perform the comparison in as efficient a manner as possible.  */

static tree
compare_fat_pointers (location_t loc, tree result_type, tree p1, tree p2)
{
  tree p1_array, p2_array, p1_bounds, p2_bounds, same_array, same_bounds;
  tree p1_array_is_null, p2_array_is_null;

  /* If either operand has side-effects, they have to be evaluated only once
     in spite of the multiple references to the operand in the comparison.  */
  p1 = gnat_protect_expr (p1);
  p2 = gnat_protect_expr (p2);

  /* The constant folder doesn't fold fat pointer types so we do it here.  */
  if (TREE_CODE (p1) == CONSTRUCTOR)
    p1_array = CONSTRUCTOR_ELT (p1, 0)->value;
  else
    p1_array = build_component_ref (p1, TYPE_FIELDS (TREE_TYPE (p1)), true);

  p1_array_is_null
    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array,
                       fold_convert_loc (loc, TREE_TYPE (p1_array),
                                         null_pointer_node));

  if (TREE_CODE (p2) == CONSTRUCTOR)
    p2_array = CONSTRUCTOR_ELT (p2, 0)->value;
  else
    p2_array = build_component_ref (p2, TYPE_FIELDS (TREE_TYPE (p2)), true);

  p2_array_is_null
    = fold_build2_loc (loc, EQ_EXPR, result_type, p2_array,
                       fold_convert_loc (loc, TREE_TYPE (p2_array),
                                         null_pointer_node));

  /* If one of the pointers to the array is null, just compare the other.  */
  if (integer_zerop (p1_array))
    return p2_array_is_null;
  else if (integer_zerop (p2_array))
    return p1_array_is_null;

  /* Otherwise, do the fully-fledged comparison.  */
  same_array
    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array, p2_array);

  if (TREE_CODE (p1) == CONSTRUCTOR)
    p1_bounds = CONSTRUCTOR_ELT (p1, 1)->value;
  else
    p1_bounds
      = build_component_ref (p1, DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p1))),
                             true);

  if (TREE_CODE (p2) == CONSTRUCTOR)
    p2_bounds = CONSTRUCTOR_ELT (p2, 1)->value;
  else
    p2_bounds
      = build_component_ref (p2, DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p2))),
                             true);

  same_bounds
    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_bounds, p2_bounds);

  /* P1_ARRAY == P2_ARRAY && (P1_ARRAY == NULL || P1_BOUNDS == P2_BOUNDS).  */
  return build_binary_op (TRUTH_ANDIF_EXPR, result_type, same_array,
                          build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                           p1_array_is_null, same_bounds));
}
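
/* In other words, for fat pointers made up of an array pointer and a bounds
   pointer, the generated expression is roughly

     P1.array == P2.array
       && (P1.array == null || P1.bounds == P2.bounds)

   so that two fat pointers with a null array pointer compare equal whatever
   their bounds pointers contain.  */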

/* Compute the result of applying OP_CODE to LHS and RHS, where both are of
   type TYPE.  We know that TYPE is a modular type with a nonbinary
   modulus.  */

static tree
nonbinary_modular_operation (enum tree_code op_code, tree type, tree lhs,
                             tree rhs)
{
  tree modulus = TYPE_MODULUS (type);
  unsigned int needed_precision = tree_floor_log2 (modulus) + 1;
  unsigned int precision;
  bool unsignedp = true;
  tree op_type = type;
  tree result;

  /* If this is an addition of a constant, convert it to a subtraction
     of a constant since we can do that faster.  */
  if (op_code == PLUS_EXPR && TREE_CODE (rhs) == INTEGER_CST)
    {
      rhs = fold_build2 (MINUS_EXPR, type, modulus, rhs);
      op_code = MINUS_EXPR;
    }

  /* For the logical operations, we only need PRECISION bits.  For
     addition and subtraction, we need one more and for multiplication we
     need twice as many.  But we never want to make a size smaller than
     our size.  */
  if (op_code == PLUS_EXPR || op_code == MINUS_EXPR)
    needed_precision += 1;
  else if (op_code == MULT_EXPR)
    needed_precision *= 2;

  precision = MAX (needed_precision, TYPE_PRECISION (op_type));

  /* Unsigned will do for everything but subtraction.  */
  if (op_code == MINUS_EXPR)
    unsignedp = false;

  /* If our type is the wrong signedness or isn't wide enough, make a new
     type and convert both our operands to it.  */
  if (TYPE_PRECISION (op_type) < precision
      || TYPE_UNSIGNED (op_type) != unsignedp)
    {
      /* Copy the type so we ensure it can be modified to make it modular.  */
      op_type = copy_type (gnat_type_for_size (precision, unsignedp));
      modulus = convert (op_type, modulus);
      SET_TYPE_MODULUS (op_type, modulus);
      TYPE_MODULAR_P (op_type) = 1;
      lhs = convert (op_type, lhs);
      rhs = convert (op_type, rhs);
    }

  /* Do the operation, then we'll fix it up.  */
  result = fold_build2 (op_code, op_type, lhs, rhs);

  /* For multiplication, we have no choice but to do a full modulus
     operation.  However, we want to do this in the narrowest
     possible size.  */
  if (op_code == MULT_EXPR)
    {
      /* Copy the type so we ensure it can be modified to make it modular.  */
      tree div_type = copy_type (gnat_type_for_size (needed_precision, 1));
      modulus = convert (div_type, modulus);
      SET_TYPE_MODULUS (div_type, modulus);
      TYPE_MODULAR_P (div_type) = 1;
      result = convert (op_type,
                        fold_build2 (TRUNC_MOD_EXPR, div_type,
                                     convert (div_type, result), modulus));
    }

  /* For subtraction, add the modulus back if we are negative.  */
  else if (op_code == MINUS_EXPR)
    {
      result = gnat_protect_expr (result);
      result = fold_build3 (COND_EXPR, op_type,
                            fold_build2 (LT_EXPR, boolean_type_node, result,
                                         build_int_cst (op_type, 0)),
                            fold_build2 (PLUS_EXPR, op_type, result, modulus),
                            result);
    }

  /* For the other operations, subtract the modulus if we are >= it.  */
  else
    {
      result = gnat_protect_expr (result);
      result = fold_build3 (COND_EXPR, op_type,
                            fold_build2 (GE_EXPR, boolean_type_node,
                                         result, modulus),
                            fold_build2 (MINUS_EXPR, op_type,
                                         result, modulus),
                            result);
    }

  return convert (type, result);
}
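
/* A worked example, assuming a modular type with modulus 7: for X + 3, the
   addition is first rewritten as X - 4, i.e. X - (7 - 3), and the MINUS
   path then conditionally adds the modulus back.  So for X = 2 we compute
   2 - 4 = -2, and since -2 < 0 the result is -2 + 7 = 5, which matches
   (2 + 3) mod 7 computed directly.  */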

/* This page contains routines that implement the Ada semantics with regard
   to atomic objects.  They are fully piggybacked on the middle-end support
   for atomic loads and stores.

   *** Memory barriers and volatile objects ***

   We implement the weakened form of the C.6(16) clause that was introduced
   in Ada 2012 (AI05-117).  Earlier forms of this clause wouldn't have been
   implementable without significant performance hits on modern platforms.

   We also take advantage of the requirements imposed on shared variables by
   9.10 (conditions for sequential actions) to have non-erroneous execution
   and consider that C.6(16) and C.6(17) only prescribe a uniform order of
   volatile updates with regard to sequential actions, i.e. with regard to
   reads or updates of atomic objects.

   As such, an update of an atomic object by a task requires that all earlier
   accesses to volatile objects have completed.  Similarly, later accesses to
   volatile objects cannot be reordered before the update of the atomic object.
   So, memory barriers both before and after the atomic update are needed.

   For a read of an atomic object, to avoid seeing writes of volatile objects
   by a task earlier than by the other tasks, a memory barrier is needed before
   the atomic read.  Finally, to avoid reordering later reads or updates of
   volatile objects to before the atomic read, a barrier is needed after the
   atomic read.

   So, memory barriers are needed before and after atomic reads and updates.
   And, in order to simplify the implementation, we use full memory barriers
   in all cases, i.e. we enforce sequential consistency for atomic accesses.  */

/* Return the size of TYPE, which must be a positive power of 2.  */

unsigned int
resolve_atomic_size (tree type)
{
  unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE_UNIT (type));

  if (size == 1 || size == 2 || size == 4 || size == 8 || size == 16)
    return size;

  /* We shouldn't reach here without having already detected that the size
     isn't compatible with an atomic access.  */
  gcc_assert (Serious_Errors_Detected);

  return 0;
}

/* Build an atomic load for the underlying atomic object in SRC.  SYNC is
   true if the load requires synchronization.  */

tree
build_atomic_load (tree src, bool sync)
{
  tree ptr_type
    = build_pointer_type
      (build_qualified_type (void_type_node,
                             TYPE_QUAL_ATOMIC | TYPE_QUAL_VOLATILE));
  tree mem_model
    = build_int_cst (integer_type_node,
                     sync ? MEMMODEL_SEQ_CST : MEMMODEL_RELAXED);
  tree orig_src = src;
  tree type, t, addr, val;
  unsigned int size;
  int fncode;

  /* Remove conversions to get the address of the underlying object.  */
  src = remove_conversions (src, false);
  type = TREE_TYPE (src);
  size = resolve_atomic_size (type);
  if (size == 0)
    return orig_src;

  fncode = (int) BUILT_IN_ATOMIC_LOAD_N + exact_log2 (size) + 1;
  t = builtin_decl_implicit ((enum built_in_function) fncode);

  addr = build_unary_op (ADDR_EXPR, ptr_type, src);
  val = build_call_expr (t, 2, addr, mem_model);

  /* First reinterpret the loaded bits in the original type of the load,
     then convert to the expected result type.  */
  t = fold_build1 (VIEW_CONVERT_EXPR, type, val);
  return convert (TREE_TYPE (orig_src), t);
}
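
/* For instance, loading a 4-byte atomic object X with SYNC set builds the
   equivalent of

     VIEW_CONVERT_EXPR<type> (__atomic_load_4 (&X, __ATOMIC_SEQ_CST))

   since the FNCODE computation above selects among the __atomic_load_1,
   _2, _4, _8 and _16 builtins according to the size.  This is a sketch of
   the tree produced, not a literal dump.  */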

/* Build an atomic store from SRC to the underlying atomic object in DEST.
   SYNC is true if the store requires synchronization.  */

tree
build_atomic_store (tree dest, tree src, bool sync)
{
  tree ptr_type
    = build_pointer_type
      (build_qualified_type (void_type_node,
                             TYPE_QUAL_ATOMIC | TYPE_QUAL_VOLATILE));
  tree mem_model
    = build_int_cst (integer_type_node,
                     sync ? MEMMODEL_SEQ_CST : MEMMODEL_RELAXED);
  tree orig_dest = dest;
  tree type, t, int_type, addr;
  unsigned int size;
  int fncode;

  /* Remove conversions to get the address of the underlying object.  */
  dest = remove_conversions (dest, false);
  type = TREE_TYPE (dest);
  size = resolve_atomic_size (type);
  if (size == 0)
    return build_binary_op (MODIFY_EXPR, NULL_TREE, orig_dest, src);

  fncode = (int) BUILT_IN_ATOMIC_STORE_N + exact_log2 (size) + 1;
  t = builtin_decl_implicit ((enum built_in_function) fncode);
  int_type = gnat_type_for_size (BITS_PER_UNIT * size, 1);

  /* First convert the bits to be stored to the original type of the store,
     then reinterpret them in the effective type.  But if the original type
     is a padded type with the same size, convert to the inner type instead,
     as we don't want to artificially introduce a CONSTRUCTOR here.  */
  if (TYPE_IS_PADDING_P (type)
      && TYPE_SIZE (type) == TYPE_SIZE (TREE_TYPE (TYPE_FIELDS (type))))
    src = convert (TREE_TYPE (TYPE_FIELDS (type)), src);
  else
    src = convert (type, src);
  src = fold_build1 (VIEW_CONVERT_EXPR, int_type, src);
  addr = build_unary_op (ADDR_EXPR, ptr_type, dest);

  return build_call_expr (t, 3, addr, src, mem_model);
}
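
/* Likewise, storing VAL into a 4-byte atomic object X with SYNC set builds
   the equivalent of

     __atomic_store_4 (&X, VIEW_CONVERT_EXPR<unsigned_int> (VAL),
                       __ATOMIC_SEQ_CST)

   where the VIEW_CONVERT_EXPR reinterprets the bits in the 32-bit integer
   type expected by the builtin (again a sketch, not a literal dump).  */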

/* Build a load-modify-store sequence from SRC to DEST.  GNAT_NODE is used for
   the location of the sequence.  Note that, even though the load and the store
   are both atomic, the sequence itself is not atomic.  */

tree
build_load_modify_store (tree dest, tree src, Node_Id gnat_node)
{
  /* We will be modifying DEST below so we build a copy.  */
  dest = copy_node (dest);
  tree ref = dest;

  while (handled_component_p (ref))
    {
      /* The load should already have been generated during the translation
         of the GNAT destination tree; find it in the GNU tree.  */
      if (TREE_CODE (TREE_OPERAND (ref, 0)) == VIEW_CONVERT_EXPR)
        {
          tree op = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
          if (TREE_CODE (op) == CALL_EXPR && call_is_atomic_load (op))
            {
              tree type = TREE_TYPE (TREE_OPERAND (ref, 0));
              tree t = CALL_EXPR_ARG (op, 0);
              tree obj, temp, stmt;

              /* Find out the loaded object.  */
              if (TREE_CODE (t) == NOP_EXPR)
                t = TREE_OPERAND (t, 0);
              if (TREE_CODE (t) == ADDR_EXPR)
                obj = TREE_OPERAND (t, 0);
              else
                obj = build1 (INDIRECT_REF, type, t);

              /* Drop atomic and volatile qualifiers for the temporary.  */
              type = TYPE_MAIN_VARIANT (type);

              /* And drop BLKmode, if need be, to put it into a register.  */
              if (TYPE_MODE (type) == BLKmode)
                {
                  unsigned int size = tree_to_uhwi (TYPE_SIZE (type));
                  type = copy_type (type);
                  machine_mode mode = int_mode_for_size (size, 0).else_blk ();
                  SET_TYPE_MODE (type, mode);
                }

              /* Create the temporary by inserting a SAVE_EXPR.  */
              temp = build1 (SAVE_EXPR, type,
                             build1 (VIEW_CONVERT_EXPR, type, op));
              TREE_OPERAND (ref, 0) = temp;

              start_stmt_group ();

              /* Build the modify of the temporary.  */
              stmt = build_binary_op (MODIFY_EXPR, NULL_TREE, dest, src);
              add_stmt_with_node (stmt, gnat_node);

              /* Build the store to the object.  */
              stmt = build_atomic_store (obj, temp, false);
              add_stmt_with_node (stmt, gnat_node);

              return end_stmt_group ();
            }
        }

      TREE_OPERAND (ref, 0) = copy_node (TREE_OPERAND (ref, 0));
      ref = TREE_OPERAND (ref, 0);
    }

  /* Something went wrong earlier if we have not found the atomic load.  */
  gcc_unreachable ();
}
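
/* The statement group built above for an update of a component of an atomic
   object OBJ is thus equivalent, in pseudo-code, to

     TEMP := atomic load of OBJ;
     TEMP.component := SRC;
     atomic store of TEMP into OBJ;

   which makes each access to OBJ atomic but, as documented, not the whole
   read-modify-write sequence.  */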

/* Make a binary operation of kind OP_CODE.  RESULT_TYPE is the type
   desired for the result.  Usually the operation is to be performed
   in that type.  For INIT_EXPR and MODIFY_EXPR, RESULT_TYPE must be
   NULL_TREE.  For ARRAY_REF, RESULT_TYPE may be NULL_TREE, in which
   case the type to be used will be derived from the operands.
   Don't fold the result if NO_FOLD is true.

   This function is very much unlike the ones for C and C++ since we
   have already done any type conversion and matching required.  All we
   have to do here is validate the work done by SEM and handle subtypes.  */

tree
build_binary_op (enum tree_code op_code, tree result_type,
                 tree left_operand, tree right_operand,
                 bool no_fold)
{
  tree left_type = TREE_TYPE (left_operand);
  tree right_type = TREE_TYPE (right_operand);
  tree left_base_type = get_base_type (left_type);
  tree right_base_type = get_base_type (right_type);
  tree operation_type = result_type;
  tree best_type = NULL_TREE;
  tree modulus, result;
  bool has_side_effects = false;

  if (operation_type
      && TREE_CODE (operation_type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (operation_type))
    operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));

  if (operation_type && TYPE_IS_EXTRA_SUBTYPE_P (operation_type))
    operation_type = get_base_type (operation_type);

  modulus = (operation_type
             && TREE_CODE (operation_type) == INTEGER_TYPE
             && TYPE_MODULAR_P (operation_type)
             ? TYPE_MODULUS (operation_type) : NULL_TREE);

  switch (op_code)
    {
    case INIT_EXPR:
    case MODIFY_EXPR:
      gcc_checking_assert (!result_type);

      /* If there were integral or pointer conversions on the LHS, remove
         them; we'll be putting them back below if needed.  Likewise for
         conversions between record types, except for justified modular types.
         But don't do this if the right operand is not BLKmode (for packed
         arrays) unless we are not changing the mode.  */
      while ((CONVERT_EXPR_P (left_operand)
              || TREE_CODE (left_operand) == VIEW_CONVERT_EXPR)
             && (((INTEGRAL_TYPE_P (left_type)
                   || POINTER_TYPE_P (left_type))
                  && (INTEGRAL_TYPE_P (operand_type (left_operand))
                      || POINTER_TYPE_P (operand_type (left_operand))))
                 || (TREE_CODE (left_type) == RECORD_TYPE
                     && !TYPE_JUSTIFIED_MODULAR_P (left_type)
                     && TREE_CODE (operand_type (left_operand)) == RECORD_TYPE
                     && (TYPE_MODE (right_type) == BLKmode
                         || TYPE_MODE (left_type)
                            == TYPE_MODE (operand_type (left_operand))))))
        {
          left_operand = TREE_OPERAND (left_operand, 0);
          left_type = TREE_TYPE (left_operand);
        }

      /* If a class-wide type may be involved, force use of the RHS type.  */
      if ((TREE_CODE (right_type) == RECORD_TYPE
           || TREE_CODE (right_type) == UNION_TYPE)
          && TYPE_ALIGN_OK (right_type))
        operation_type = right_type;

      /* If we are copying between padded objects with compatible types, use
         the padded view of the objects, this is very likely more efficient.
         Likewise for a padded object that is assigned a constructor, if we
         can convert the constructor to the inner type, to avoid putting a
         VIEW_CONVERT_EXPR on the LHS.  But don't do so if we wouldn't have
         actually copied anything.  */
      else if (TYPE_IS_PADDING_P (left_type)
               && TREE_CONSTANT (TYPE_SIZE (left_type))
               && ((TREE_CODE (right_operand) == COMPONENT_REF
                    && TYPE_MAIN_VARIANT (left_type)
                       == TYPE_MAIN_VARIANT (operand_type (right_operand)))
                   || (TREE_CODE (right_operand) == CONSTRUCTOR
                       && !CONTAINS_PLACEHOLDER_P
                           (DECL_SIZE (TYPE_FIELDS (left_type)))))
               && !integer_zerop (TYPE_SIZE (right_type)))
        {
          /* We make an exception for a BLKmode type padding a non-BLKmode
             inner type and do the conversion of the LHS right away, since
             unchecked_convert wouldn't do it properly.  */
          if (TYPE_MODE (left_type) == BLKmode
              && TYPE_MODE (right_type) != BLKmode
              && TREE_CODE (right_operand) != CONSTRUCTOR)
            {
              operation_type = right_type;
              left_operand = convert (operation_type, left_operand);
              left_type = operation_type;
            }
          else
            operation_type = left_type;
        }

      /* If we have a call to a function that returns with variable size, use
         the RHS type in case we want to use the return slot optimization.  */
      else if (TREE_CODE (right_operand) == CALL_EXPR
               && return_type_with_variable_size_p (right_type))
        operation_type = right_type;

      /* Find the best type to use for copying between aggregate types.  */
      else if (((TREE_CODE (left_type) == ARRAY_TYPE
                 && TREE_CODE (right_type) == ARRAY_TYPE)
                || (TREE_CODE (left_type) == RECORD_TYPE
                    && TREE_CODE (right_type) == RECORD_TYPE))
               && (best_type = find_common_type (left_type, right_type)))
        operation_type = best_type;

      /* Otherwise use the LHS type.  */
      else
        operation_type = left_type;

      /* Ensure everything on the LHS is valid.  If we have a field reference,
         strip anything that get_inner_reference can handle.  Then remove any
         conversions between types having the same code and mode.  And mark
         VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE.  When done, we must have
         either an INDIRECT_REF, a NULL_EXPR, a SAVE_EXPR or a DECL node.  */
      result = left_operand;
      while (true)
        {
          tree restype = TREE_TYPE (result);

          if (TREE_CODE (result) == COMPONENT_REF
              || TREE_CODE (result) == ARRAY_REF
              || TREE_CODE (result) == ARRAY_RANGE_REF)
            while (handled_component_p (result))
              result = TREE_OPERAND (result, 0);

          else if (TREE_CODE (result) == REALPART_EXPR
                   || TREE_CODE (result) == IMAGPART_EXPR
                   || (CONVERT_EXPR_P (result)
                       && (((TREE_CODE (restype)
                             == TREE_CODE (operand_type (result))
                             && TYPE_MODE (restype)
                                == TYPE_MODE (operand_type (result))))
                           || TYPE_ALIGN_OK (restype))))
            result = TREE_OPERAND (result, 0);

          else if (TREE_CODE (result) == VIEW_CONVERT_EXPR)
            {
              TREE_ADDRESSABLE (result) = 1;
              result = TREE_OPERAND (result, 0);
            }

          else
            break;
        }

      gcc_assert (INDIRECT_REF_P (result)
                  || TREE_CODE (result) == NULL_EXPR
                  || TREE_CODE (result) == SAVE_EXPR
                  || DECL_P (result));

      /* Convert the right operand to the operation type unless it is
         either already of the correct type or if the type involves a
         placeholder, since the RHS may not have the same record type.  */
      if (operation_type != right_type
          && !CONTAINS_PLACEHOLDER_P (TYPE_SIZE (operation_type)))
        {
          right_operand = convert (operation_type, right_operand);
          right_type = operation_type;
        }

      /* If the left operand is not of the same type as the operation
         type, wrap it up in a VIEW_CONVERT_EXPR.  */
      if (left_type != operation_type)
        left_operand = unchecked_convert (operation_type, left_operand, false);

      has_side_effects = true;
      modulus = NULL_TREE;
      break;

    case ARRAY_REF:
      if (!operation_type)
        operation_type = TREE_TYPE (left_type);

      /* ... fall through ... */

    case ARRAY_RANGE_REF:
      /* First look through conversion between type variants.  Note that
         this changes neither the operation type nor the type domain.  */
      if (TREE_CODE (left_operand) == VIEW_CONVERT_EXPR
          && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (left_operand, 0)))
             == TYPE_MAIN_VARIANT (left_type))
        {
          left_operand = TREE_OPERAND (left_operand, 0);
          left_type = TREE_TYPE (left_operand);
        }

      /* For a range, make sure the element type is consistent.  */
      if (op_code == ARRAY_RANGE_REF
          && TREE_TYPE (operation_type) != TREE_TYPE (left_type))
        {
          operation_type
            = build_nonshared_array_type (TREE_TYPE (left_type),
                                          TYPE_DOMAIN (operation_type));
          /* Declare it now since it will never be declared otherwise.  This
             is necessary to ensure that its subtrees are properly marked.  */
          create_type_decl (TYPE_NAME (operation_type), operation_type, true,
                            false, Empty);
        }

      /* Then convert the right operand to its base type.  This will prevent
         unneeded sign conversions when sizetype is wider than integer.  */
      right_operand = convert (right_base_type, right_operand);
      right_operand = convert_to_index_type (right_operand);
      modulus = NULL_TREE;
      break;

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      gcc_checking_assert
        (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
      operation_type = left_base_type;
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (operation_type, right_operand);
      break;

    case GE_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case LT_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
      gcc_checking_assert
        (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
      /* If either operand is a NULL_EXPR, just return a new one.  */
      if (TREE_CODE (left_operand) == NULL_EXPR)
        return build2 (op_code, result_type,
                       build1 (NULL_EXPR, integer_type_node,
                               TREE_OPERAND (left_operand, 0)),
                       integer_zero_node);

      else if (TREE_CODE (right_operand) == NULL_EXPR)
        return build2 (op_code, result_type,
                       build1 (NULL_EXPR, integer_type_node,
                               TREE_OPERAND (right_operand, 0)),
                       integer_zero_node);

      /* If either object is a justified modular type, get the
         fields from within.  */
      if (TREE_CODE (left_type) == RECORD_TYPE
          && TYPE_JUSTIFIED_MODULAR_P (left_type))
        {
          left_operand = convert (TREE_TYPE (TYPE_FIELDS (left_type)),
                                  left_operand);
          left_type = TREE_TYPE (left_operand);
          left_base_type = get_base_type (left_type);
        }

      if (TREE_CODE (right_type) == RECORD_TYPE
          && TYPE_JUSTIFIED_MODULAR_P (right_type))
        {
          right_operand = convert (TREE_TYPE (TYPE_FIELDS (right_type)),
                                   right_operand);
          right_type = TREE_TYPE (right_operand);
          right_base_type = get_base_type (right_type);
        }

      /* If both objects are arrays, compare them specially.  */
      if ((TREE_CODE (left_type) == ARRAY_TYPE
           || (TREE_CODE (left_type) == INTEGER_TYPE
               && TYPE_HAS_ACTUAL_BOUNDS_P (left_type)))
          && (TREE_CODE (right_type) == ARRAY_TYPE
              || (TREE_CODE (right_type) == INTEGER_TYPE
                  && TYPE_HAS_ACTUAL_BOUNDS_P (right_type))))
        {
          result = compare_arrays (input_location,
                                   result_type, left_operand, right_operand);
          if (op_code == NE_EXPR)
            result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
          else
            gcc_assert (op_code == EQ_EXPR);

          return result;
        }

      /* Otherwise, the base types must be the same, unless they are both (fat)
         pointer types or record types.  In the latter case, use the best type
         and convert both operands to that type.  */
      if (left_base_type != right_base_type)
        {
          if (TYPE_IS_FAT_POINTER_P (left_base_type)
              && TYPE_IS_FAT_POINTER_P (right_base_type))
            {
              gcc_assert (TYPE_MAIN_VARIANT (left_base_type)
                          == TYPE_MAIN_VARIANT (right_base_type));
              best_type = left_base_type;
            }

          else if (POINTER_TYPE_P (left_base_type)
                   && POINTER_TYPE_P (right_base_type))
            {
              tree left_ref_type = TREE_TYPE (left_base_type);
              tree right_ref_type = TREE_TYPE (right_base_type);

              /* Anonymous access types in Ada 2005 can point to different
                 members of a tagged hierarchy or different function types.  */
              gcc_assert (TYPE_MAIN_VARIANT (left_ref_type)
                          == TYPE_MAIN_VARIANT (right_ref_type)
                          || (TYPE_ALIGN_OK (left_ref_type)
                              && TYPE_ALIGN_OK (right_ref_type))
                          || (TREE_CODE (left_ref_type) == FUNCTION_TYPE
                              && TREE_CODE (right_ref_type) == FUNCTION_TYPE));
              best_type = left_base_type;
            }

          else if (TREE_CODE (left_base_type) == RECORD_TYPE
                   && TREE_CODE (right_base_type) == RECORD_TYPE)
            {
              /* The only way this is permitted is if both types have the same
                 name.  In that case, one of them must not be self-referential.
                 Use it as the best type.  Even better with a fixed size.  */
              gcc_assert (TYPE_NAME (left_base_type)
                          && TYPE_NAME (left_base_type)
                             == TYPE_NAME (right_base_type));

              if (TREE_CONSTANT (TYPE_SIZE (left_base_type)))
                best_type = left_base_type;
              else if (TREE_CONSTANT (TYPE_SIZE (right_base_type)))
                best_type = right_base_type;
              else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (left_base_type)))
                best_type = left_base_type;
              else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (right_base_type)))
                best_type = right_base_type;
              else
                gcc_unreachable ();
            }

          else
            gcc_unreachable ();

          left_operand = convert (best_type, left_operand);
          right_operand = convert (best_type, right_operand);
        }
      else
        {
          left_operand = convert (left_base_type, left_operand);
          right_operand = convert (right_base_type, right_operand);
        }

      /* If both objects are fat pointers, compare them specially.  */
      if (TYPE_IS_FAT_POINTER_P (left_base_type))
        {
          result
            = compare_fat_pointers (input_location,
                                    result_type, left_operand, right_operand);
          if (op_code == NE_EXPR)
            result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
          else
            gcc_assert (op_code == EQ_EXPR);

          return result;
        }

      modulus = NULL_TREE;
      break;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* The RHS of a shift can be any type.  Also, ignore any modulus
         (we used to abort, but this is needed for unchecked conversion
         to modular types).  Otherwise, processing is the same as normal.  */
      gcc_assert (operation_type == left_base_type);
      modulus = NULL_TREE;
      left_operand = convert (operation_type, left_operand);
      break;

    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      /* For binary modulus, if the inputs are in range, so are the
         outputs.  */
      if (modulus && integer_pow2p (modulus))
        modulus = NULL_TREE;
      goto common;

    case COMPLEX_EXPR:
      gcc_assert (TREE_TYPE (result_type) == left_base_type
                  && TREE_TYPE (result_type) == right_base_type);
      left_operand = convert (left_base_type, left_operand);
      right_operand = convert (right_base_type, right_operand);
      break;

    case TRUNC_DIV_EXPR:  case TRUNC_MOD_EXPR:
    case CEIL_DIV_EXPR:   case CEIL_MOD_EXPR:
    case FLOOR_DIV_EXPR:  case FLOOR_MOD_EXPR:
    case ROUND_DIV_EXPR:  case ROUND_MOD_EXPR:
      /* These always produce results lower than either operand.  */
      modulus = NULL_TREE;
      goto common;

    case POINTER_PLUS_EXPR:
      gcc_assert (operation_type == left_base_type
                  && sizetype == right_base_type);
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (sizetype, right_operand);
      break;

    case PLUS_NOMOD_EXPR:
    case MINUS_NOMOD_EXPR:
      if (op_code == PLUS_NOMOD_EXPR)
        op_code = PLUS_EXPR;
      else
        op_code = MINUS_EXPR;
      modulus = NULL_TREE;

      /* ... fall through ... */

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* Avoid doing arithmetic in ENUMERAL_TYPE or BOOLEAN_TYPE like the
         other compilers.  Contrary to C, Ada doesn't allow arithmetic in
         these types but can generate addition/subtraction for Succ/Pred.  */
      if (operation_type
          && (TREE_CODE (operation_type) == ENUMERAL_TYPE
              || TREE_CODE (operation_type) == BOOLEAN_TYPE))
        operation_type = left_base_type = right_base_type
          = gnat_type_for_mode (TYPE_MODE (operation_type),
                                TYPE_UNSIGNED (operation_type));

      /* ... fall through ... */

    default:
    common:
      /* The result type should be the same as the base types of both
         operands (and they should be the same).  Convert everything to
         the result type.  */

      gcc_assert (operation_type == left_base_type
                  && left_base_type == right_base_type);
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (operation_type, right_operand);
    }

  if (modulus && !integer_pow2p (modulus))
    {
      result = nonbinary_modular_operation (op_code, operation_type,
                                            left_operand, right_operand);
      modulus = NULL_TREE;
    }
  /* If either operand is a NULL_EXPR, just return a new one.  */
  else if (TREE_CODE (left_operand) == NULL_EXPR)
    return build1 (NULL_EXPR, operation_type, TREE_OPERAND (left_operand, 0));
  else if (TREE_CODE (right_operand) == NULL_EXPR)
    return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
  else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
    {
      result = build4 (op_code, operation_type, left_operand, right_operand,
                       NULL_TREE, NULL_TREE);
      if (!no_fold)
        result = fold (result);
    }
  else if (op_code == INIT_EXPR || op_code == MODIFY_EXPR)
    result = build2 (op_code, void_type_node, left_operand, right_operand);
  else if (no_fold)
    result = build2 (op_code, operation_type, left_operand, right_operand);
  else
    result
      = fold_build2 (op_code, operation_type, left_operand, right_operand);

  if (TREE_CONSTANT (result))
    ;
  else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
    {
      if (TYPE_VOLATILE (operation_type))
        TREE_THIS_VOLATILE (result) = 1;
    }
  else if (TREE_CONSTANT (left_operand) && TREE_CONSTANT (right_operand))
    TREE_CONSTANT (result) = 1;

  if (has_side_effects)
    TREE_SIDE_EFFECTS (result) = 1;

  /* If we are working with modular types, perform the MOD operation
     if something above hasn't eliminated the need for it.  */
  if (modulus)
    {
      modulus = convert (operation_type, modulus);
      if (no_fold)
        result = build2 (FLOOR_MOD_EXPR, operation_type, result, modulus);
      else
        result = fold_build2 (FLOOR_MOD_EXPR, operation_type, result, modulus);
    }

  if (result_type && result_type != operation_type)
    result = convert (result_type, result);

  return result;
}
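
/* As an example of the modular handling at the end: for an addition in a
   binary modular type, say with modulus 2**4 held in a wider machine type,
   the function builds the equivalent of (L + R) mod 16 by means of the
   final FLOOR_MOD_EXPR, whereas a nonbinary modulus such as 10 is routed
   through nonbinary_modular_operation instead.  This is an illustrative
   reading of the paths above, not an additional transformation.  */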
ce2d0ce2 1337
a1ab4c31
AC
1338/* Similar, but for unary operations. */
1339
1340tree
1341build_unary_op (enum tree_code op_code, tree result_type, tree operand)
1342{
1343 tree type = TREE_TYPE (operand);
1344 tree base_type = get_base_type (type);
1345 tree operation_type = result_type;
1346 tree result;
a1ab4c31
AC
1347
1348 if (operation_type
1349 && TREE_CODE (operation_type) == RECORD_TYPE
1350 && TYPE_JUSTIFIED_MODULAR_P (operation_type))
1351 operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
1352
1353 if (operation_type
4f1a975c 1354 && TREE_CODE (operation_type) == INTEGER_TYPE
a1ab4c31
AC
1355 && TYPE_EXTRA_SUBTYPE_P (operation_type))
1356 operation_type = get_base_type (operation_type);
1357
1358 switch (op_code)
1359 {
1360 case REALPART_EXPR:
1361 case IMAGPART_EXPR:
1362 if (!operation_type)
1363 result_type = operation_type = TREE_TYPE (type);
1364 else
1365 gcc_assert (result_type == TREE_TYPE (type));
1366
1367 result = fold_build1 (op_code, operation_type, operand);
1368 break;
1369
1370 case TRUTH_NOT_EXPR:
ae1158c4
EB
1371 gcc_checking_assert
1372 (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
658a41ac 1373 result = invert_truthvalue_loc (EXPR_LOCATION (operand), operand);
f2be3ce4
EB
1374 /* When not optimizing, fold the result as invert_truthvalue_loc
1375 doesn't fold the result of comparisons. This is intended to undo
1376 the trick used for boolean rvalues in gnat_to_gnu. */
1377 if (!optimize)
1378 result = fold (result);
a1ab4c31
AC
1379 break;
1380
1381 case ATTR_ADDR_EXPR:
1382 case ADDR_EXPR:
1383 switch (TREE_CODE (operand))
1384 {
1385 case INDIRECT_REF:
1386 case UNCONSTRAINED_ARRAY_REF:
1387 result = TREE_OPERAND (operand, 0);
1388
1389 /* Make sure the type here is a pointer, not a reference.
1390 GCC wants pointer types for function addresses. */
1391 if (!result_type)
1392 result_type = build_pointer_type (type);
1393
1394 /* If the underlying object can alias everything, propagate the
1395 property since we are effectively retrieving the object. */
1396 if (POINTER_TYPE_P (TREE_TYPE (result))
1397 && TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (result)))
1398 {
1399 if (TREE_CODE (result_type) == POINTER_TYPE
1400 && !TYPE_REF_CAN_ALIAS_ALL (result_type))
1401 result_type
1402 = build_pointer_type_for_mode (TREE_TYPE (result_type),
1403 TYPE_MODE (result_type),
1404 true);
1405 else if (TREE_CODE (result_type) == REFERENCE_TYPE
1406 && !TYPE_REF_CAN_ALIAS_ALL (result_type))
1407 result_type
1408 = build_reference_type_for_mode (TREE_TYPE (result_type),
1409 TYPE_MODE (result_type),
1410 true);
1411 }
1412 break;
1413
1414 case NULL_EXPR:
1415 result = operand;
1416 TREE_TYPE (result) = type = build_pointer_type (type);
1417 break;
1418
0b3467c4
EB
1419 case COMPOUND_EXPR:
1420 /* Fold a compound expression if it has unconstrained array type
1421 since the middle-end cannot handle it. But we don't it in the
1422 general case because it may introduce aliasing issues if the
1423 first operand is an indirect assignment and the second operand
93e708f9
EB
1424 the corresponding address, e.g. for an allocator. However do
1425 it for a return value to expose it for later recognition. */
1426 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE
04682fe7 1427 || (VAR_P (TREE_OPERAND (operand, 1))
93e708f9 1428 && DECL_RETURN_VALUE_P (TREE_OPERAND (operand, 1))))
0b3467c4
EB
1429 {
1430 result = build_unary_op (ADDR_EXPR, result_type,
1431 TREE_OPERAND (operand, 1));
1432 result = build2 (COMPOUND_EXPR, TREE_TYPE (result),
1433 TREE_OPERAND (operand, 0), result);
1434 break;
1435 }
1436 goto common;
1437
a1ab4c31
AC
1438 case ARRAY_REF:
1439 case ARRAY_RANGE_REF:
1440 case COMPONENT_REF:
1441 case BIT_FIELD_REF:
f5631ae6
EB
1442 /* If this is for 'Address, find the address of the prefix and add
1443 the offset to the field. Otherwise, do this the normal way. */
a1ab4c31
AC
1444 if (op_code == ATTR_ADDR_EXPR)
1445 {
f37fac2b
RS
1446 poly_int64 bitsize;
1447 poly_int64 bitpos;
a1ab4c31 1448 tree offset, inner;
ef4bddc2 1449 machine_mode mode;
ee45a32d 1450 int unsignedp, reversep, volatilep;
a1ab4c31
AC
1451
1452 inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
ee45a32d 1453 &mode, &unsignedp, &reversep,
25b75a48 1454 &volatilep);
a1ab4c31
AC
1455
1456 /* If INNER is a padding type whose field has a self-referential
1457 size, convert to that inner type. We know the offset is zero
1458 and we need to have that type visible. */
fc7a823e 1459 if (type_is_padding_self_referential (TREE_TYPE (inner)))
a1ab4c31
AC
1460 inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),
1461 inner);
1462
1463 /* Compute the offset as a byte offset from INNER. */
1464 if (!offset)
1465 offset = size_zero_node;
1466
f37fac2b
RS
1467 offset
1468 = size_binop (PLUS_EXPR, offset,
1469 size_int (bits_to_bytes_round_down (bitpos)));
a1ab4c31 1470
2117b9bb
EB
1471 /* Take the address of INNER, convert it to a pointer to our type
1472 and add the offset. */
1473 inner = build_unary_op (ADDR_EXPR,
1474 build_pointer_type (TREE_TYPE (operand)),
1475 inner);
1476 result = build_binary_op (POINTER_PLUS_EXPR, TREE_TYPE (inner),
a1ab4c31 1477 inner, offset);
a1ab4c31
AC
1478 break;
1479 }
1480 goto common;
1481
1482 case CONSTRUCTOR:
1483 /* If this is just a constructor for a padded record, we can
1484 just take the address of the single field and convert it to
1485 a pointer to our type. */
315cff15 1486 if (TYPE_IS_PADDING_P (type))
a1ab4c31 1487 {
2117b9bb
EB
1488 result
1489 = build_unary_op (ADDR_EXPR,
1490 build_pointer_type (TREE_TYPE (operand)),
1491 CONSTRUCTOR_ELT (operand, 0)->value);
a1ab4c31
AC
1492 break;
1493 }
a1ab4c31
AC
1494 goto common;
1495
1496 case NOP_EXPR:
1497 if (AGGREGATE_TYPE_P (type)
1498 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand, 0))))
1499 return build_unary_op (ADDR_EXPR, result_type,
1500 TREE_OPERAND (operand, 0));
1501
9c453de7 1502 /* ... fall through ... */
a1ab4c31
AC
1503
1504 case VIEW_CONVERT_EXPR:
1505 /* If this is just a variant conversion or if the conversion doesn't
1506 change the mode, get the result type from this type and go down.
1507 This is needed for conversions of CONST_DECLs, to eventually get
1508 to the address of their CORRESPONDING_VARs. */
1509 if ((TYPE_MAIN_VARIANT (type)
1510 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (operand, 0))))
1511 || (TYPE_MODE (type) != BLKmode
1512 && (TYPE_MODE (type)
1513 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand, 0))))))
1514 return build_unary_op (ADDR_EXPR,
1515 (result_type ? result_type
1516 : build_pointer_type (type)),
1517 TREE_OPERAND (operand, 0));
1518 goto common;
1519
1520 case CONST_DECL:
1521 operand = DECL_CONST_CORRESPONDING_VAR (operand);
1522
9c453de7 1523 /* ... fall through ... */
a1ab4c31
AC
1524
1525 default:
1526 common:
1527
184d436a
EB
1528 /* If we are taking the address of a padded record whose field
1529 contains a template, take the address of the field. */
315cff15 1530 if (TYPE_IS_PADDING_P (type)
a1ab4c31
AC
1531 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
1532 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
1533 {
1534 type = TREE_TYPE (TYPE_FIELDS (type));
1535 operand = convert (type, operand);
1536 }
1537
a1ab4c31 1538 gnat_mark_addressable (operand);
1fc24649 1539 result = build_fold_addr_expr (operand);
a1ab4c31
AC
1540 }
1541
fccc47dd
EB
1542 if (TREE_CONSTANT (operand) || staticp (operand))
1543 TREE_CONSTANT (result) = 1;
1544
a1ab4c31
AC
1545 break;
1546
1547 case INDIRECT_REF:
a1c7d797 1548 {
722356ce
EB
1549 tree t = remove_conversions (operand, false);
1550 bool can_never_be_null = DECL_P (t) && DECL_CAN_NEVER_BE_NULL_P (t);
a1c7d797 1551
184d436a
EB
1552 /* If TYPE is a thin pointer, either first retrieve the base if this
1553 is an expression with an offset built for the initialization of an
1554 object with an unconstrained nominal subtype, or else convert to
1555 the fat pointer. */
1556 if (TYPE_IS_THIN_POINTER_P (type))
a1c7d797 1557 {
184d436a
EB
1558 tree rec_type = TREE_TYPE (type);
1559
1560 if (TREE_CODE (operand) == POINTER_PLUS_EXPR
2b45154d
EB
1561 && TREE_OPERAND (operand, 1)
1562 == byte_position (DECL_CHAIN (TYPE_FIELDS (rec_type)))
184d436a
EB
1563 && TREE_CODE (TREE_OPERAND (operand, 0)) == NOP_EXPR)
1564 {
1565 operand = TREE_OPERAND (TREE_OPERAND (operand, 0), 0);
1566 type = TREE_TYPE (operand);
1567 }
1568 else if (TYPE_UNCONSTRAINED_ARRAY (rec_type))
1569 {
1570 operand
1571 = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (rec_type)),
1572 operand);
1573 type = TREE_TYPE (operand);
1574 }
a1c7d797 1575 }
a1ab4c31 1576
a1c7d797
EB
1577 /* If we want to refer to an unconstrained array, use the appropriate
1578 expression. But this will never survive down to the back-end. */
1579 if (TYPE_IS_FAT_POINTER_P (type))
1580 {
1581 result = build1 (UNCONSTRAINED_ARRAY_REF,
1582 TYPE_UNCONSTRAINED_ARRAY (type), operand);
1583 TREE_READONLY (result)
1584 = TYPE_READONLY (TYPE_UNCONSTRAINED_ARRAY (type));
1585 }
1fc24649 1586
a1c7d797
EB
1587 /* If we are dereferencing an ADDR_EXPR, return its operand. */
1588 else if (TREE_CODE (operand) == ADDR_EXPR)
1589 result = TREE_OPERAND (operand, 0);
a1ab4c31 1590
a1c7d797
EB
1591 /* Otherwise, build and fold the indirect reference. */
1592 else
1593 {
1594 result = build_fold_indirect_ref (operand);
1595 TREE_READONLY (result) = TYPE_READONLY (TREE_TYPE (type));
1596 }
a1ab4c31 1597
a1c7d797
EB
1598 if (!TYPE_IS_FAT_POINTER_P (type) && TYPE_VOLATILE (TREE_TYPE (type)))
1599 {
1600 TREE_SIDE_EFFECTS (result) = 1;
04682fe7 1601 if (INDIRECT_REF_P (result))
a1c7d797
EB
1602 TREE_THIS_VOLATILE (result) = TYPE_VOLATILE (TREE_TYPE (result));
1603 }
1604
04682fe7 1605 if ((INDIRECT_REF_P (result)
a1c7d797
EB
1606 || TREE_CODE (result) == UNCONSTRAINED_ARRAY_REF)
1607 && can_never_be_null)
1608 TREE_THIS_NOTRAP (result) = 1;
1609
1610 break;
1611 }
a1ab4c31
AC
1612
1613 case NEGATE_EXPR:
1614 case BIT_NOT_EXPR:
1615 {
1616 tree modulus = ((operation_type
1617 && TREE_CODE (operation_type) == INTEGER_TYPE
1618 && TYPE_MODULAR_P (operation_type))
1619 ? TYPE_MODULUS (operation_type) : NULL_TREE);
1620 int mod_pow2 = modulus && integer_pow2p (modulus);
1621
1622 /* If this is a modular type, there are various possibilities
1623 depending on the operation and whether the modulus is a
1624 power of two or not. */
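 /* For instance, with a modulus of 7 = 2**3 - 1, the negation of 3 is
 7 - 3 = 4, which the XOR path below computes as 7 xor 3 because
 subtracting from an all-ones value never borrows; with a modulus
 of 10, the negation of 3 is 10 - 3 = 7, and the COND_EXPR below
 keeps the negation of 0 equal to 0.  */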
1625
1626 if (modulus)
1627 {
1628 gcc_assert (operation_type == base_type);
1629 operand = convert (operation_type, operand);
1630
1631 /* In the negate case for a binary modulus, the straightforward
1632 code is fastest; the TRUNC_MOD_EXPR below amounts to
1633 an AND operation. */
1634 if (op_code == NEGATE_EXPR && mod_pow2)
1635 result = fold_build2 (TRUNC_MOD_EXPR, operation_type,
1636 fold_build1 (NEGATE_EXPR, operation_type,
1637 operand),
1638 modulus);
1639
1640 /* For the nonbinary negate case, return zero for a zero operand,
1641 else return the modulus minus the operand. If the modulus
1642 is a power of two minus one, we can do the subtraction
1643 as an XOR since it is equivalent and faster on most machines. */
1644 else if (op_code == NEGATE_EXPR && !mod_pow2)
1645 {
1646 if (integer_pow2p (fold_build2 (PLUS_EXPR, operation_type,
1647 modulus,
9a1bdc31
EB
1648 build_int_cst (operation_type,
1649 1))))
a1ab4c31
AC
1650 result = fold_build2 (BIT_XOR_EXPR, operation_type,
1651 operand, modulus);
1652 else
1653 result = fold_build2 (MINUS_EXPR, operation_type,
1654 modulus, operand);
1655
1656 result = fold_build3 (COND_EXPR, operation_type,
1657 fold_build2 (NE_EXPR,
1139f2e8 1658 boolean_type_node,
a1ab4c31 1659 operand,
9a1bdc31
EB
1660 build_int_cst
1661 (operation_type, 0)),
a1ab4c31
AC
1662 result, operand);
1663 }
1664 else
1665 {
1666 /* For the NOT cases, we need a constant equal to
1667 the modulus minus one. For a binary modulus, we
1668 XOR the operand against that constant; for a nonbinary
1669 modulus, we subtract the operand from it. */
1670
1671 tree cnst = fold_build2 (MINUS_EXPR, operation_type, modulus,
9a1bdc31 1672 build_int_cst (operation_type, 1));
a1ab4c31
AC
1673
1674 if (mod_pow2)
1675 result = fold_build2 (BIT_XOR_EXPR, operation_type,
1676 operand, cnst);
1677 else
1678 result = fold_build2 (MINUS_EXPR, operation_type,
1679 cnst, operand);
1680 }
1681
1682 break;
1683 }
1684 }
1685
9c453de7 1686 /* ... fall through ... */
a1ab4c31
AC
1687
1688 default:
1689 gcc_assert (operation_type == base_type);
1690 result = fold_build1 (op_code, operation_type,
1691 convert (operation_type, operand));
1692 }
1693
a1ab4c31
AC
1694 if (result_type && TREE_TYPE (result) != result_type)
1695 result = convert (result_type, result);
1696
1697 return result;
1698}
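
/* Usage sketch (GNU_PTR being a hypothetical pointer-valued tree): the
   INDIRECT_REF and ADDR_EXPR cases above cancel each other, so

     build_unary_op (ADDR_EXPR, NULL_TREE,
		     build_unary_op (INDIRECT_REF, NULL_TREE, GNU_PTR))

   yields GNU_PTR again, modulo a possible pointer-type conversion.  */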
ce2d0ce2 1699
a1ab4c31
AC
1700/* Similar, but for COND_EXPR. */
1701
1702tree
1703build_cond_expr (tree result_type, tree condition_operand,
1704 tree true_operand, tree false_operand)
1705{
a1ab4c31 1706 bool addr_p = false;
1275de7d 1707 tree result;
a1ab4c31 1708
1275de7d
EB
1709 /* The front-end verified that the result, true and false operands have
1710 the same base type. Convert everything to the result type. */
1711 true_operand = convert (result_type, true_operand);
a1ab4c31
AC
1712 false_operand = convert (result_type, false_operand);
1713
87fa3d34
EB
1714 /* If the result type is unconstrained, take the address of the operands and
1715 then dereference the result. Likewise if the result type is passed by
a0b8b1b7 1716 reference, because creating a temporary of this type is not allowed. */
a1ab4c31 1717 if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
a0b8b1b7 1718 || TYPE_IS_BY_REFERENCE_P (result_type)
cb3d597d 1719 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
a1ab4c31 1720 {
a1ab4c31
AC
1721 result_type = build_pointer_type (result_type);
1722 true_operand = build_unary_op (ADDR_EXPR, result_type, true_operand);
1723 false_operand = build_unary_op (ADDR_EXPR, result_type, false_operand);
1275de7d 1724 addr_p = true;
a1ab4c31
AC
1725 }
1726
1727 result = fold_build3 (COND_EXPR, result_type, condition_operand,
1728 true_operand, false_operand);
1729
1275de7d
EB
1730 /* If we have a common SAVE_EXPR (possibly surrounded by arithmetics)
1731 in both arms, make sure it gets evaluated by moving it ahead of the
1732 conditional expression. This is necessary because it is evaluated
1733 in only one place at run time and would otherwise be uninitialized
1734 in one of the arms. */
1735 true_operand = skip_simple_arithmetic (true_operand);
a1ab4c31
AC
1736 false_operand = skip_simple_arithmetic (false_operand);
1737
1275de7d 1738 if (true_operand == false_operand && TREE_CODE (true_operand) == SAVE_EXPR)
a1ab4c31
AC
1739 result = build2 (COMPOUND_EXPR, result_type, true_operand, result);
1740
1275de7d 1741 if (addr_p)
a1ab4c31
AC
1742 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1743
1744 return result;
1745}
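
/* For an unconstrained array type T, the net effect of the above is to
   rewrite (COND ? A : B) into *(COND ? &A : &B), so that no temporary
   of type T ever needs to be created.  */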
1746
39ab2e8f
RK
1747/* Similar, but for COMPOUND_EXPR. */
1748
1749tree
1750build_compound_expr (tree result_type, tree stmt_operand, tree expr_operand)
1751{
1752 bool addr_p = false;
1753 tree result;
1754
1755 /* If the result type is unconstrained, take the address of the operand and
1756 then dereference the result. Likewise if the result type is passed by
1757 reference, but this is natively handled in the gimplifier. */
1758 if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1759 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
1760 {
1761 result_type = build_pointer_type (result_type);
1762 expr_operand = build_unary_op (ADDR_EXPR, result_type, expr_operand);
1763 addr_p = true;
1764 }
1765
1766 result = fold_build2 (COMPOUND_EXPR, result_type, stmt_operand,
1767 expr_operand);
1768
1769 if (addr_p)
1770 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1771
1772 return result;
1773}
ce2d0ce2 1774
dddf8120
EB
1775/* Conveniently construct a function call expression. FNDECL names the
1776 function to be called, N is the number of arguments, and the "..."
1777 parameters are the argument expressions. Unlike build_call_expr
1778 this doesn't fold the call, hence it will always return a CALL_EXPR. */
a1ab4c31
AC
1779
1780tree
dddf8120 1781build_call_n_expr (tree fndecl, int n, ...)
a1ab4c31 1782{
dddf8120
EB
1783 va_list ap;
1784 tree fntype = TREE_TYPE (fndecl);
1785 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
1786
1787 va_start (ap, n);
1788 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
1789 va_end (ap);
1790 return fn;
a1ab4c31 1791}
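
/* For example, maybe_wrap_free below issues

     build_call_n_expr (free_decl, 1, free_ptr)

   to obtain an unfolded CALL_EXPR invoking free.  */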
ce2d0ce2 1792
9a1bdc31
EB
1793/* Build a goto to LABEL for a raise, with an optional call to Local_Raise.
1794 MSG gives the exception's identity for the call to Local_Raise, if any. */
1795
1796static tree
8f8f531f 1797build_goto_raise (Entity_Id gnat_label, int msg)
9a1bdc31 1798{
8f8f531f
PMR
1799 tree gnu_label = gnat_to_gnu_entity (gnat_label, NULL_TREE, false);
1800 tree gnu_result = build1 (GOTO_EXPR, void_type_node, gnu_label);
9a1bdc31
EB
1801 Entity_Id local_raise = Get_Local_Raise_Call_Entity ();
1802
1803 /* If Local_Raise is present, build Local_Raise (Exception'Identity). */
1804 if (Present (local_raise))
1805 {
afc737f0
EB
1806 tree gnu_local_raise
1807 = gnat_to_gnu_entity (local_raise, NULL_TREE, false);
9a1bdc31 1808 tree gnu_exception_entity
afc737f0 1809 = gnat_to_gnu_entity (Get_RT_Exception_Entity (msg), NULL_TREE, false);
9a1bdc31
EB
1810 tree gnu_call
1811 = build_call_n_expr (gnu_local_raise, 1,
1812 build_unary_op (ADDR_EXPR, NULL_TREE,
1813 gnu_exception_entity));
1814 gnu_result
1815 = build2 (COMPOUND_EXPR, void_type_node, gnu_call, gnu_result);
1816 }
1817
8f8f531f 1818 TREE_USED (gnu_label) = 1;
9a1bdc31
EB
1819 return gnu_result;
1820}
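
/* The tree built above is morally equivalent to
   "Local_Raise (&exception_entity); goto label;" when Local_Raise is
   present, and to a plain "goto label;" otherwise.  */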
1821
ba464315
EB
1822/* Expand the SLOC of GNAT_NODE, if present, into tree location information
1823 pointed to by FILENAME, LINE and COL. Fall back to the current location
1824 if GNAT_NODE is absent or has no SLOC. */
a1ab4c31 1825
ba464315
EB
1826static void
1827expand_sloc (Node_Id gnat_node, tree *filename, tree *line, tree *col)
1828{
1829 const char *str;
1830 int line_number, column_number;
1831
1832 if (Debug_Flag_NN || Exception_Locations_Suppressed)
1833 {
1834 str = "";
1835 line_number = 0;
1836 column_number = 0;
1837 }
1838 else if (Present (gnat_node) && Sloc (gnat_node) != No_Location)
1839 {
1840 str = Get_Name_String
1841 (Debug_Source_Name (Get_Source_File_Index (Sloc (gnat_node))));
1842 line_number = Get_Logical_Line_Number (Sloc (gnat_node));
1843 column_number = Get_Column_Number (Sloc (gnat_node));
1844 }
1845 else
1846 {
1847 str = lbasename (LOCATION_FILE (input_location));
1848 line_number = LOCATION_LINE (input_location);
1849 column_number = LOCATION_COLUMN (input_location);
1850 }
a1ab4c31 1851
ba464315
EB
1852 const int len = strlen (str);
1853 *filename = build_string (len, str);
825da0d2 1854 TREE_TYPE (*filename) = build_array_type (char_type_node,
ba464315
EB
1855 build_index_type (size_int (len)));
1856 *line = build_int_cst (NULL_TREE, line_number);
1857 if (col)
1858 *col = build_int_cst (NULL_TREE, column_number);
1859}
1860
1861/* Build a call to a function that raises an exception and passes file name
1862 and line number, if requested. MSG says which exception function to call.
1863 GNAT_NODE is the node conveying the source location for which the error
1864 should be signaled, or Empty in which case the error is signaled for the
1865 current location. KIND says which kind of exception node this is for,
1866 among N_Raise_{Constraint,Storage,Program}_Error. */
a1ab4c31
AC
1867
1868tree
1869build_call_raise (int msg, Node_Id gnat_node, char kind)
1870{
8f8f531f 1871 Entity_Id gnat_label = get_exception_label (kind);
a1ab4c31 1872 tree fndecl = gnat_raise_decls[msg];
ba464315 1873 tree filename, line;
a1ab4c31
AC
1874
1875 /* If this is to be done as a goto, handle that case. */
8f8f531f
PMR
1876 if (Present (gnat_label))
1877 return build_goto_raise (gnat_label, msg);
a1ab4c31 1878
ba464315 1879 expand_sloc (gnat_node, &filename, &line, NULL);
a1ab4c31
AC
1880
1881 return
dddf8120 1882 build_call_n_expr (fndecl, 2,
6936c61a 1883 build1 (ADDR_EXPR,
825da0d2 1884 build_pointer_type (char_type_node),
a1ab4c31 1885 filename),
ba464315 1886 line);
a1ab4c31 1887}
437f8c1e 1888
ba464315
EB
1889/* Similar to build_call_raise, with extra information about the column
1890 where the check failed. */
437f8c1e
AC
1891
1892tree
9a1bdc31 1893build_call_raise_column (int msg, Node_Id gnat_node, char kind)
437f8c1e 1894{
8f8f531f 1895 Entity_Id gnat_label = get_exception_label (kind);
437f8c1e 1896 tree fndecl = gnat_raise_decls_ext[msg];
ba464315 1897 tree filename, line, col;
437f8c1e 1898
9a1bdc31 1899 /* If this is to be done as a goto, handle that case. */
8f8f531f
PMR
1900 if (Present (gnat_label))
1901 return build_goto_raise (gnat_label, msg);
9a1bdc31 1902
ba464315 1903 expand_sloc (gnat_node, &filename, &line, &col);
437f8c1e 1904
dddf8120 1905 return
ba464315 1906 build_call_n_expr (fndecl, 3,
dddf8120 1907 build1 (ADDR_EXPR,
825da0d2 1908 build_pointer_type (char_type_node),
dddf8120 1909 filename),
ba464315 1910 line, col);
437f8c1e
AC
1911}
1912
ba464315
EB
1913/* Similar to build_call_raise_column, for an index or range check exception,
1914 with extra information of the form "INDEX out of range FIRST..LAST". */
437f8c1e
AC
1915
1916tree
9a1bdc31 1917build_call_raise_range (int msg, Node_Id gnat_node, char kind,
ba464315 1918 tree index, tree first, tree last)
437f8c1e 1919{
8f8f531f 1920 Entity_Id gnat_label = get_exception_label (kind);
437f8c1e 1921 tree fndecl = gnat_raise_decls_ext[msg];
ba464315 1922 tree filename, line, col;
437f8c1e 1923
9a1bdc31 1924 /* If this is to be done as a goto, handle that case. */
8f8f531f
PMR
1925 if (Present (gnat_label))
1926 return build_goto_raise (gnat_label, msg);
9a1bdc31 1927
ba464315 1928 expand_sloc (gnat_node, &filename, &line, &col);
437f8c1e 1929
dddf8120 1930 return
ba464315 1931 build_call_n_expr (fndecl, 6,
dddf8120 1932 build1 (ADDR_EXPR,
825da0d2 1933 build_pointer_type (char_type_node),
dddf8120 1934 filename),
ba464315
EB
1935 line, col,
1936 convert (integer_type_node, index),
1937 convert (integer_type_node, first),
1938 convert (integer_type_node, last));
437f8c1e 1939}
ce2d0ce2 1940
a1ab4c31
AC
1941/* qsort comparer for the bit positions of two constructor elements
1942 for record components. */
1943
1944static int
4b865081 1945compare_elmt_bitpos (const void *rt1, const void *rt2)
a1ab4c31 1946{
bb5faf73
JW
1947 const constructor_elt * const elmt1 = (const constructor_elt *) rt1;
1948 const constructor_elt * const elmt2 = (const constructor_elt *) rt2;
0e228dd9
NF
1949 const_tree const field1 = elmt1->index;
1950 const_tree const field2 = elmt2->index;
a1ab4c31
AC
1951 const int ret
1952 = tree_int_cst_compare (bit_position (field1), bit_position (field2));
1953
1954 return ret ? ret : (int) (DECL_UID (field1) - DECL_UID (field2));
1955}
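
/* The DECL_UID tie-breaker turns the comparison into a total order, which
   is needed for a deterministic result because qsort is not stable.  */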
1956
0e228dd9 1957/* Return a CONSTRUCTOR of TYPE whose elements are V. */
a1ab4c31
AC
1958
1959tree
9771b263 1960gnat_build_constructor (tree type, vec<constructor_elt, va_gc> *v)
a1ab4c31 1961{
a1ab4c31 1962 bool allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
88293f03 1963 bool read_only = true;
a1ab4c31 1964 bool side_effects = false;
0e228dd9
NF
1965 tree result, obj, val;
1966 unsigned int n_elmts;
a1ab4c31
AC
1967
1968 /* Scan the elements to see if they are all constant or if any has side
1969 effects, to let us set global flags on the resulting constructor. Count
1970 the elements along the way for possible sorting purposes below. */
0e228dd9 1971 FOR_EACH_CONSTRUCTOR_ELT (v, n_elmts, obj, val)
a1ab4c31 1972 {
fccc47dd
EB
1973 /* The predicate must be in keeping with output_constructor and, unlike
1974 initializer_constant_valid_p, we accept "&{...}" because we'll put
1975 the CONSTRUCTOR into the constant pool during gimplification. */
cce30ea8 1976 if ((!TREE_CONSTANT (val) && !TREE_STATIC (val))
a1ab4c31 1977 || (TREE_CODE (type) == RECORD_TYPE
324c9b02
OH
1978 && CONSTRUCTOR_BITFIELD_P (obj)
1979 && !initializer_constant_valid_for_bitfield_p (val))
fccc47dd
EB
1980 || (!initializer_constant_valid_p (val,
1981 TREE_TYPE (val),
1982 TYPE_REVERSE_STORAGE_ORDER (type))
1983 && !(TREE_CODE (val) == ADDR_EXPR
1984 && TREE_CODE (TREE_OPERAND (val, 0)) == CONSTRUCTOR
1985 && TREE_CONSTANT (TREE_OPERAND (val, 0)))))
a1ab4c31
AC
1986 allconstant = false;
1987
88293f03
EB
1988 if (!TREE_READONLY (val))
1989 read_only = false;
1990
324c9b02 1991 if (TREE_SIDE_EFFECTS (val))
a1ab4c31 1992 side_effects = true;
a1ab4c31
AC
1993 }
1994
1995 /* For record types with constant components only, sort field list
1996 by increasing bit position. This is necessary to ensure the
1997 constructor can be output as static data. */
1998 if (allconstant && TREE_CODE (type) == RECORD_TYPE && n_elmts > 1)
9771b263 1999 v->qsort (compare_elmt_bitpos);
a1ab4c31 2000
0e228dd9 2001 result = build_constructor (type, v);
1448093c 2002 CONSTRUCTOR_NO_CLEARING (result) = 1;
a1ab4c31
AC
2003 TREE_CONSTANT (result) = TREE_STATIC (result) = allconstant;
2004 TREE_SIDE_EFFECTS (result) = side_effects;
88293f03 2005 TREE_READONLY (result) = TYPE_READONLY (type) || read_only || allconstant;
a1ab4c31
AC
2006 return result;
2007}
ce2d0ce2 2008
64235766
EB
2009/* Return a COMPONENT_REF to access FIELD in RECORD, or NULL_TREE if the field
2010 is not found in the record. Don't fold the result if NO_FOLD is true. */
a1ab4c31 2011
64235766
EB
2012static tree
2013build_simple_component_ref (tree record, tree field, bool no_fold)
a1ab4c31 2014{
64235766
EB
2015 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (record));
2016 tree ref;
a1ab4c31 2017
bb24f343
EB
2018 /* The failure of this assertion will very likely come from a missing
2019 insertion of an explicit dereference. */
64235766 2020 gcc_assert (RECORD_OR_UNION_TYPE_P (type) && COMPLETE_TYPE_P (type));
a1ab4c31 2021
64235766
EB
2022 /* Try to fold a conversion from another record or union type unless the type
2023 contains a placeholder as it might be needed for a later substitution. */
2024 if (TREE_CODE (record) == VIEW_CONVERT_EXPR
2025 && RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (record, 0)))
2026 && !type_contains_placeholder_p (type))
2027 {
2028 tree op = TREE_OPERAND (record, 0);
2029
2030 /* If this is an unpadding operation, convert the underlying object to
2031 the unpadded type directly. */
2032 if (TYPE_IS_PADDING_P (type) && field == TYPE_FIELDS (type))
2033 return convert (TREE_TYPE (field), op);
2034
2035 /* Otherwise try to access FIELD directly in the underlying type, but
2036 make sure that the form of the reference doesn't change too much;
2037 this can happen for an unconstrained bit-packed array type whose
2038 constrained form can be an integer type. */
2039 ref = build_simple_component_ref (op, field, no_fold);
2040 if (ref && TREE_CODE (TREE_TYPE (ref)) == TREE_CODE (TREE_TYPE (field)))
2041 return ref;
2042 }
a1ab4c31 2043
50a6af05
EB
2044 /* If this field is not in the specified record, see if we can find a field
2045 in the specified record whose original field is the same as this one. */
64235766 2046 if (DECL_CONTEXT (field) != type)
a1ab4c31
AC
2047 {
2048 tree new_field;
2049
d4aef883 2050 /* First loop through normal components. */
64235766 2051 for (new_field = TYPE_FIELDS (type);
42acad07 2052 new_field;
910ad8de 2053 new_field = DECL_CHAIN (new_field))
cb3d597d 2054 if (SAME_FIELD_P (field, new_field))
a1ab4c31
AC
2055 break;
2056
d4aef883 2057 /* Next, loop through DECL_INTERNAL_P components if we haven't found the
42acad07
EB
2058 component in the first search. Doing this search in two steps is
2059 required to avoid hidden homonymous fields in the _Parent field. */
a1ab4c31 2060 if (!new_field)
64235766 2061 for (new_field = TYPE_FIELDS (type);
42acad07 2062 new_field;
910ad8de 2063 new_field = DECL_CHAIN (new_field))
64235766
EB
2064 if (DECL_INTERNAL_P (new_field)
2065 && RECORD_OR_UNION_TYPE_P (TREE_TYPE (new_field)))
a1ab4c31
AC
2066 {
2067 tree field_ref
64235766
EB
2068 = build_simple_component_ref (record, new_field, no_fold);
2069 ref = build_simple_component_ref (field_ref, field, no_fold);
a1ab4c31
AC
2070 if (ref)
2071 return ref;
2072 }
2073
2074 field = new_field;
2075 }
2076
2077 if (!field)
2078 return NULL_TREE;
2079
42acad07
EB
2080 /* If the field's offset has overflowed, do not try to access it, as doing
2081 so may trigger sanity checks deeper in the back-end. Note that we don't
2082 need to warn since this will be done on trying to declare the object. */
a1ab4c31
AC
2083 if (TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST
2084 && TREE_OVERFLOW (DECL_FIELD_OFFSET (field)))
49c2c7f6
EB
2085 return build1 (NULL_EXPR, TREE_TYPE (field),
2086 build_call_raise (SE_Object_Too_Large, Empty,
2087 N_Raise_Storage_Error));
a1ab4c31 2088
64235766 2089 ref = build3 (COMPONENT_REF, TREE_TYPE (field), record, field, NULL_TREE);
a1ab4c31 2090
64235766 2091 if (TREE_READONLY (record)
42acad07 2092 || TREE_READONLY (field)
64235766 2093 || TYPE_READONLY (type))
a1ab4c31 2094 TREE_READONLY (ref) = 1;
42acad07 2095
64235766 2096 if (TREE_THIS_VOLATILE (record)
42acad07 2097 || TREE_THIS_VOLATILE (field)
64235766 2098 || TYPE_VOLATILE (type))
a1ab4c31
AC
2099 TREE_THIS_VOLATILE (ref) = 1;
2100
64235766 2101 if (no_fold)
a1ab4c31
AC
2102 return ref;
2103
2104 /* The generic folder may punt in this case because the inner array type
2105 can be self-referential, but folding is in fact not problematic. */
64235766
EB
2106 if (TREE_CODE (record) == CONSTRUCTOR
2107 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (record)))
a1ab4c31 2108 {
64235766
EB
2109 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (record);
2110 unsigned HOST_WIDE_INT idx;
2111 tree index, value;
2112 FOR_EACH_CONSTRUCTOR_ELT (elts, idx, index, value)
2113 if (index == field)
2114 return value;
a1ab4c31
AC
2115 return ref;
2116 }
2117
42acad07 2118 return fold (ref);
a1ab4c31 2119}
73a1a803 2120
49c2c7f6 2121/* Likewise, but return NULL_EXPR and generate a Program_Error if the
64235766 2122 field is not found in the record. */
a1ab4c31
AC
2123
2124tree
64235766 2125build_component_ref (tree record, tree field, bool no_fold)
a1ab4c31 2126{
64235766 2127 tree ref = build_simple_component_ref (record, field, no_fold);
a1ab4c31
AC
2128 if (ref)
2129 return ref;
2130
49c2c7f6
EB
2131 /* The missing field should have been detected in the front-end. */
2132 gigi_checking_assert (false);
2133
2134 /* Assume this is an invalid user field so raise Program_Error. */
a1ab4c31 2135 return build1 (NULL_EXPR, TREE_TYPE (field),
49c2c7f6
EB
2136 build_call_raise (PE_Explicit_Raise, Empty,
2137 N_Raise_Program_Error));
a1ab4c31 2138}
ce2d0ce2 2139
ff346f70
OH
2140/* Helper for build_call_alloc_dealloc, with arguments to be interpreted
2141 identically. Process the case where a GNAT_PROC to call is provided. */
a1ab4c31 2142
ff346f70
OH
2143static inline tree
2144build_call_alloc_dealloc_proc (tree gnu_obj, tree gnu_size, tree gnu_type,
2145 Entity_Id gnat_proc, Entity_Id gnat_pool)
a1ab4c31 2146{
ff346f70 2147 tree gnu_proc = gnat_to_gnu (gnat_proc);
3202e9f8
MP
2148 tree gnu_align = size_int (TYPE_ALIGN (gnu_type) / BITS_PER_UNIT);
2149
ff346f70 2150 tree gnu_call;
a1ab4c31 2151
c697f593
EB
2152 /* A storage pool's underlying type is a record type for both predefined
2153 storage pools and GNAT simple storage pools. The return and secondary
2154 stacks use the same mechanism, but their pool object is an integer. */
7b50c4a3 2155 if (Is_Record_Type (Underlying_Type (Etype (gnat_pool))))
a1ab4c31 2156 {
ff346f70
OH
2157 /* The size is the third parameter; the alignment parameter
2158 has the same type. */
2159 Entity_Id gnat_size_type
2160 = Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc))));
2161 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
2162
2163 tree gnu_pool = gnat_to_gnu (gnat_pool);
2164 tree gnu_pool_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_pool);
ff346f70
OH
2165
2166 gnu_size = convert (gnu_size_type, gnu_size);
2167 gnu_align = convert (gnu_size_type, gnu_align);
2168
2169 /* The first arg is always the address of the storage pool; next
2170 comes the address of the object, for a deallocator, then the
2171 size and alignment. */
2172 if (gnu_obj)
dddf8120
EB
2173 gnu_call = build_call_n_expr (gnu_proc, 4, gnu_pool_addr, gnu_obj,
2174 gnu_size, gnu_align);
ff346f70 2175 else
dddf8120
EB
2176 gnu_call = build_call_n_expr (gnu_proc, 3, gnu_pool_addr,
2177 gnu_size, gnu_align);
ff346f70 2178 }
a1ab4c31 2179
ff346f70
OH
2180 else
2181 {
2182 /* The size is the second parameter. */
2183 Entity_Id gnat_size_type
2184 = Etype (Next_Formal (First_Formal (gnat_proc)));
2185 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
2186
2187 gnu_size = convert (gnu_size_type, gnu_size);
3202e9f8 2188 gnu_align = convert (gnu_size_type, gnu_align);
ff346f70 2189
c697f593
EB
2190 if (DECL_BUILT_IN_CLASS (gnu_proc) == BUILT_IN_FRONTEND
2191 && DECL_FE_FUNCTION_CODE (gnu_proc) == BUILT_IN_RETURN_SLOT)
2192 {
2193 /* This must be an allocation of the return stack in a function that
2194 returns by invisible reference. */
2195 gcc_assert (!gnu_obj);
2196 gcc_assert (current_function_decl
2197 && TREE_ADDRESSABLE (TREE_TYPE (current_function_decl)));
2198 tree gnu_ret_size;
2199
2200 gnu_call = DECL_RESULT (current_function_decl);
2201
3202e9f8 2202 /* The allocation has already been done by the caller so we check that
c697f593
EB
2203 we are not going to overflow the return slot. */
2204 if (TYPE_CI_CO_LIST (TREE_TYPE (current_function_decl)))
2205 gnu_ret_size
2206 = TYPE_SIZE_UNIT
2207 (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (TREE_TYPE (gnu_call)))));
2208 else
2209 gnu_ret_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (gnu_call)));
2210
2211 gnu_call
2212 = fold_build3 (COND_EXPR, TREE_TYPE (gnu_call),
2213 fold_build2 (LE_EXPR, boolean_type_node,
2214 fold_convert (sizetype, gnu_size),
2215 gnu_ret_size),
2216 gnu_call,
2217 build_call_raise (PE_Explicit_Raise, Empty,
2218 N_Raise_Program_Error));
2219 }
2220
ff346f70
OH
2221 /* The first arg is the address of the object, for a deallocator,
2222 then the size. */
c697f593 2223 else if (gnu_obj)
dddf8120 2224 gnu_call = build_call_n_expr (gnu_proc, 2, gnu_obj, gnu_size);
c697f593 2225
a1ab4c31 2226 else
3202e9f8 2227 gnu_call = build_call_n_expr (gnu_proc, 2, gnu_size, gnu_align);
a1ab4c31
AC
2228 }
2229
ff346f70
OH
2230 return gnu_call;
2231}
2232
2233/* Helper for build_call_alloc_dealloc, to build and return an allocator for
2234 DATA_SIZE bytes aimed at containing a DATA_TYPE object, using the default
2235 __gnat_malloc allocator. Honor DATA_TYPE alignments greater than what the
2236 latter offers. */
2237
2238static inline tree
2239maybe_wrap_malloc (tree data_size, tree data_type, Node_Id gnat_node)
2240{
2241 /* When the DATA_TYPE alignment is stricter than what malloc offers
2242 (super-aligned case), we allocate an "aligning" wrapper type and return
2243 the address of its single data field with the malloc's return value
2244 stored just in front. */
2245
2246 unsigned int data_align = TYPE_ALIGN (data_type);
c5ecd6b7
AC
2247 unsigned int system_allocator_alignment
2248 = get_target_system_allocator_alignment () * BITS_PER_UNIT;
ff346f70
OH
2249
2250 tree aligning_type
c5ecd6b7 2251 = ((data_align > system_allocator_alignment)
ff346f70 2252 ? make_aligning_type (data_type, data_align, data_size,
c5ecd6b7 2253 system_allocator_alignment,
0746af5e
EB
2254 POINTER_SIZE / BITS_PER_UNIT,
2255 gnat_node)
ff346f70 2256 : NULL_TREE);
a1ab4c31 2257
ff346f70
OH
2258 tree size_to_malloc
2259 = aligning_type ? TYPE_SIZE_UNIT (aligning_type) : data_size;
6f61bd41 2260
1eb58520 2261 tree malloc_ptr = build_call_n_expr (malloc_decl, 1, size_to_malloc);
ff346f70 2262
1f3f64b9
EB
2263 Check_Restriction_No_Dependence_On_System (Name_Memory, gnat_node);
2264
ff346f70
OH
2265 if (aligning_type)
2266 {
2267 /* Latch malloc's return value and get a pointer to the aligning field
2268 first. */
7d7a1fe8 2269 tree storage_ptr = gnat_protect_expr (malloc_ptr);
ff346f70
OH
2270
2271 tree aligning_record_addr
2272 = convert (build_pointer_type (aligning_type), storage_ptr);
2273
2274 tree aligning_record
2275 = build_unary_op (INDIRECT_REF, NULL_TREE, aligning_record_addr);
2276
2277 tree aligning_field
64235766
EB
2278 = build_component_ref (aligning_record, TYPE_FIELDS (aligning_type),
2279 false);
ff346f70
OH
2280
2281 tree aligning_field_addr
2282 = build_unary_op (ADDR_EXPR, NULL_TREE, aligning_field);
2283
2284 /* Then arrange to store the allocator's return value ahead
2285 and return. */
2286 tree storage_ptr_slot_addr
1366ba41
EB
2287 = build_binary_op (POINTER_PLUS_EXPR, ptr_type_node,
2288 convert (ptr_type_node, aligning_field_addr),
437926c0
JJ
2289 size_int (-(HOST_WIDE_INT) POINTER_SIZE
2290 / BITS_PER_UNIT));
ff346f70
OH
2291
2292 tree storage_ptr_slot
2293 = build_unary_op (INDIRECT_REF, NULL_TREE,
1366ba41 2294 convert (build_pointer_type (ptr_type_node),
ff346f70
OH
2295 storage_ptr_slot_addr));
2296
2297 return
2298 build2 (COMPOUND_EXPR, TREE_TYPE (aligning_field_addr),
0d24bf76 2299 build_binary_op (INIT_EXPR, NULL_TREE,
ff346f70
OH
2300 storage_ptr_slot, storage_ptr),
2301 aligning_field_addr);
2302 }
2303 else
2304 return malloc_ptr;
2305}
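
/* Illustrative layout in the super-aligned case:

     malloc result            value returned to the caller
     v                        v
     | padding | saved malloc result | data aligned for DATA_TYPE |

   The allocator's return value is stored in the pointer-sized slot just
   in front of the data field, where maybe_wrap_free retrieves it.  */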
2306
2307/* Helper for build_call_alloc_dealloc, to release a DATA_TYPE object
2308 designated by DATA_PTR using the __gnat_free entry point. */
2309
2310static inline tree
1f3f64b9 2311maybe_wrap_free (tree data_ptr, tree data_type, Node_Id gnat_node)
ff346f70
OH
2312{
2313 /* In the regular alignment case, we pass the data pointer straight to free.
2314 In the superaligned case, we need to retrieve the initial allocator
2315 return value, stored in front of the data block at allocation time. */
2316
2317 unsigned int data_align = TYPE_ALIGN (data_type);
c5ecd6b7
AC
2318 unsigned int system_allocator_alignment
2319 = get_target_system_allocator_alignment () * BITS_PER_UNIT;
6f61bd41 2320
ff346f70
OH
2321 tree free_ptr;
2322
1f3f64b9
EB
2323 Check_Restriction_No_Dependence_On_System (Name_Memory, gnat_node);
2324
c5ecd6b7 2325 if (data_align > system_allocator_alignment)
ff346f70
OH
2326 {
2327 /* DATA_FRONT_PTR (void *)
2328 = (void *)DATA_PTR - (void *)sizeof (void *)) */
2329 tree data_front_ptr
2330 = build_binary_op
1366ba41
EB
2331 (POINTER_PLUS_EXPR, ptr_type_node,
2332 convert (ptr_type_node, data_ptr),
437926c0 2333 size_int (-(HOST_WIDE_INT) POINTER_SIZE / BITS_PER_UNIT));
ff346f70
OH
2334
2335 /* FREE_PTR (void *) = *(void **)DATA_FRONT_PTR */
2336 free_ptr
2337 = build_unary_op
2338 (INDIRECT_REF, NULL_TREE,
1366ba41 2339 convert (build_pointer_type (ptr_type_node), data_front_ptr));
ff346f70
OH
2340 }
2341 else
2342 free_ptr = data_ptr;
2343
dddf8120 2344 return build_call_n_expr (free_decl, 1, free_ptr);
ff346f70
OH
2345}
2346
2347/* Build a GCC tree to call an allocation or deallocation function.
2348 If GNU_OBJ is nonzero, it is an object to deallocate. Otherwise,
3202e9f8 2349 generate an allocation.
ff346f70
OH
2350
2351 GNU_SIZE is the number of bytes to allocate and GNU_TYPE is the contained
2352 object type, used to determine the to-be-honored address alignment.
2353 GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the storage
2354 pool to use. If not present, malloc and free are used. GNAT_NODE is used
2355 to provide an error location for restriction violation messages. */
2356
2357tree
2358build_call_alloc_dealloc (tree gnu_obj, tree gnu_size, tree gnu_type,
2359 Entity_Id gnat_proc, Entity_Id gnat_pool,
2360 Node_Id gnat_node)
2361{
ff346f70
OH
2362 /* Explicit proc to call?  This one is assumed to deal with the type
2363 alignment constraints. */
2364 if (Present (gnat_proc))
2365 return build_call_alloc_dealloc_proc (gnu_obj, gnu_size, gnu_type,
2366 gnat_proc, gnat_pool);
2367
2368 /* Otherwise, object to "free" or "malloc" with possible special processing
2369 for alignments stricter than what the default allocator honors. */
2370 else if (gnu_obj)
1f3f64b9 2371 return maybe_wrap_free (gnu_obj, gnu_type, gnat_node);
ff346f70
OH
2372 else
2373 {
2374 /* Assert that we can no longer be called with this special pool. */
2375 gcc_assert (gnat_pool != -1);
2376
2377 /* Check that we aren't violating the associated restriction. */
2378 if (!(Nkind (gnat_node) == N_Allocator && Comes_From_Source (gnat_node)))
9733088f
TG
2379 {
2380 Check_No_Implicit_Heap_Alloc (gnat_node);
2381 if (Has_Task (Etype (gnat_node)))
2382 Check_No_Implicit_Task_Alloc (gnat_node);
2383 if (Has_Protected (Etype (gnat_node)))
2384 Check_No_Implicit_Protected_Alloc (gnat_node);
2385 }
ff346f70
OH
2386 return maybe_wrap_malloc (gnu_size, gnu_type, gnat_node);
2387 }
a1ab4c31 2388}
ce2d0ce2 2389
bdbebf66 2390/* Build a GCC tree that corresponds to allocating an object of TYPE whose
a1ab4c31 2391 initial value is INIT, if INIT is nonzero. Convert the expression to
bdbebf66 2392 RESULT_TYPE, which must be some pointer type, and return the result.
6f61bd41 2393
a1ab4c31
AC
2394 GNAT_PROC and GNAT_POOL optionally give the procedure to call and
2395 the storage pool to use. GNAT_NODE is used to provide an error
6f61bd41 2396 location for restriction violation messages. If IGNORE_INIT_TYPE is
a1ab4c31
AC
2397 true, ignore the type of INIT for the purpose of determining the size;
2398 this will cause the maximum size to be allocated if TYPE is of
2399 self-referential size. */
2400
2401tree
2402build_allocator (tree type, tree init, tree result_type, Entity_Id gnat_proc,
2403 Entity_Id gnat_pool, Node_Id gnat_node, bool ignore_init_type)
2404{
544d14e1
EB
2405 const bool pool_is_storage_model
2406 = Present (gnat_pool)
2407 && Has_Storage_Model_Type_Aspect (Etype (gnat_pool))
2408 && Present (Storage_Model_Copy_To (gnat_pool));
bdbebf66 2409 tree size, storage, storage_deref, storage_init;
a1ab4c31
AC
2410
2411 /* If the initializer, if present, is a NULL_EXPR, just return a new one. */
2412 if (init && TREE_CODE (init) == NULL_EXPR)
2413 return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));
2414
f4657d60
EB
2415 /* If we are just annotating types, also return a NULL_EXPR. */
2416 else if (type_annotate_only)
2417 return build1 (NULL_EXPR, result_type,
2418 build_call_raise (CE_Range_Check_Failed, gnat_node,
2419 N_Raise_Constraint_Error));
2420
7e169899
EB
2421 /* If the initializer, if present, is a COND_EXPR, deal with each branch. */
2422 else if (init && TREE_CODE (init) == COND_EXPR)
2423 return build3 (COND_EXPR, result_type, TREE_OPERAND (init, 0),
2424 build_allocator (type, TREE_OPERAND (init, 1), result_type,
2425 gnat_proc, gnat_pool, gnat_node,
2426 ignore_init_type),
2427 build_allocator (type, TREE_OPERAND (init, 2), result_type,
2428 gnat_proc, gnat_pool, gnat_node,
2429 ignore_init_type));
2430
a1ab4c31
AC
2431 /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
2432 sizes of the object and its template. Allocate the whole thing and
2433 fill in the parts that are known. */
315cff15 2434 else if (TYPE_IS_FAT_OR_THIN_POINTER_P (result_type))
a1ab4c31
AC
2435 {
2436 tree storage_type
2437 = build_unc_object_type_from_ptr (result_type, type,
928dfa4b 2438 get_identifier ("ALLOC"), false);
a1ab4c31
AC
2439 tree template_type = TREE_TYPE (TYPE_FIELDS (storage_type));
2440 tree storage_ptr_type = build_pointer_type (storage_type);
544d14e1 2441 tree lhs, rhs;
a1ab4c31 2442
b2278f6b
EB
2443 size = TYPE_SIZE_UNIT (storage_type);
2444 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, init);
a1ab4c31 2445
bdbebf66 2446 /* If the size overflows, pass -1 so Storage_Error will be raised. */
ce3da0d0 2447 if (TREE_CODE (size) == INTEGER_CST && !valid_constant_size_p (size))
f54ee980 2448 size = size_int (-1);
a1ab4c31 2449
ff346f70 2450 storage = build_call_alloc_dealloc (NULL_TREE, size, storage_type,
a1ab4c31 2451 gnat_proc, gnat_pool, gnat_node);
7d7a1fe8 2452 storage = convert (storage_ptr_type, gnat_protect_expr (storage));
bdbebf66
EB
2453 storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
2454 TREE_THIS_NOTRAP (storage_deref) = 1;
a1ab4c31 2455
8ffac116
EB
2456 /* If there is an initializing expression, then make a constructor for
2457 the entire object including the bounds and copy it into the object.
b2278f6b
EB
2458 If there is no initializing expression, just set the bounds. Note
2459 that, if we have a storage model, we need to copy the initializing
2460 expression separately from the bounds. */
2461 if (init && !pool_is_storage_model)
a1ab4c31 2462 {
9771b263
DN
2463 vec<constructor_elt, va_gc> *v;
2464 vec_alloc (v, 2);
0e228dd9
NF
2465
2466 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (storage_type),
2467 build_template (template_type, type, init));
910ad8de 2468 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (storage_type)),
0e228dd9 2469 init);
544d14e1
EB
2470
2471 lhs = storage_deref;
2472 rhs = gnat_build_constructor (storage_type, v);
a1ab4c31
AC
2473 }
2474 else
544d14e1
EB
2475 {
2476 lhs = build_component_ref (storage_deref, TYPE_FIELDS (storage_type),
2477 false);
b2278f6b 2478 rhs = build_template (template_type, type, init);
544d14e1
EB
2479 }
2480
2481 if (pool_is_storage_model)
b2278f6b
EB
2482 {
2483 storage_init = build_storage_model_store (gnat_pool, lhs, rhs);
2484 if (init)
2485 {
2486 start_stmt_group ();
2487 add_stmt (storage_init);
2488 lhs
2489 = build_component_ref (storage_deref,
2490 DECL_CHAIN (TYPE_FIELDS (storage_type)),
2491 false);
2492 rhs = init;
2493 size = TYPE_SIZE_UNIT (TREE_TYPE (lhs));
2494 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, init);
2495 tree t = build_storage_model_store (gnat_pool, lhs, rhs, size);
2496 add_stmt (t);
2497 storage_init = end_stmt_group ();
2498 }
2499 }
544d14e1
EB
2500 else
2501 storage_init = build_binary_op (INIT_EXPR, NULL_TREE, lhs, rhs);
bdbebf66
EB
2502
2503 return build2 (COMPOUND_EXPR, result_type,
2504 storage_init, convert (result_type, storage));
a1ab4c31
AC
2505 }
2506
bdbebf66
EB
2507 size = TYPE_SIZE_UNIT (type);
2508
a1ab4c31
AC
2509 /* If we have an initializing expression, see if its size is simpler
2510 than the size from the type. */
2511 if (!ignore_init_type && init && TYPE_SIZE_UNIT (TREE_TYPE (init))
2512 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init))) == INTEGER_CST
2513 || CONTAINS_PLACEHOLDER_P (size)))
2514 size = TYPE_SIZE_UNIT (TREE_TYPE (init));
2515
2516 /* If the size is still self-referential, reference the initializing
683ccd05
EB
2517 expression, if it is present. If not, this must have been a call
2518 to allocate a library-level object, in which case we just use the
2519 maximum size. */
2520 if (!ignore_init_type && init)
2521 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, init);
2522 else if (CONTAINS_PLACEHOLDER_P (size))
2523 size = max_size (size, true);
a1ab4c31 2524
bdbebf66 2525 /* If the size overflows, pass -1 so Storage_Error will be raised. */
ce3da0d0 2526 if (TREE_CODE (size) == INTEGER_CST && !valid_constant_size_p (size))
f54ee980 2527 size = size_int (-1);
a1ab4c31 2528
bdbebf66
EB
2529 storage = convert (result_type,
2530 build_call_alloc_dealloc (NULL_TREE, size, type,
2531 gnat_proc, gnat_pool,
2532 gnat_node));
a1ab4c31 2533
ced57283
EB
2534 /* If we have an initial value, protect the new address, assign the value
2535 and return the address with a COMPOUND_EXPR. */
a1ab4c31
AC
2536 if (init)
2537 {
bdbebf66
EB
2538 storage = gnat_protect_expr (storage);
2539 storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
2540 TREE_THIS_NOTRAP (storage_deref) = 1;
544d14e1
EB
2541 if (pool_is_storage_model)
2542 storage_init
b2278f6b 2543 = build_storage_model_store (gnat_pool, storage_deref, init, size);
544d14e1
EB
2544 else
2545 storage_init
2546 = build_binary_op (INIT_EXPR, NULL_TREE, storage_deref, init);
bdbebf66 2547 return build2 (COMPOUND_EXPR, result_type, storage_init, storage);
a1ab4c31
AC
2548 }
2549
bdbebf66 2550 return storage;
a1ab4c31 2551}
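
/* In the common case with an initial value and no storage model, the
   result built above is thus of the form

     (RESULT_TYPE) (TMP = alloc (SIZE), *TMP = INIT, TMP)

   where TMP is protected so that it is evaluated only once.  */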
ce2d0ce2 2552
544d14e1
EB
2553/* Build a call to a copy procedure of a storage model given by an object.
2554 DEST, SRC and SIZE are as for a call to memcpy. GNAT_SMO is the entity
2555 for the storage model object and COPY_TO says which procedure to use. */
2556
2557static tree
2558build_storage_model_copy (Entity_Id gnat_smo, tree dest, tree src, tree size,
2559 bool copy_to)
2560{
2561 const Entity_Id gnat_copy_proc
2562 = copy_to
2563 ? Storage_Model_Copy_To (gnat_smo)
2564 : Storage_Model_Copy_From (gnat_smo);
2565 tree gnu_copy_proc = gnat_to_gnu (gnat_copy_proc);
2566 tree gnu_param_type_list = TYPE_ARG_TYPES (TREE_TYPE (gnu_copy_proc));
2567 tree t1 = TREE_VALUE (gnu_param_type_list);
2568 tree t2 = TREE_VALUE (TREE_CHAIN (gnu_param_type_list));
2569 tree t3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (gnu_param_type_list)));
2570 tree t4
2571 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (gnu_param_type_list))));
2572
2573 return
2574 build_call_n_expr (gnu_copy_proc,
2575 4,
2576 build_unary_op (ADDR_EXPR, t1, gnat_to_gnu (gnat_smo)),
2577 build_unary_op (ADDR_EXPR, t2, dest),
2578 build_unary_op (ADDR_EXPR, t3, src),
2579 convert (t4, size));
2580}
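
/* The call built above thus has the form

     Copy_Proc (&SMO, &DEST, &SRC, SIZE)

   with every actual converted to the type of the matching formal.  */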
2581
2582/* Build a load of SRC using the storage model of GNAT_SMO. */
2583
2584tree
2585build_storage_model_load (Entity_Id gnat_smo, tree src)
2586{
2587 tree ret = build2 (LOAD_EXPR, TREE_TYPE (src), src, NULL_TREE);
2588
2589 /* Unconstrained array references have no size, so we need to store the
2590 storage model object for future processing by the machinery. */
2591 if (TREE_CODE (src) == UNCONSTRAINED_ARRAY_REF)
2592 TREE_OPERAND (ret, 1) = build_int_cst (integer_type_node, gnat_smo);
2593 else
2594 TREE_OPERAND (ret, 1) = build_storage_model_load (gnat_smo, src, src);
2595
2596 return ret;
2597}
2598
2599/* Build a load of SRC into DEST using the storage model of GNAT_SMO.
2600 If SIZE is specified, use it, otherwise use the size of SRC. */
2601
2602tree
2603build_storage_model_load (Entity_Id gnat_smo, tree dest, tree src, tree size)
2604{
2605 gcc_assert (TREE_CODE (src) != LOAD_EXPR);
2606
2607 if (!size)
2608 {
2609 size = TYPE_SIZE_UNIT (TREE_TYPE (src));
2610 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, src);
2611 size = INSTANTIATE_LOAD_IN_EXPR (size, gnat_smo);
2612 }
2613
2614 return build_storage_model_copy (gnat_smo, dest, src, size, false);
2615}
2616
2617/* Build a store of SRC into DEST using the storage model of GNAT_SMO.
2618 If SIZE is specified, use it, otherwise use the size of DEST. */
2619
2620tree
2621build_storage_model_store (Entity_Id gnat_smo, tree dest, tree src, tree size)
2622{
2623 gcc_assert (TREE_CODE (src) != LOAD_EXPR);
2624
2625 if (!size)
2626 {
2627 size = TYPE_SIZE_UNIT (TREE_TYPE (dest));
2628 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, dest);
2629 size = INSTANTIATE_LOAD_IN_EXPR (size, gnat_smo);
2630 }
2631
2632 return build_storage_model_copy (gnat_smo, dest, src, size, true);
2633}
2634
2635/* Given a tree EXP, instantiate occurrences of LOAD_EXPR in it and associate
2636 them with the storage model of GNAT_SMO. */
2637
2638tree
2639instantiate_load_in_expr (tree exp, Entity_Id gnat_smo)
2640{
2641 const enum tree_code code = TREE_CODE (exp);
2642 tree type = TREE_TYPE (exp);
2643 tree op0, op1, op2, op3;
2644 tree new_tree;
2645
2646 /* We handle TREE_LIST and COMPONENT_REF separately. */
2647 if (code == TREE_LIST)
2648 {
2649 op0 = INSTANTIATE_LOAD_IN_EXPR (TREE_CHAIN (exp), gnat_smo);
2650 op1 = INSTANTIATE_LOAD_IN_EXPR (TREE_VALUE (exp), gnat_smo);
2651 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
2652 return exp;
2653
2654 return tree_cons (TREE_PURPOSE (exp), op1, op0);
2655 }
2656 else if (code == COMPONENT_REF)
2657 {
2658 /* The field. */
2659 op1 = TREE_OPERAND (exp, 1);
2660
2661 /* If it is a discriminant or equivalent, a LOAD_EXPR is needed. */
2662 if (DECL_DISCRIMINANT_NUMBER (op1))
2663 return build_storage_model_load (gnat_smo, exp);
2664
2665 op0 = INSTANTIATE_LOAD_IN_EXPR (TREE_OPERAND (exp, 0), gnat_smo);
2666 if (op0 == TREE_OPERAND (exp, 0))
2667 return exp;
2668
2669 new_tree = fold_build3 (COMPONENT_REF, type, op0, op1, NULL_TREE);
2670 }
2671 else
2672 switch (TREE_CODE_CLASS (code))
2673 {
2674 case tcc_constant:
2675 case tcc_declaration:
2676 return exp;
2677
2678 case tcc_expression:
2679 if (code == LOAD_EXPR)
2680 return exp;
2681
2682 /* Fall through. */
2683
2684 case tcc_exceptional:
2685 case tcc_unary:
2686 case tcc_binary:
2687 case tcc_comparison:
2688 case tcc_reference:
2689 switch (TREE_CODE_LENGTH (code))
2690 {
2691 case 0:
2692 return exp;
2693
2694 case 1:
2695 op0 = INSTANTIATE_LOAD_IN_EXPR (TREE_OPERAND (exp, 0), gnat_smo);
2696 if (op0 == TREE_OPERAND (exp, 0))
2697 return exp;
2698
2699 new_tree = fold_build1 (code, type, op0);
2700 break;
2701
2702 case 2:
2703 op0 = INSTANTIATE_LOAD_IN_EXPR (TREE_OPERAND (exp, 0), gnat_smo);
2704 op1 = INSTANTIATE_LOAD_IN_EXPR (TREE_OPERAND (exp, 1), gnat_smo);
2705
2706 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
2707 return exp;
2708
2709 new_tree = fold_build2 (code, type, op0, op1);
2710 break;
2711
2712 case 3:
2713 op0 = INSTANTIATE_LOAD_IN_EXPR (TREE_OPERAND (exp, 0), gnat_smo);
2714 op1 = INSTANTIATE_LOAD_IN_EXPR (TREE_OPERAND (exp, 1), gnat_smo);
2715 op2 = INSTANTIATE_LOAD_IN_EXPR (TREE_OPERAND (exp, 2), gnat_smo);
2716
2717 if (op0 == TREE_OPERAND (exp, 0)
2718 && op1 == TREE_OPERAND (exp, 1)
2719 && op2 == TREE_OPERAND (exp, 2))
2720 return exp;
2721
2722 new_tree = fold_build3 (code, type, op0, op1, op2);
2723 break;
2724
2725 case 4:
2726 op0 = INSTANTIATE_LOAD_IN_EXPR (TREE_OPERAND (exp, 0), gnat_smo);
2727 op1 = INSTANTIATE_LOAD_IN_EXPR (TREE_OPERAND (exp, 1), gnat_smo);
2728 op2 = INSTANTIATE_LOAD_IN_EXPR (TREE_OPERAND (exp, 2), gnat_smo);
2729 op3 = INSTANTIATE_LOAD_IN_EXPR (TREE_OPERAND (exp, 3), gnat_smo);
2730
2731 if (op0 == TREE_OPERAND (exp, 0)
2732 && op1 == TREE_OPERAND (exp, 1)
2733 && op2 == TREE_OPERAND (exp, 2)
2734 && op3 == TREE_OPERAND (exp, 3))
2735 return exp;
2736
2737 new_tree = fold (build4 (code, type, op0, op1, op2, op3));
2738 break;
2739
2740 default:
2741 gcc_unreachable ();
2742 }
2743 break;
2744
2745 case tcc_vl_exp:
2746 {
2747 gcc_assert (code == CALL_EXPR);
2748
2749 const int n = call_expr_nargs (exp);
2750 gcc_assert (n > 0);
2751 tree *argarray = XALLOCAVEC (tree, n);
2752 for (int i = 0; i < n; i++)
2753 argarray[i]
2754 = INSTANTIATE_LOAD_IN_EXPR (CALL_EXPR_ARG (exp, i), gnat_smo);
2755
2756 for (int i = 0; i < n; i++)
2757 if (argarray[i] != CALL_EXPR_ARG (exp, i))
2758 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
2759
2760 return exp;
2761 }
2762
2763 default:
2764 gcc_unreachable ();
2765 }
2766
2767 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
2768
2769 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
2770 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
2771
2772 return new_tree;
2773}
2774
2775/* Given an array or slice reference, instantiate occurrences of LOAD_EXPR in
2776 it and associate them with the storage model of GNAT_SMO. */
2777
2778void
2779instantiate_load_in_array_ref (tree ref, Entity_Id gnat_smo)
2780{
2781 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
2782 tree elem_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
2783
2784 TREE_OPERAND (ref, 2)
2785 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), ref);
2786 TREE_OPERAND (ref, 2)
2787 = INSTANTIATE_LOAD_IN_EXPR (TREE_OPERAND (ref, 2), gnat_smo);
2788
2789 TREE_OPERAND (ref, 3)
2790 = size_binop (EXACT_DIV_EXPR,
2791 SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elem_type),
2792 ref),
2793 size_int (TYPE_ALIGN_UNIT (elem_type)));
2794 TREE_OPERAND (ref, 3)
2795 = INSTANTIATE_LOAD_IN_EXPR (TREE_OPERAND (ref, 3), gnat_smo);
2796}
2797
ced57283 2798/* Indicate that we need to take the address of T and that it therefore
7c775aca 2799 should not be allocated in a register. Return true if successful. */
a1ab4c31
AC
2800
2801bool
ced57283 2802gnat_mark_addressable (tree t)
a1ab4c31 2803{
ced57283
EB
2804 while (true)
2805 switch (TREE_CODE (t))
a1ab4c31
AC
2806 {
2807 case ADDR_EXPR:
2808 case COMPONENT_REF:
2809 case ARRAY_REF:
2810 case ARRAY_RANGE_REF:
2811 case REALPART_EXPR:
2812 case IMAGPART_EXPR:
2813 case VIEW_CONVERT_EXPR:
2814 case NON_LVALUE_EXPR:
2815 CASE_CONVERT:
ced57283 2816 t = TREE_OPERAND (t, 0);
a1ab4c31
AC
2817 break;
2818
0b3467c4
EB
2819 case COMPOUND_EXPR:
2820 t = TREE_OPERAND (t, 1);
2821 break;
2822
a1ab4c31 2823 case CONSTRUCTOR:
ced57283 2824 TREE_ADDRESSABLE (t) = 1;
a1ab4c31
AC
2825 return true;
2826
2827 case VAR_DECL:
2828 case PARM_DECL:
2829 case RESULT_DECL:
ced57283 2830 TREE_ADDRESSABLE (t) = 1;
a1ab4c31
AC
2831 return true;
2832
2833 case FUNCTION_DECL:
ced57283 2834 TREE_ADDRESSABLE (t) = 1;
a1ab4c31
AC
2835 return true;
2836
2837 case CONST_DECL:
ced57283
EB
2838 return DECL_CONST_CORRESPONDING_VAR (t)
2839 && gnat_mark_addressable (DECL_CONST_CORRESPONDING_VAR (t));
2840
a1ab4c31
AC
2841 default:
2842 return true;
2843 }
2844}
ce2d0ce2 2845
7194767c
EB
2846/* Return true if EXP is a stable expression for the purpose of the functions
2847 below and, therefore, can be returned unmodified by them. We accept things
2848 that are actual constants or that have already been handled. */
2849
2850static bool
2851gnat_stable_expr_p (tree exp)
2852{
2853 enum tree_code code = TREE_CODE (exp);
2854 return TREE_CONSTANT (exp) || code == NULL_EXPR || code == SAVE_EXPR;
2855}
2856
e53b6e56 2857/* Save EXP for later use or reuse. This is equivalent to save_expr in tree.cc
7d7a1fe8
EB
2858 but we know how to handle our own nodes. */
2859
2860tree
2861gnat_save_expr (tree exp)
2862{
2863 tree type = TREE_TYPE (exp);
2864 enum tree_code code = TREE_CODE (exp);
2865
7194767c 2866 if (gnat_stable_expr_p (exp))
7d7a1fe8
EB
2867 return exp;
2868
2869 if (code == UNCONSTRAINED_ARRAY_REF)
2870 {
2871 tree t = build1 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)));
2872 TREE_READONLY (t) = TYPE_READONLY (type);
2873 return t;
2874 }
2875
2876 /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
2877 This may be more efficient, but will also allow us to more easily find
2878 the match for the PLACEHOLDER_EXPR. */
2879 if (code == COMPONENT_REF
2880 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2881 return build3 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)),
552cc590 2882 TREE_OPERAND (exp, 1), NULL_TREE);
7d7a1fe8
EB
2883
2884 return save_expr (exp);
2885}
2886
2887/* Protect EXP for immediate reuse. This is a variant of gnat_save_expr that
2888 is optimized under the assumption that EXP's value doesn't change before
2889 its subsequent reuse(s) except through its potential reevaluation. */
2890
2891tree
2892gnat_protect_expr (tree exp)
2893{
2894 tree type = TREE_TYPE (exp);
2895 enum tree_code code = TREE_CODE (exp);
2896
7194767c 2897 if (gnat_stable_expr_p (exp))
7d7a1fe8
EB
2898 return exp;
2899
308e6f3a 2900 /* If EXP has no side effects, we theoretically don't need to do anything.
7d7a1fe8
EB
2901 However, we may be recursively passed more and more complex expressions
2902 involving checks which will be reused multiple times and eventually be
2903 unshared for gimplification; in order to avoid a complexity explosion
2904 at that point, we protect any expressions more complex than a simple
2905 arithmetic expression. */
58c8f770
EB
2906 if (!TREE_SIDE_EFFECTS (exp))
2907 {
2908 tree inner = skip_simple_arithmetic (exp);
2909 if (!EXPR_P (inner) || REFERENCE_CLASS_P (inner))
2910 return exp;
2911 }
7d7a1fe8
EB
2912
2913 /* If this is a conversion, protect what's inside the conversion. */
2914 if (code == NON_LVALUE_EXPR
2915 || CONVERT_EXPR_CODE_P (code)
2916 || code == VIEW_CONVERT_EXPR)
2917 return build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
2918
2919 /* If we're indirectly referencing something, we only need to protect the
2920 address since the data itself can't change in these situations. */
2921 if (code == INDIRECT_REF || code == UNCONSTRAINED_ARRAY_REF)
2922 {
2923 tree t = build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
2924 TREE_READONLY (t) = TYPE_READONLY (type);
2925 return t;
2926 }
2927
96826e28
EB
2928 /* Likewise if we're indirectly referencing part of something. */
2929 if (code == COMPONENT_REF
04682fe7 2930 && INDIRECT_REF_P (TREE_OPERAND (exp, 0)))
96826e28
EB
2931 return build3 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)),
2932 TREE_OPERAND (exp, 1), NULL_TREE);
2933
7d7a1fe8
EB
2934 /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
2935 This may be more efficient, but will also allow us to more easily find
2936 the match for the PLACEHOLDER_EXPR. */
2937 if (code == COMPONENT_REF
2938 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2939 return build3 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)),
552cc590 2940 TREE_OPERAND (exp, 1), NULL_TREE);
7d7a1fe8 2941
bf17fe3f
EB
2942 /* If this is a fat pointer or a scalar, just make a SAVE_EXPR. Likewise
2943 for a CALL_EXPR as large objects are returned via invisible reference
2944 in most ABIs so the temporary will directly be filled by the callee. */
7d7a1fe8 2945 if (TYPE_IS_FAT_POINTER_P (type)
bf17fe3f 2946 || !AGGREGATE_TYPE_P (type)
7d7a1fe8
EB
2947 || code == CALL_EXPR)
2948 return save_expr (exp);
2949
2950 /* Otherwise reference, protect the address and dereference. */
2951 return
2952 build_unary_op (INDIRECT_REF, type,
96826e28 2953 save_expr (build_unary_op (ADDR_EXPR, NULL_TREE, exp)));
7d7a1fe8
EB
2954}
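
/* For instance, protecting *P only saves the pointer, giving
   *SAVE_EXPR (P), since the pointed-to data cannot change here, whereas
   protecting a non-fat-pointer aggregate takes its address and
   dereferences the saved address.  */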
2955
/* This is equivalent to stabilize_reference_1 in tree.cc, but we take an
   extra argument to force evaluation of everything.  */

static tree
gnat_stabilize_reference_1 (tree e, void *data)
{
  const bool force = *(bool *)data;
  enum tree_code code = TREE_CODE (e);
  tree type = TREE_TYPE (e);
  tree result;

  if (gnat_stable_expr_p (e))
    return e;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_exceptional:
    case tcc_declaration:
    case tcc_comparison:
    case tcc_expression:
    case tcc_reference:
    case tcc_vl_exp:
      /* If this is a COMPONENT_REF of a fat pointer, save the entire
         fat pointer.  This may be more efficient, but will also allow
         us to more easily find the match for the PLACEHOLDER_EXPR.  */
      if (code == COMPONENT_REF
          && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (e, 0))))
        result
          = build3 (code, type,
                    gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data),
                    TREE_OPERAND (e, 1), NULL_TREE);
      /* If the expression has side effects or evaluation is forced, encase
         it in a SAVE_EXPR so that it will only be evaluated once.  The
         tcc_reference and tcc_comparison classes could be handled as below,
         but it is generally faster to only evaluate them once.  */
      else if (TREE_SIDE_EFFECTS (e) || force)
        return save_expr (e);
      else
        return e;
      break;

    case tcc_binary:
      /* Recursively stabilize each operand.  */
      result
        = build2 (code, type,
                  gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data),
                  gnat_stabilize_reference_1 (TREE_OPERAND (e, 1), data));
      break;

    case tcc_unary:
      /* Recursively stabilize the single operand.  */
      result
        = build1 (code, type,
                  gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data));
      break;

    default:
      gcc_unreachable ();
    }

  /* See gnat_rewrite_reference below for the rationale.  */
  TREE_READONLY (result) = TREE_READONLY (e);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);

  if (TREE_SIDE_EFFECTS (e))
    TREE_SIDE_EFFECTS (result) = 1;

  return result;
}

/* This is equivalent to stabilize_reference in tree.cc, but we know how to
   handle our own nodes and we take extra arguments.  FORCE says whether to
   force evaluation of everything in REF.  INIT is set to the first arm of
   a COMPOUND_EXPR present in REF, if any.  */

tree
gnat_stabilize_reference (tree ref, bool force, tree *init)
{
  return
    gnat_rewrite_reference (ref, gnat_stabilize_reference_1, &force, init);
}

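/* Example (illustrative sketch): stabilizing an lvalue that is about to be
   used as both source and destination; GNU_LHS is a hypothetical reference.
   INIT receives the first arm of a COMPOUND_EXPR coming from Call_to_gnu,
   if any, and must be emitted before the first use of the stabilized
   reference, e.g. with add_stmt:

     tree init = NULL_TREE;
     gnu_lhs = gnat_stabilize_reference (gnu_lhs, false, &init);
     if (init)
       add_stmt (init);  */
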
/* Rewrite reference REF and call FUNC on each expression within REF in the
   process.  DATA is passed unmodified to FUNC.  INIT is set to the first
   arm of a COMPOUND_EXPR present in REF, if any.  */

tree
gnat_rewrite_reference (tree ref, rewrite_fn func, void *data, tree *init)
{
  tree type = TREE_TYPE (ref);
  enum tree_code code = TREE_CODE (ref);
  tree result;

  switch (code)
    {
    case CONST_DECL:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* No action is needed in this case.  */
      return ref;

    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      result
        = build1 (code, type,
                  gnat_rewrite_reference (TREE_OPERAND (ref, 0), func, data,
                                          init));
      break;

    case INDIRECT_REF:
    case UNCONSTRAINED_ARRAY_REF:
      result = build1 (code, type, func (TREE_OPERAND (ref, 0), data));
      break;

    case COMPONENT_REF:
      result = build3 (COMPONENT_REF, type,
                       gnat_rewrite_reference (TREE_OPERAND (ref, 0), func,
                                               data, init),
                       TREE_OPERAND (ref, 1), NULL_TREE);
      break;

    case BIT_FIELD_REF:
      result = build3 (BIT_FIELD_REF, type,
                       gnat_rewrite_reference (TREE_OPERAND (ref, 0), func,
                                               data, init),
                       TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
      REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
      break;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      result
        = build4 (code, type,
                  gnat_rewrite_reference (TREE_OPERAND (ref, 0), func, data,
                                          init),
                  func (TREE_OPERAND (ref, 1), data),
                  TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
      break;

    case COMPOUND_EXPR:
      gcc_assert (!*init);
      *init = TREE_OPERAND (ref, 0);
      /* We expect only the pattern built in Call_to_gnu.  */
      gcc_assert (DECL_P (TREE_OPERAND (ref, 1))
                  || (TREE_CODE (TREE_OPERAND (ref, 1)) == COMPONENT_REF
                      && DECL_P (TREE_OPERAND (TREE_OPERAND (ref, 1), 0))));
      return TREE_OPERAND (ref, 1);

    case CALL_EXPR:
      {
        /* This can only be an atomic load.  */
        gcc_assert (call_is_atomic_load (ref));

        /* An atomic load is an INDIRECT_REF of its first argument, so
           rewrite the object whose address is taken by that argument.  */
        tree t = CALL_EXPR_ARG (ref, 0);
        if (TREE_CODE (t) == NOP_EXPR)
          t = TREE_OPERAND (t, 0);
        if (TREE_CODE (t) == ADDR_EXPR)
          t = build1 (ADDR_EXPR, TREE_TYPE (t),
                      gnat_rewrite_reference (TREE_OPERAND (t, 0), func, data,
                                              init));
        else
          t = func (t, data);
        t = fold_convert (TREE_TYPE (CALL_EXPR_ARG (ref, 0)), t);

        result = build_call_expr (TREE_OPERAND (CALL_EXPR_FN (ref), 0), 2,
                                  t, CALL_EXPR_ARG (ref, 1));
      }
      break;

    case ERROR_MARK:
    case NULL_EXPR:
      return ref;

    default:
      gcc_unreachable ();
    }

  /* TREE_READONLY and TREE_THIS_VOLATILE set on the initial expression may
     not be sustained across some paths, such as the one for INDIRECT_REF.

     Special care should be taken regarding TREE_SIDE_EFFECTS, because some
     paths introduce side effects where there were none initially (e.g. if a
     SAVE_EXPR is built) and we also want to keep track of that.  */
  TREE_READONLY (result) = TREE_READONLY (ref);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);

  if (TREE_SIDE_EFFECTS (ref))
    TREE_SIDE_EFFECTS (result) = 1;

  if (code == INDIRECT_REF
      || code == UNCONSTRAINED_ARRAY_REF
      || code == ARRAY_REF
      || code == ARRAY_RANGE_REF)
    TREE_THIS_NOTRAP (result) = TREE_THIS_NOTRAP (ref);

  return result;
}

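/* Example (illustrative sketch): a minimal rewrite_fn callback; the name
   wrap_variable_part is hypothetical.  Like gnat_stabilize_reference_1
   above, it is called on the variable parts of the reference (addresses
   and indices) while the constant structure is rebuilt around its results:

     static tree
     wrap_variable_part (tree e, void *data ATTRIBUTE_UNUSED)
     {
       return TREE_SIDE_EFFECTS (e) ? save_expr (e) : e;
     }

     tree init = NULL_TREE;
     ref = gnat_rewrite_reference (ref, wrap_variable_part, NULL, &init);  */
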
/* This is equivalent to get_inner_reference in expr.cc, but it returns the
   ultimate containing object only if the reference (lvalue) is constant,
   i.e. if it doesn't depend on the context in which it is evaluated.  */

tree
get_inner_constant_reference (tree exp)
{
  while (true)
    {
      switch (TREE_CODE (exp))
        {
        case BIT_FIELD_REF:
          break;

        case COMPONENT_REF:
          if (!TREE_CONSTANT (DECL_FIELD_OFFSET (TREE_OPERAND (exp, 1))))
            return NULL_TREE;
          break;

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          {
            tree array_type = TREE_TYPE (TREE_OPERAND (exp, 0));
            if (!TREE_CONSTANT (TREE_OPERAND (exp, 1))
                || !TREE_CONSTANT (TYPE_MIN_VALUE (TYPE_DOMAIN (array_type)))
                || !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (array_type))))
              return NULL_TREE;
          }
          break;

        case REALPART_EXPR:
        case IMAGPART_EXPR:
        case VIEW_CONVERT_EXPR:
          break;

        default:
          goto done;
        }

      exp = TREE_OPERAND (exp, 0);
    }

done:
  return exp;
}

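/* Example (illustrative sketch): for a reference such as Rec.Fld (2)
   translated into a COMPONENT_REF/ARRAY_REF chain with a constant field
   offset, a constant index and fixed bounds, the function returns the
   object underlying Rec; if the offset, the index or the bounds were
   variable, the reference would depend on its evaluation context and
   NULL_TREE would be returned.  */
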
/* Return true if EXPR is the addition or the subtraction of a constant and,
   if so, set *ADD to the addend, *CST to the constant and *MINUS_P to true
   if this is a subtraction.  */

bool
is_simple_additive_expression (tree expr, tree *add, tree *cst, bool *minus_p)
{
  /* Skip overflow checks.  */
  if (TREE_CODE (expr) == COND_EXPR
      && TREE_CODE (COND_EXPR_THEN (expr)) == COMPOUND_EXPR
      && TREE_CODE (TREE_OPERAND (COND_EXPR_THEN (expr), 0)) == CALL_EXPR
      && get_callee_fndecl (TREE_OPERAND (COND_EXPR_THEN (expr), 0))
         == gnat_raise_decls[CE_Overflow_Check_Failed])
    expr = COND_EXPR_ELSE (expr);

  if (TREE_CODE (expr) == PLUS_EXPR)
    {
      if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
        {
          *add = TREE_OPERAND (expr, 1);
          *cst = TREE_OPERAND (expr, 0);
          *minus_p = false;
          return true;
        }
      else if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
        {
          *add = TREE_OPERAND (expr, 0);
          *cst = TREE_OPERAND (expr, 1);
          *minus_p = false;
          return true;
        }
    }
  else if (TREE_CODE (expr) == MINUS_EXPR)
    {
      if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
        {
          *add = TREE_OPERAND (expr, 0);
          *cst = TREE_OPERAND (expr, 1);
          *minus_p = true;
          return true;
        }
    }

  return false;
}

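/* Example (illustrative sketch): given EXPR for I + 4, possibly wrapped in
   the overflow-check COND_EXPR pattern skipped above, the function sets
   *ADD to I, *CST to 4 and *MINUS_P to false; for I - 4 it sets *MINUS_P
   to true.  The form 4 - I is not matched, because for a MINUS_EXPR only
   a constant subtrahend is handled.  */
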
/* If EXPR is an expression that is invariant in the current function, in the
   sense that it can be evaluated anywhere in the function and any number of
   times, return EXPR or an equivalent expression.  Otherwise return NULL.  */

tree
gnat_invariant_expr (tree expr)
{
  tree type = TREE_TYPE (expr);
  tree add, cst;
  bool minus_p;

  expr = remove_conversions (expr, false);

  /* Look through temporaries created to capture values.  */
  while ((TREE_CODE (expr) == CONST_DECL
          || (VAR_P (expr) && TREE_READONLY (expr)))
         && decl_function_context (expr) == current_function_decl
         && DECL_INITIAL (expr))
    {
      expr = DECL_INITIAL (expr);
      /* Look into CONSTRUCTORs built to initialize padded types.  */
      expr = maybe_padded_object (expr);
      expr = remove_conversions (expr, false);
    }

  /* We are only interested in scalar types at the moment and, even though
     we may have gone through padded types in the loop above, we must be
     back to a scalar value at this point.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (expr)))
    return NULL_TREE;

  if (TREE_CONSTANT (expr))
    return fold_convert (type, expr);

  /* Deal with aligning patterns.  */
  if (TREE_CODE (expr) == BIT_AND_EXPR
      && TREE_CONSTANT (TREE_OPERAND (expr, 1)))
    {
      tree op0 = gnat_invariant_expr (TREE_OPERAND (expr, 0));
      if (op0)
        return fold_build2 (BIT_AND_EXPR, type, op0, TREE_OPERAND (expr, 1));
      else
        return NULL_TREE;
    }

  /* Deal with addition or subtraction of constants.  */
  if (is_simple_additive_expression (expr, &add, &cst, &minus_p))
    {
      add = gnat_invariant_expr (add);
      if (add)
        return
          fold_build2 (minus_p ? MINUS_EXPR : PLUS_EXPR, type,
                       fold_convert (type, add), fold_convert (type, cst));
      else
        return NULL_TREE;
    }

  bool invariant_p = false;
  tree t = expr;

  while (true)
    {
      switch (TREE_CODE (t))
        {
        case COMPONENT_REF:
          invariant_p |= DECL_INVARIANT_P (TREE_OPERAND (t, 1));
          break;

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          {
            tree array_type = TREE_TYPE (TREE_OPERAND (t, 0));
            if (!TREE_CONSTANT (TREE_OPERAND (t, 1))
                || !TREE_CONSTANT (TYPE_MIN_VALUE (TYPE_DOMAIN (array_type)))
                || !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (array_type))))
              return NULL_TREE;
          }
          break;

        case BIT_FIELD_REF:
        case REALPART_EXPR:
        case IMAGPART_EXPR:
        case VIEW_CONVERT_EXPR:
        CASE_CONVERT:
          break;

        case INDIRECT_REF:
          if ((!invariant_p && !TREE_READONLY (t)) || TREE_SIDE_EFFECTS (t))
            return NULL_TREE;
          invariant_p = false;
          break;

        default:
          goto object;
        }

      t = TREE_OPERAND (t, 0);
    }

object:
  if (TREE_SIDE_EFFECTS (t))
    return NULL_TREE;

  if (TREE_CODE (t) == CONST_DECL
      && (DECL_EXTERNAL (t)
          || decl_function_context (t) != current_function_decl))
    return fold_convert (type, expr);

  if (!invariant_p && !TREE_READONLY (t))
    return NULL_TREE;

  if (TREE_CODE (t) == PARM_DECL)
    return fold_convert (type, expr);

  if (VAR_P (t)
      && (DECL_EXTERNAL (t)
          || decl_function_context (t) != current_function_decl))
    return fold_convert (type, expr);

  return NULL_TREE;
}
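
/* Example (illustrative sketch): if N is a read-only PARM_DECL of the
   current function (e.g. an Ada IN parameter), gnat_invariant_expr on
   N + 8 returns an equivalent expression, since the parameter can be
   evaluated anywhere in the function; on *P where P is not read-only,
   it returns NULL_TREE because the dereferenced value may change between
   evaluation points.  */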