/****************************************************************************
 *                                                                          *
 *                         GNAT COMPILER COMPONENTS                         *
 *                                                                          *
 *                               U T I L S 2                                *
 *                                                                          *
 *                          C Implementation File                           *
 *                                                                          *
 *          Copyright (C) 1992-2009, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software;  you can  redistribute it  and/or modify it under *
 * terms of the  GNU General Public License as published  by the Free Soft- *
 * ware  Foundation;  either version 3,  or (at your option) any later ver- *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY;  without even the  implied warranty of MERCHANTABILITY *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for  more details.  You should have received a copy of the GNU General   *
 * Public License along with GCC; see the file COPYING3.  If not see        *
 * <http://www.gnu.org/licenses/>.                                          *
 *                                                                          *
 * GNAT was originally developed  by the GNAT team at  New York University. *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 *                                                                          *
 ****************************************************************************/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "ggc.h"
#include "flags.h"
#include "output.h"
#include "tree-inline.h"

#include "ada.h"
#include "types.h"
#include "atree.h"
#include "elists.h"
#include "namet.h"
#include "nlists.h"
#include "snames.h"
#include "stringt.h"
#include "uintp.h"
#include "fe.h"
#include "sinfo.h"
#include "einfo.h"
#include "ada-tree.h"
#include "gigi.h"

static tree find_common_type (tree, tree);
static bool contains_save_expr_p (tree);
static tree contains_null_expr (tree);
static tree compare_arrays (tree, tree, tree);
static tree nonbinary_modular_operation (enum tree_code, tree, tree, tree);
static tree build_simple_component_ref (tree, tree, tree, bool);
\f
/* Prepare expr to be an argument of a TRUTH_NOT_EXPR or other logical
   operation.

   This preparation consists of taking the ordinary representation of
   an expression expr and producing a valid tree boolean expression
   describing whether expr is nonzero.  We could simply always do

      build_binary_op (NE_EXPR, expr, integer_zero_node, 1),

   but we optimize comparisons, &&, ||, and !.

   The resulting type should always be the same as the input type.
   This function is simpler than the corresponding C version since
   the only possible operands will be things of Boolean type.  */
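
/* For instance, a plain Boolean object B falls to the default case below
   and becomes the comparison B != 0, whereas an expression that already
   has Boolean form, such as the TRUTH_ANDIF_EXPR built for "A and then B",
   is returned unchanged.  */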

tree
gnat_truthvalue_conversion (tree expr)
{
  tree type = TREE_TYPE (expr);

  switch (TREE_CODE (expr))
    {
    case EQ_EXPR:  case NE_EXPR:  case LE_EXPR:  case GE_EXPR:
    case LT_EXPR:  case GT_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case ERROR_MARK:
      return expr;

    case INTEGER_CST:
      return (integer_zerop (expr)
              ? build_int_cst (type, 0)
              : build_int_cst (type, 1));

    case REAL_CST:
      return (real_zerop (expr)
              ? fold_convert (type, integer_zero_node)
              : fold_convert (type, integer_one_node));

    case COND_EXPR:
      /* Distribute the conversion into the arms of a COND_EXPR.  */
      {
        tree arg1 = gnat_truthvalue_conversion (TREE_OPERAND (expr, 1));
        tree arg2 = gnat_truthvalue_conversion (TREE_OPERAND (expr, 2));
        return fold_build3 (COND_EXPR, type, TREE_OPERAND (expr, 0),
                            arg1, arg2);
      }

    default:
      return build_binary_op (NE_EXPR, type, expr,
                              fold_convert (type, integer_zero_node));
    }
}
\f
/* Return the base type of TYPE.  */

tree
get_base_type (tree type)
{
  if (TREE_CODE (type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (type))
    type = TREE_TYPE (TYPE_FIELDS (type));

  while (TREE_TYPE (type)
         && (TREE_CODE (type) == INTEGER_TYPE
             || TREE_CODE (type) == REAL_TYPE))
    type = TREE_TYPE (type);

  return type;
}
\f
/* EXP is a GCC tree representing an address.  See if we can find how
   strictly the object at that address is aligned.  Return that alignment
   in bits.  If we don't know anything about the alignment, return 0.  */

unsigned int
known_alignment (tree exp)
{
  unsigned int this_alignment;
  unsigned int lhs, rhs;

  switch (TREE_CODE (exp))
    {
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Conversions between pointers and integers don't change the alignment
         of the underlying object.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 0));
      break;

    case COMPOUND_EXPR:
      /* The value of a COMPOUND_EXPR is that of its second operand.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 1));
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* If two addresses are added, the alignment of the result is the
         minimum of the two alignments.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));
      this_alignment = MIN (lhs, rhs);
      break;

    case POINTER_PLUS_EXPR:
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));
      /* If we don't know the alignment of the offset, we assume that
         of the base.  */
      if (rhs == 0)
        this_alignment = lhs;
      else
        this_alignment = MIN (lhs, rhs);
      break;

    case COND_EXPR:
      /* If there is a choice between two values, use the smallest one.  */
      lhs = known_alignment (TREE_OPERAND (exp, 1));
      rhs = known_alignment (TREE_OPERAND (exp, 2));
      this_alignment = MIN (lhs, rhs);
      break;

    case INTEGER_CST:
      {
        unsigned HOST_WIDE_INT c = TREE_INT_CST_LOW (exp);
        /* The first part of this represents the lowest bit in the constant,
           but it is originally in bytes, not bits.  */
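        /* For instance, a constant address of 12 (binary 1100) has lowest
           set bit c & -c == 4, so on a BITS_PER_UNIT == 8 target the object
           is known to be 4-byte, i.e. 32-bit, aligned.  */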
        this_alignment = MIN (BITS_PER_UNIT * (c & -c), BIGGEST_ALIGNMENT);
      }
      break;

    case MULT_EXPR:
      /* If we know the alignment of just one side, use it.  Otherwise,
         use the product of the alignments.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));

      if (lhs == 0)
        this_alignment = rhs;
      else if (rhs == 0)
        this_alignment = lhs;
      else
        this_alignment = MIN (lhs * rhs, BIGGEST_ALIGNMENT);
      break;

    case BIT_AND_EXPR:
      /* A bit-and expression is as aligned as the maximum alignment of the
         operands.  We typically get here for a complex lhs and a constant
         negative power of two on the rhs to force an explicit alignment, so
         don't bother looking at the lhs.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 1));
      break;

    case ADDR_EXPR:
      this_alignment = expr_align (TREE_OPERAND (exp, 0));
      break;

    case CALL_EXPR:
      {
        tree t = maybe_inline_call_in_expr (exp);
        if (t)
          return known_alignment (t);
      }

      /* Fall through...  */

    default:
      /* For other pointer expressions, we assume that the pointed-to object
         is at least as aligned as the pointed-to type.  Beware that we can
         have a dummy type here (e.g. a Taft Amendment type), for which the
         alignment is meaningless and should be ignored.  */
      if (POINTER_TYPE_P (TREE_TYPE (exp))
          && !TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp))))
        this_alignment = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
      else
        this_alignment = 0;
      break;
    }

  return this_alignment;
}
\f
/* We have a comparison or assignment operation on two types, T1 and T2, which
   are either both array types or both record types.  T1 is assumed to be for
   the left hand side operand, and T2 for the right hand side.  Return the
   type that both operands should be converted to for the operation, if any.
   Otherwise return zero.  */
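
/* For illustration, one case the note below mentions: an assignment between
   packable and regular views of the same record, where both sizes are
   constant and equal but the modes differ; the rules here then pick the
   left-hand side type T1.  */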

static tree
find_common_type (tree t1, tree t2)
{
  /* ??? As of today, various constructs lead here with types of different
     sizes even when both constants (e.g. tagged types, packable vs regular
     component types, padded vs unpadded types, ...).  While some of these
     would better be handled upstream (types should be made consistent before
     calling into build_binary_op), some others are really expected and we
     have to be careful.  */

  /* We must prevent writing more than what the target may hold if this is
     for an assignment, and the case of tagged types is handled in
     build_binary_op, so use the lhs type if it is known to be smaller, or
     of constant size and the rhs type is not, whatever the modes.  We also
     force t1 in case of constant size equality to minimize occurrences of
     view conversions on the lhs of assignments.  */
  if (TREE_CONSTANT (TYPE_SIZE (t1))
      && (!TREE_CONSTANT (TYPE_SIZE (t2))
          || !tree_int_cst_lt (TYPE_SIZE (t2), TYPE_SIZE (t1))))
    return t1;

  /* Otherwise, if the lhs type is non-BLKmode, use it.  Note that we know
     that we will not have any alignment problems since, if we did, the
     non-BLKmode type could not have been used.  */
  if (TYPE_MODE (t1) != BLKmode)
    return t1;

  /* If the rhs type is of constant size, use it whatever the modes.  At
     this point it is known to be smaller, or of constant size and the
     lhs type is not.  */
  if (TREE_CONSTANT (TYPE_SIZE (t2)))
    return t2;

  /* Otherwise, if the rhs type is non-BLKmode, use it.  */
  if (TYPE_MODE (t2) != BLKmode)
    return t2;

  /* In this case, both types have variable size and BLKmode.  It's
     probably best to leave the "type mismatch" because changing it
     could cause a bad self-referential reference.  */
  return NULL_TREE;
}
\f
/* See if EXP contains a SAVE_EXPR in a position where we would
   normally put it.

   ??? This is a real kludge, but is probably the best approach short
   of some very general solution.  */

static bool
contains_save_expr_p (tree exp)
{
  switch (TREE_CODE (exp))
    {
    case SAVE_EXPR:
      return true;

    case ADDR_EXPR:  case INDIRECT_REF:
    case COMPONENT_REF:
    CASE_CONVERT: case VIEW_CONVERT_EXPR:
      return contains_save_expr_p (TREE_OPERAND (exp, 0));

    case CONSTRUCTOR:
      {
        tree value;
        unsigned HOST_WIDE_INT ix;

        FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), ix, value)
          if (contains_save_expr_p (value))
            return true;
        return false;
      }

    default:
      return false;
    }
}
\f
/* See if EXP contains a NULL_EXPR in an expression we use for sizes.  Return
   it if so.  This is used to detect types whose sizes involve computations
   that are known to raise Constraint_Error.  */

static tree
contains_null_expr (tree exp)
{
  tree tem;

  if (TREE_CODE (exp) == NULL_EXPR)
    return exp;

  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case tcc_unary:
      return contains_null_expr (TREE_OPERAND (exp, 0));

    case tcc_comparison:
    case tcc_binary:
      tem = contains_null_expr (TREE_OPERAND (exp, 0));
      if (tem)
        return tem;

      return contains_null_expr (TREE_OPERAND (exp, 1));

    case tcc_expression:
      switch (TREE_CODE (exp))
        {
        case SAVE_EXPR:
          return contains_null_expr (TREE_OPERAND (exp, 0));

        case COND_EXPR:
          tem = contains_null_expr (TREE_OPERAND (exp, 0));
          if (tem)
            return tem;

          tem = contains_null_expr (TREE_OPERAND (exp, 1));
          if (tem)
            return tem;

          return contains_null_expr (TREE_OPERAND (exp, 2));

        default:
          return 0;
        }

    default:
      return 0;
    }
}
\f
/* Return an expression tree representing an equality comparison of A1 and
   A2, two objects of ARRAY_TYPE.  The returned expression should be of
   type RESULT_TYPE.

   Two arrays are equal in one of two ways: (1) if both have zero length
   in some dimension (not necessarily the same dimension) or (2) if the
   lengths in each dimension are equal and the data is equal.  We perform
   the length tests in as efficient a manner as possible.  */
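
/* In Ada terms, a dimension whose 'Last is below its 'First (e.g. the
   range 1 .. 0) denotes an empty array, so two arrays may compare equal
   without having identical bounds, provided both are empty in some
   dimension.  */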

static tree
compare_arrays (tree result_type, tree a1, tree a2)
{
  tree t1 = TREE_TYPE (a1);
  tree t2 = TREE_TYPE (a2);
  tree result = convert (result_type, integer_one_node);
  tree a1_is_null = convert (result_type, integer_zero_node);
  tree a2_is_null = convert (result_type, integer_zero_node);
  bool length_zero_p = false;

  /* Process each dimension separately and compare the lengths.  If any
     dimension has a length known to be zero, set LENGTH_ZERO_P to true to
     suppress the comparison of the data.  */
  while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
    {
      tree lb1 = TYPE_MIN_VALUE (TYPE_DOMAIN (t1));
      tree ub1 = TYPE_MAX_VALUE (TYPE_DOMAIN (t1));
      tree lb2 = TYPE_MIN_VALUE (TYPE_DOMAIN (t2));
      tree ub2 = TYPE_MAX_VALUE (TYPE_DOMAIN (t2));
      tree bt = get_base_type (TREE_TYPE (lb1));
      tree length1 = fold_build2 (MINUS_EXPR, bt, ub1, lb1);
      tree length2 = fold_build2 (MINUS_EXPR, bt, ub2, lb2);
      tree nbt;
      tree tem;
      tree comparison, this_a1_is_null, this_a2_is_null;

      /* If the length of the first array is a constant, swap our operands
         unless the length of the second array is the constant zero.
         Note that we have set the `length' values to the length - 1.  */
      if (TREE_CODE (length1) == INTEGER_CST
          && !integer_zerop (fold_build2 (PLUS_EXPR, bt, length2,
                                          convert (bt, integer_one_node))))
        {
          tem = a1, a1 = a2, a2 = tem;
          tem = t1, t1 = t2, t2 = tem;
          tem = lb1, lb1 = lb2, lb2 = tem;
          tem = ub1, ub1 = ub2, ub2 = tem;
          tem = length1, length1 = length2, length2 = tem;
          tem = a1_is_null, a1_is_null = a2_is_null, a2_is_null = tem;
        }

      /* If the length of this dimension in the second array is the constant
         zero, we can just go inside the original bounds for the first
         array and see if last < first.  */
      if (integer_zerop (fold_build2 (PLUS_EXPR, bt, length2,
                                      convert (bt, integer_one_node))))
        {
          tree ub = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
          tree lb = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));

          comparison = build_binary_op (LT_EXPR, result_type, ub, lb);
          comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
          length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);

          length_zero_p = true;
          this_a1_is_null = comparison;
          this_a2_is_null = convert (result_type, integer_one_node);
        }

      /* If the length is some other constant value, we know that this
         dimension in the first array cannot be superflat, so we can just
         use its length from the actual stored bounds.  */
      else if (TREE_CODE (length2) == INTEGER_CST)
        {
          ub1 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
          lb1 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
          ub2 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
          lb2 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
          nbt = get_base_type (TREE_TYPE (ub1));

          comparison
            = build_binary_op (EQ_EXPR, result_type,
                               build_binary_op (MINUS_EXPR, nbt, ub1, lb1),
                               build_binary_op (MINUS_EXPR, nbt, ub2, lb2));

          /* Note that we know that UB2 and LB2 are constant and hence
             cannot contain a PLACEHOLDER_EXPR.  */
          comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
          length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);

          this_a1_is_null = build_binary_op (LT_EXPR, result_type, ub1, lb1);
          this_a2_is_null = convert (result_type, integer_zero_node);
        }

      /* Otherwise compare the computed lengths.  */
      else
        {
          length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
          length2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length2, a2);

          comparison
            = build_binary_op (EQ_EXPR, result_type, length1, length2);

          this_a1_is_null
            = build_binary_op (LT_EXPR, result_type, length1,
                               convert (bt, integer_zero_node));
          this_a2_is_null
            = build_binary_op (LT_EXPR, result_type, length2,
                               convert (bt, integer_zero_node));
        }

      result = build_binary_op (TRUTH_ANDIF_EXPR, result_type,
                                result, comparison);

      a1_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                    this_a1_is_null, a1_is_null);
      a2_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                    this_a2_is_null, a2_is_null);

      t1 = TREE_TYPE (t1);
      t2 = TREE_TYPE (t2);
    }

  /* Unless the length of some dimension is known to be zero, compare the
     data in the array.  */
  if (!length_zero_p)
    {
      tree type = find_common_type (TREE_TYPE (a1), TREE_TYPE (a2));

      if (type)
        a1 = convert (type, a1), a2 = convert (type, a2);

      result = build_binary_op (TRUTH_ANDIF_EXPR, result_type, result,
                                fold_build2 (EQ_EXPR, result_type, a1, a2));
    }

  /* The result is also true if both sizes are zero.  */
  result = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                            build_binary_op (TRUTH_ANDIF_EXPR, result_type,
                                             a1_is_null, a2_is_null),
                            result);

  /* If either operand contains SAVE_EXPRs, they have to be evaluated before
     starting the comparison above since the place they would otherwise be
     evaluated would be wrong.  */
  if (contains_save_expr_p (a1))
    result = build2 (COMPOUND_EXPR, result_type, a1, result);

  if (contains_save_expr_p (a2))
    result = build2 (COMPOUND_EXPR, result_type, a2, result);

  return result;
}
\f
/* Compute the result of applying OP_CODE to LHS and RHS, where both are of
   type TYPE.  We know that TYPE is a modular type with a nonbinary
   modulus.  */
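
/* A worked instance: with modulus 7 the needed precision is 3 bits; an
   addition gets one more bit, so 6 + 5 is computed as 11 in a 4-bit type
   and then reduced to 4 by the conditional subtraction of the modulus
   below, avoiding a full MOD operation.  */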

static tree
nonbinary_modular_operation (enum tree_code op_code, tree type, tree lhs,
                             tree rhs)
{
  tree modulus = TYPE_MODULUS (type);
  unsigned int needed_precision = tree_floor_log2 (modulus) + 1;
  unsigned int precision;
  bool unsignedp = true;
  tree op_type = type;
  tree result;

  /* If this is an addition of a constant, convert it to a subtraction
     of a constant since we can do that faster.  */
  if (op_code == PLUS_EXPR && TREE_CODE (rhs) == INTEGER_CST)
    {
      rhs = fold_build2 (MINUS_EXPR, type, modulus, rhs);
      op_code = MINUS_EXPR;
    }

  /* For the logical operations, we only need NEEDED_PRECISION bits.  For
     addition and subtraction, we need one more, and for multiplication we
     need twice as many.  But we never want a precision narrower than that
     of the type itself.  */
  if (op_code == PLUS_EXPR || op_code == MINUS_EXPR)
    needed_precision += 1;
  else if (op_code == MULT_EXPR)
    needed_precision *= 2;

  precision = MAX (needed_precision, TYPE_PRECISION (op_type));

  /* Unsigned will do for everything but subtraction.  */
  if (op_code == MINUS_EXPR)
    unsignedp = false;

  /* If our type is the wrong signedness or isn't wide enough, make a new
     type and convert both our operands to it.  */
  if (TYPE_PRECISION (op_type) < precision
      || TYPE_UNSIGNED (op_type) != unsignedp)
    {
      /* Copy the node so we ensure it can be modified to make it modular.  */
      op_type = copy_node (gnat_type_for_size (precision, unsignedp));
      modulus = convert (op_type, modulus);
      SET_TYPE_MODULUS (op_type, modulus);
      TYPE_MODULAR_P (op_type) = 1;
      lhs = convert (op_type, lhs);
      rhs = convert (op_type, rhs);
    }

  /* Do the operation, then we'll fix it up.  */
  result = fold_build2 (op_code, op_type, lhs, rhs);

  /* For multiplication, we have no choice but to do a full modulus
     operation.  However, we want to do this in the narrowest
     possible size.  */
  if (op_code == MULT_EXPR)
    {
      tree div_type = copy_node (gnat_type_for_size (needed_precision, 1));
      modulus = convert (div_type, modulus);
      SET_TYPE_MODULUS (div_type, modulus);
      TYPE_MODULAR_P (div_type) = 1;
      result = convert (op_type,
                        fold_build2 (TRUNC_MOD_EXPR, div_type,
                                     convert (div_type, result), modulus));
    }

  /* For subtraction, add the modulus back if we are negative.  */
  else if (op_code == MINUS_EXPR)
    {
      result = save_expr (result);
      result = fold_build3 (COND_EXPR, op_type,
                            fold_build2 (LT_EXPR, integer_type_node, result,
                                         convert (op_type, integer_zero_node)),
                            fold_build2 (PLUS_EXPR, op_type, result, modulus),
                            result);
    }

  /* For the other operations, subtract the modulus if we are >= it.  */
  else
    {
      result = save_expr (result);
      result = fold_build3 (COND_EXPR, op_type,
                            fold_build2 (GE_EXPR, integer_type_node,
                                         result, modulus),
                            fold_build2 (MINUS_EXPR, op_type,
                                         result, modulus),
                            result);
    }

  return convert (type, result);
}
\f
/* Make a binary operation of kind OP_CODE.  RESULT_TYPE is the type
   desired for the result.  Usually the operation is to be performed
   in that type.  For MODIFY_EXPR and ARRAY_REF, RESULT_TYPE may be 0,
   in which case the type to be used will be derived from the operands.

   This function is very much unlike the ones for C and C++ since we
   have already done any type conversion and matching required.  All we
   have to do here is validate the work done by SEM and handle subtypes.  */

tree
build_binary_op (enum tree_code op_code, tree result_type,
                 tree left_operand, tree right_operand)
{
  tree left_type = TREE_TYPE (left_operand);
  tree right_type = TREE_TYPE (right_operand);
  tree left_base_type = get_base_type (left_type);
  tree right_base_type = get_base_type (right_type);
  tree operation_type = result_type;
  tree best_type = NULL_TREE;
  tree modulus, result;
  bool has_side_effects = false;

  if (operation_type
      && TREE_CODE (operation_type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (operation_type))
    operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));

  if (operation_type
      && !AGGREGATE_TYPE_P (operation_type)
      && TYPE_EXTRA_SUBTYPE_P (operation_type))
    operation_type = get_base_type (operation_type);

  modulus = (operation_type
             && TREE_CODE (operation_type) == INTEGER_TYPE
             && TYPE_MODULAR_P (operation_type)
             ? TYPE_MODULUS (operation_type) : NULL_TREE);

  switch (op_code)
    {
    case MODIFY_EXPR:
      /* If there were integral or pointer conversions on the LHS, remove
         them; we'll be putting them back below if needed.  Likewise for
         conversions between array and record types, except for justified
         modular types.  But don't do this if the right operand is not
         BLKmode (for packed arrays) unless we are not changing the mode.  */
      while ((CONVERT_EXPR_P (left_operand)
              || TREE_CODE (left_operand) == VIEW_CONVERT_EXPR)
             && (((INTEGRAL_TYPE_P (left_type)
                   || POINTER_TYPE_P (left_type))
                  && (INTEGRAL_TYPE_P (TREE_TYPE
                                       (TREE_OPERAND (left_operand, 0)))
                      || POINTER_TYPE_P (TREE_TYPE
                                         (TREE_OPERAND (left_operand, 0)))))
                 || (((TREE_CODE (left_type) == RECORD_TYPE
                       && !TYPE_JUSTIFIED_MODULAR_P (left_type))
                      || TREE_CODE (left_type) == ARRAY_TYPE)
                     && ((TREE_CODE (TREE_TYPE
                                     (TREE_OPERAND (left_operand, 0)))
                          == RECORD_TYPE)
                         || (TREE_CODE (TREE_TYPE
                                        (TREE_OPERAND (left_operand, 0)))
                             == ARRAY_TYPE))
                     && (TYPE_MODE (right_type) == BLKmode
                         || (TYPE_MODE (left_type)
                             == TYPE_MODE (TREE_TYPE
                                           (TREE_OPERAND
                                            (left_operand, 0))))))))
        {
          left_operand = TREE_OPERAND (left_operand, 0);
          left_type = TREE_TYPE (left_operand);
        }

      /* If a class-wide type may be involved, force use of the RHS type.  */
      if ((TREE_CODE (right_type) == RECORD_TYPE
           || TREE_CODE (right_type) == UNION_TYPE)
          && TYPE_ALIGN_OK (right_type))
        operation_type = right_type;

      /* If we are copying between padded objects with compatible types, use
         the padded view of the objects; this is very likely more efficient.
         Likewise for a padded object that is assigned a constructor, if we
         can convert the constructor to the inner type, to avoid putting a
         VIEW_CONVERT_EXPR on the LHS.  But don't do so if we wouldn't have
         actually copied anything.  */
      else if (TREE_CODE (left_type) == RECORD_TYPE
               && TYPE_IS_PADDING_P (left_type)
               && TREE_CONSTANT (TYPE_SIZE (left_type))
               && ((TREE_CODE (right_operand) == COMPONENT_REF
                    && TREE_CODE (TREE_TYPE (TREE_OPERAND (right_operand, 0)))
                       == RECORD_TYPE
                    && TYPE_IS_PADDING_P
                       (TREE_TYPE (TREE_OPERAND (right_operand, 0)))
                    && gnat_types_compatible_p
                       (left_type,
                        TREE_TYPE (TREE_OPERAND (right_operand, 0))))
                   || (TREE_CODE (right_operand) == CONSTRUCTOR
                       && !CONTAINS_PLACEHOLDER_P
                           (DECL_SIZE (TYPE_FIELDS (left_type)))))
               && !integer_zerop (TYPE_SIZE (right_type)))
        operation_type = left_type;

      /* Find the best type to use for copying between aggregate types.  */
      else if (((TREE_CODE (left_type) == ARRAY_TYPE
                 && TREE_CODE (right_type) == ARRAY_TYPE)
                || (TREE_CODE (left_type) == RECORD_TYPE
                    && TREE_CODE (right_type) == RECORD_TYPE))
               && (best_type = find_common_type (left_type, right_type)))
        operation_type = best_type;

      /* Otherwise use the LHS type.  */
      else if (!operation_type)
        operation_type = left_type;

      /* Ensure everything on the LHS is valid.  If we have a field reference,
         strip anything that get_inner_reference can handle.  Then remove any
         conversions between types having the same code and mode.  And mark
         VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE.  When done, we must have
         either an INDIRECT_REF, a NULL_EXPR or a DECL node.  */
      result = left_operand;
      while (true)
        {
          tree restype = TREE_TYPE (result);

          if (TREE_CODE (result) == COMPONENT_REF
              || TREE_CODE (result) == ARRAY_REF
              || TREE_CODE (result) == ARRAY_RANGE_REF)
            while (handled_component_p (result))
              result = TREE_OPERAND (result, 0);
          else if (TREE_CODE (result) == REALPART_EXPR
                   || TREE_CODE (result) == IMAGPART_EXPR
                   || (CONVERT_EXPR_P (result)
                       && (((TREE_CODE (restype)
                             == TREE_CODE (TREE_TYPE
                                           (TREE_OPERAND (result, 0))))
                            && (TYPE_MODE (TREE_TYPE
                                           (TREE_OPERAND (result, 0)))
                                == TYPE_MODE (restype)))
                           || TYPE_ALIGN_OK (restype))))
            result = TREE_OPERAND (result, 0);
          else if (TREE_CODE (result) == VIEW_CONVERT_EXPR)
            {
              TREE_ADDRESSABLE (result) = 1;
              result = TREE_OPERAND (result, 0);
            }
          else
            break;
        }

      gcc_assert (TREE_CODE (result) == INDIRECT_REF
                  || TREE_CODE (result) == NULL_EXPR
                  || DECL_P (result));

      /* Convert the right operand to the operation type unless it is
         either already of the correct type or if the type involves a
         placeholder, since the RHS may not have the same record type.  */
      if (operation_type != right_type
          && !CONTAINS_PLACEHOLDER_P (TYPE_SIZE (operation_type)))
        {
          right_operand = convert (operation_type, right_operand);
          right_type = operation_type;
        }

      /* If the left operand is not of the same type as the operation
         type, wrap it up in a VIEW_CONVERT_EXPR.  */
      if (left_type != operation_type)
        left_operand = unchecked_convert (operation_type, left_operand, false);

      has_side_effects = true;
      modulus = NULL_TREE;
      break;

    case ARRAY_REF:
      if (!operation_type)
        operation_type = TREE_TYPE (left_type);

      /* ... fall through ... */

    case ARRAY_RANGE_REF:
      /* First look through conversions between type variants.  Note that
         this changes neither the operation type nor the type domain.  */
      if (TREE_CODE (left_operand) == VIEW_CONVERT_EXPR
          && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (left_operand, 0)))
             == TYPE_MAIN_VARIANT (left_type))
        {
          left_operand = TREE_OPERAND (left_operand, 0);
          left_type = TREE_TYPE (left_operand);
        }

      /* Then convert the right operand to its base type.  This will prevent
         unneeded sign conversions when sizetype is wider than integer.  */
      right_operand = convert (right_base_type, right_operand);
      right_operand = convert (sizetype, right_operand);

      if (!TREE_CONSTANT (right_operand)
          || !TREE_CONSTANT (TYPE_MIN_VALUE (right_type)))
        gnat_mark_addressable (left_operand);

      modulus = NULL_TREE;
      break;

    case GE_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case LT_EXPR:
      gcc_assert (!POINTER_TYPE_P (left_type));

      /* ... fall through ... */

    case EQ_EXPR:
    case NE_EXPR:
      /* If either operand is a NULL_EXPR, just return a new one.  */
      if (TREE_CODE (left_operand) == NULL_EXPR)
        return build2 (op_code, result_type,
                       build1 (NULL_EXPR, integer_type_node,
                               TREE_OPERAND (left_operand, 0)),
                       integer_zero_node);

      else if (TREE_CODE (right_operand) == NULL_EXPR)
        return build2 (op_code, result_type,
                       build1 (NULL_EXPR, integer_type_node,
                               TREE_OPERAND (right_operand, 0)),
                       integer_zero_node);

      /* If either object is a justified modular type, get the
         fields from within.  */
      if (TREE_CODE (left_type) == RECORD_TYPE
          && TYPE_JUSTIFIED_MODULAR_P (left_type))
        {
          left_operand = convert (TREE_TYPE (TYPE_FIELDS (left_type)),
                                  left_operand);
          left_type = TREE_TYPE (left_operand);
          left_base_type = get_base_type (left_type);
        }

      if (TREE_CODE (right_type) == RECORD_TYPE
          && TYPE_JUSTIFIED_MODULAR_P (right_type))
        {
          right_operand = convert (TREE_TYPE (TYPE_FIELDS (right_type)),
                                   right_operand);
          right_type = TREE_TYPE (right_operand);
          right_base_type = get_base_type (right_type);
        }

      /* If both objects are arrays, compare them specially.  */
      if ((TREE_CODE (left_type) == ARRAY_TYPE
           || (TREE_CODE (left_type) == INTEGER_TYPE
               && TYPE_HAS_ACTUAL_BOUNDS_P (left_type)))
          && (TREE_CODE (right_type) == ARRAY_TYPE
              || (TREE_CODE (right_type) == INTEGER_TYPE
                  && TYPE_HAS_ACTUAL_BOUNDS_P (right_type))))
        {
          result = compare_arrays (result_type, left_operand, right_operand);

          if (op_code == NE_EXPR)
            result = invert_truthvalue (result);
          else
            gcc_assert (op_code == EQ_EXPR);

          return result;
        }

      /* Otherwise, the base types must be the same unless the objects are
         fat pointers or records.  If we have records, use the best type and
         convert both operands to that type.  */
      if (left_base_type != right_base_type)
        {
          if (TYPE_FAT_POINTER_P (left_base_type)
              && TYPE_FAT_POINTER_P (right_base_type)
              && TYPE_MAIN_VARIANT (left_base_type)
                 == TYPE_MAIN_VARIANT (right_base_type))
            best_type = left_base_type;
          else if (TREE_CODE (left_base_type) == RECORD_TYPE
                   && TREE_CODE (right_base_type) == RECORD_TYPE)
            {
              /* The only way these are permitted to be the same is if both
                 types have the same name.  In that case, one of them must
                 not be self-referential.  Use that one as the best type.
                 Even better is if one is of fixed size.  */
              gcc_assert (TYPE_NAME (left_base_type)
                          && (TYPE_NAME (left_base_type)
                              == TYPE_NAME (right_base_type)));

              if (TREE_CONSTANT (TYPE_SIZE (left_base_type)))
                best_type = left_base_type;
              else if (TREE_CONSTANT (TYPE_SIZE (right_base_type)))
                best_type = right_base_type;
              else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (left_base_type)))
                best_type = left_base_type;
              else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (right_base_type)))
                best_type = right_base_type;
              else
                gcc_unreachable ();
            }
          else
            gcc_unreachable ();

          left_operand = convert (best_type, left_operand);
          right_operand = convert (best_type, right_operand);
        }

      /* If we are comparing a fat pointer against zero, we just need to
         compare the data pointer.  */
      else if (TYPE_FAT_POINTER_P (left_base_type)
               && TREE_CODE (right_operand) == CONSTRUCTOR
               && integer_zerop (VEC_index (constructor_elt,
                                            CONSTRUCTOR_ELTS (right_operand),
                                            0)
                                 ->value))
        {
          right_operand = build_component_ref (left_operand, NULL_TREE,
                                               TYPE_FIELDS (left_base_type),
                                               false);
          left_operand = convert (TREE_TYPE (right_operand),
                                  integer_zero_node);
        }
      else
        {
          left_operand = convert (left_base_type, left_operand);
          right_operand = convert (right_base_type, right_operand);
        }

      modulus = NULL_TREE;
      break;

    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
      /* These operations are not used anymore.  */
      gcc_unreachable ();

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* The RHS of a shift can be any type.  Also, ignore any modulus
         (we used to abort, but this is needed for unchecked conversion
         to modular types).  Otherwise, processing is the same as normal.  */
      gcc_assert (operation_type == left_base_type);
      modulus = NULL_TREE;
      left_operand = convert (operation_type, left_operand);
      break;

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      left_operand = gnat_truthvalue_conversion (left_operand);
      right_operand = gnat_truthvalue_conversion (right_operand);
      goto common;

    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      /* For binary modulus, if the inputs are in range, so are the
         outputs.  */
      if (modulus && integer_pow2p (modulus))
        modulus = NULL_TREE;
      goto common;

    case COMPLEX_EXPR:
      gcc_assert (TREE_TYPE (result_type) == left_base_type
                  && TREE_TYPE (result_type) == right_base_type);
      left_operand = convert (left_base_type, left_operand);
      right_operand = convert (right_base_type, right_operand);
      break;

    case TRUNC_DIV_EXPR:  case TRUNC_MOD_EXPR:
    case CEIL_DIV_EXPR:   case CEIL_MOD_EXPR:
    case FLOOR_DIV_EXPR:  case FLOOR_MOD_EXPR:
    case ROUND_DIV_EXPR:  case ROUND_MOD_EXPR:
      /* These always produce results lower than either operand.  */
      modulus = NULL_TREE;
      goto common;

    case POINTER_PLUS_EXPR:
      gcc_assert (operation_type == left_base_type
                  && sizetype == right_base_type);
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (sizetype, right_operand);
      break;

    case PLUS_NOMOD_EXPR:
    case MINUS_NOMOD_EXPR:
      if (op_code == PLUS_NOMOD_EXPR)
        op_code = PLUS_EXPR;
      else
        op_code = MINUS_EXPR;
      modulus = NULL_TREE;

      /* ... fall through ... */

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* Avoid doing arithmetic in ENUMERAL_TYPE or BOOLEAN_TYPE like the
         other compilers.  Contrary to C, Ada doesn't allow arithmetic in
         these types but can generate addition/subtraction for Succ/Pred.  */
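      /* For instance, Boolean'Succ (B) reaches this point as an addition
         of 1 on a BOOLEAN_TYPE operand and is rewritten to be performed
         in the integer type with the same machine mode.  */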
      if (operation_type
          && (TREE_CODE (operation_type) == ENUMERAL_TYPE
              || TREE_CODE (operation_type) == BOOLEAN_TYPE))
        operation_type = left_base_type = right_base_type
          = gnat_type_for_mode (TYPE_MODE (operation_type),
                                TYPE_UNSIGNED (operation_type));

      /* ... fall through ... */

    default:
    common:
      /* The result type should be the same as the base types of both
         operands (and they should be the same).  Convert everything to
         the result type.  */

      gcc_assert (operation_type == left_base_type
                  && left_base_type == right_base_type);
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (operation_type, right_operand);
    }

  if (modulus && !integer_pow2p (modulus))
    {
      result = nonbinary_modular_operation (op_code, operation_type,
                                            left_operand, right_operand);
      modulus = NULL_TREE;
    }
  /* If either operand is a NULL_EXPR, just return a new one.  */
  else if (TREE_CODE (left_operand) == NULL_EXPR)
    return build1 (NULL_EXPR, operation_type, TREE_OPERAND (left_operand, 0));
  else if (TREE_CODE (right_operand) == NULL_EXPR)
    return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
  else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
    result = fold (build4 (op_code, operation_type, left_operand,
                           right_operand, NULL_TREE, NULL_TREE));
  else
    result
      = fold_build2 (op_code, operation_type, left_operand, right_operand);

  TREE_SIDE_EFFECTS (result) |= has_side_effects;
  TREE_CONSTANT (result)
    |= (TREE_CONSTANT (left_operand) & TREE_CONSTANT (right_operand)
        && op_code != ARRAY_REF && op_code != ARRAY_RANGE_REF);

  if ((op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
      && TYPE_VOLATILE (operation_type))
    TREE_THIS_VOLATILE (result) = 1;

  /* If we are working with modular types, perform the MOD operation
     if something above hasn't eliminated the need for it.  */
  if (modulus)
    result = fold_build2 (FLOOR_MOD_EXPR, operation_type, result,
                          convert (operation_type, modulus));

  if (result_type && result_type != operation_type)
    result = convert (result_type, result);

  return result;
}
\f
/* Similar, but for unary operations.  */

tree
build_unary_op (enum tree_code op_code, tree result_type, tree operand)
{
  tree type = TREE_TYPE (operand);
  tree base_type = get_base_type (type);
  tree operation_type = result_type;
  tree result;
  bool side_effects = false;

  if (operation_type
      && TREE_CODE (operation_type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (operation_type))
    operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));

  if (operation_type
      && !AGGREGATE_TYPE_P (operation_type)
      && TYPE_EXTRA_SUBTYPE_P (operation_type))
    operation_type = get_base_type (operation_type);

  switch (op_code)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      if (!operation_type)
        result_type = operation_type = TREE_TYPE (type);
      else
        gcc_assert (result_type == TREE_TYPE (type));

      result = fold_build1 (op_code, operation_type, operand);
      break;

    case TRUTH_NOT_EXPR:
      gcc_assert (result_type == base_type);
      result = invert_truthvalue (gnat_truthvalue_conversion (operand));
      break;

    case ATTR_ADDR_EXPR:
    case ADDR_EXPR:
      switch (TREE_CODE (operand))
        {
        case INDIRECT_REF:
        case UNCONSTRAINED_ARRAY_REF:
          result = TREE_OPERAND (operand, 0);

          /* Make sure the type here is a pointer, not a reference.
             GCC wants pointer types for function addresses.  */
          if (!result_type)
            result_type = build_pointer_type (type);

          /* If the underlying object can alias everything, propagate the
             property since we are effectively retrieving the object.  */
          if (POINTER_TYPE_P (TREE_TYPE (result))
              && TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (result)))
            {
              if (TREE_CODE (result_type) == POINTER_TYPE
                  && !TYPE_REF_CAN_ALIAS_ALL (result_type))
                result_type
                  = build_pointer_type_for_mode (TREE_TYPE (result_type),
                                                 TYPE_MODE (result_type),
                                                 true);
              else if (TREE_CODE (result_type) == REFERENCE_TYPE
                       && !TYPE_REF_CAN_ALIAS_ALL (result_type))
                result_type
                  = build_reference_type_for_mode (TREE_TYPE (result_type),
                                                   TYPE_MODE (result_type),
                                                   true);
            }
          break;

        case NULL_EXPR:
          result = operand;
          TREE_TYPE (result) = type = build_pointer_type (type);
          break;

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
        case COMPONENT_REF:
        case BIT_FIELD_REF:
          /* If this is for 'Address, find the address of the prefix and
             add the offset to the field.  Otherwise, do this the normal
             way.  */
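          /* For instance, a component that get_inner_reference reports at
             bit position 24 within its containing object contributes a
             byte offset of 3 to the address computed below.  */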
          if (op_code == ATTR_ADDR_EXPR)
            {
              HOST_WIDE_INT bitsize;
              HOST_WIDE_INT bitpos;
              tree offset, inner;
              enum machine_mode mode;
              int unsignedp, volatilep;

              inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
                                           &mode, &unsignedp, &volatilep,
                                           false);

              /* If INNER is a padding type whose field has a self-referential
                 size, convert to that inner type.  We know the offset is zero
                 and we need to have that type visible.  */
              if (TREE_CODE (TREE_TYPE (inner)) == RECORD_TYPE
                  && TYPE_IS_PADDING_P (TREE_TYPE (inner))
                  && (CONTAINS_PLACEHOLDER_P
                      (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS
                                             (TREE_TYPE (inner)))))))
                inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),
                                 inner);

              /* Compute the offset as a byte offset from INNER.  */
              if (!offset)
                offset = size_zero_node;

              if (bitpos % BITS_PER_UNIT != 0)
                post_error
                  ("taking address of object not aligned on storage unit?",
                   error_gnat_node);

              offset = size_binop (PLUS_EXPR, offset,
                                   size_int (bitpos / BITS_PER_UNIT));

              /* Take the address of INNER, convert the offset to void *,
                 and add them.  It will later be converted to the desired
                 result type, if any.  */
              inner = build_unary_op (ADDR_EXPR, NULL_TREE, inner);
              inner = convert (ptr_void_type_node, inner);
              result = build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
                                        inner, offset);
              result = convert (build_pointer_type (TREE_TYPE (operand)),
                                result);
              break;
            }
          goto common;

        case CONSTRUCTOR:
          /* If this is just a constructor for a padded record, we can
             just take the address of the single field and convert it to
             a pointer to our type.  */
          if (TREE_CODE (type) == RECORD_TYPE && TYPE_IS_PADDING_P (type))
            {
              result = (VEC_index (constructor_elt,
                                   CONSTRUCTOR_ELTS (operand),
                                   0)
                        ->value);

              result = convert (build_pointer_type (TREE_TYPE (operand)),
                                build_unary_op (ADDR_EXPR, NULL_TREE, result));
              break;
            }

          goto common;

        case NOP_EXPR:
          if (AGGREGATE_TYPE_P (type)
              && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand, 0))))
            return build_unary_op (ADDR_EXPR, result_type,
                                   TREE_OPERAND (operand, 0));

          /* ... fall through ... */

        case VIEW_CONVERT_EXPR:
          /* If this is just a variant conversion or if the conversion doesn't
             change the mode, get the result type from this type and go down.
             This is needed for conversions of CONST_DECLs, to eventually get
             to the address of their CORRESPONDING_VARs.  */
          if ((TYPE_MAIN_VARIANT (type)
               == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (operand, 0))))
              || (TYPE_MODE (type) != BLKmode
                  && (TYPE_MODE (type)
                      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand, 0))))))
            return build_unary_op (ADDR_EXPR,
                                   (result_type ? result_type
                                    : build_pointer_type (type)),
                                   TREE_OPERAND (operand, 0));
          goto common;

        case CONST_DECL:
          operand = DECL_CONST_CORRESPONDING_VAR (operand);

          /* ... fall through ... */

        default:
        common:

          /* If we are taking the address of a padded record whose field
             contains a template, take the address of the template.  */
          if (TREE_CODE (type) == RECORD_TYPE
              && TYPE_IS_PADDING_P (type)
              && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
              && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
            {
              type = TREE_TYPE (TYPE_FIELDS (type));
              operand = convert (type, operand);
            }

          if (type != error_mark_node)
            operation_type = build_pointer_type (type);

          gnat_mark_addressable (operand);
          result = fold_build1 (ADDR_EXPR, operation_type, operand);
        }

      TREE_CONSTANT (result) = staticp (operand) || TREE_CONSTANT (operand);
      break;

    case INDIRECT_REF:
      /* If we want to refer to an entire unconstrained array,
         make up an expression to do so.  This will never survive to
         the backend.  If TYPE is a thin pointer, first convert the
         operand to a fat pointer.  */
      if (TYPE_THIN_POINTER_P (type)
          && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
        {
          operand
            = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))),
                       operand);
          type = TREE_TYPE (operand);
        }

      if (TYPE_FAT_POINTER_P (type))
        {
          result = build1 (UNCONSTRAINED_ARRAY_REF,
                           TYPE_UNCONSTRAINED_ARRAY (type), operand);
          TREE_READONLY (result) = TREE_STATIC (result)
            = TYPE_READONLY (TYPE_UNCONSTRAINED_ARRAY (type));
        }
      else if (TREE_CODE (operand) == ADDR_EXPR)
        result = TREE_OPERAND (operand, 0);

      else
        {
          result = fold_build1 (op_code, TREE_TYPE (type), operand);
          TREE_READONLY (result) = TYPE_READONLY (TREE_TYPE (type));
        }

      side_effects
        = (!TYPE_FAT_POINTER_P (type) && TYPE_VOLATILE (TREE_TYPE (type)));
      break;

    case NEGATE_EXPR:
    case BIT_NOT_EXPR:
      {
        tree modulus = ((operation_type
                         && TREE_CODE (operation_type) == INTEGER_TYPE
                         && TYPE_MODULAR_P (operation_type))
                        ? TYPE_MODULUS (operation_type) : NULL_TREE);
        int mod_pow2 = modulus && integer_pow2p (modulus);

        /* If this is a modular type, there are various possibilities
           depending on the operation and whether the modulus is a
           power of two or not.  */

        if (modulus)
          {
            gcc_assert (operation_type == base_type);
            operand = convert (operation_type, operand);

            /* For the negate case with a binary modulus, the
               straightforward code is fastest; the TRUNC_MOD_EXPR below
               is an AND operation.  */
            if (op_code == NEGATE_EXPR && mod_pow2)
              result = fold_build2 (TRUNC_MOD_EXPR, operation_type,
                                    fold_build1 (NEGATE_EXPR, operation_type,
                                                 operand),
                                    modulus);

            /* For the nonbinary negate case, return zero for a zero operand,
               else return the modulus minus the operand.  If the modulus
               is a power of two minus one, we can do the subtraction as an
               XOR since it is equivalent and faster on most machines.  */
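            /* For instance, with modulus 7 (binary 111) and operand 5
               (binary 101), both 7 - 5 and 5 xor 7 yield 2.  */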
            else if (op_code == NEGATE_EXPR && !mod_pow2)
              {
                if (integer_pow2p (fold_build2 (PLUS_EXPR, operation_type,
                                                modulus,
                                                convert (operation_type,
                                                         integer_one_node))))
                  result = fold_build2 (BIT_XOR_EXPR, operation_type,
                                        operand, modulus);
                else
                  result = fold_build2 (MINUS_EXPR, operation_type,
                                        modulus, operand);

                result = fold_build3 (COND_EXPR, operation_type,
                                      fold_build2 (NE_EXPR,
                                                   integer_type_node,
                                                   operand,
                                                   convert
                                                   (operation_type,
                                                    integer_zero_node)),
                                      result, operand);
              }
            else
              {
                /* For the NOT cases, we need a constant equal to the
                   modulus minus one.  We XOR the operand against that
                   constant for a binary modulus and subtract the operand
                   from it for a nonbinary modulus.  */

                tree cnst = fold_build2 (MINUS_EXPR, operation_type, modulus,
                                         convert (operation_type,
                                                  integer_one_node));

                if (mod_pow2)
                  result = fold_build2 (BIT_XOR_EXPR, operation_type,
                                        operand, cnst);
                else
                  result = fold_build2 (MINUS_EXPR, operation_type,
                                        cnst, operand);
              }

            break;
          }
      }

      /* ... fall through ... */

    default:
      gcc_assert (operation_type == base_type);
      result = fold_build1 (op_code, operation_type,
                            convert (operation_type, operand));
    }

  if (side_effects)
    {
      TREE_SIDE_EFFECTS (result) = 1;
      if (TREE_CODE (result) == INDIRECT_REF)
        TREE_THIS_VOLATILE (result) = TYPE_VOLATILE (TREE_TYPE (result));
    }

  if (result_type && TREE_TYPE (result) != result_type)
    result = convert (result_type, result);

  return result;
}
\f
/* Similar, but for COND_EXPR.  */

tree
build_cond_expr (tree result_type, tree condition_operand,
                 tree true_operand, tree false_operand)
{
  bool addr_p = false;
  tree result;

  /* The front-end verified that the result, true and false operands have
     the same base type.  Convert everything to the result type.  */
  true_operand = convert (result_type, true_operand);
  false_operand = convert (result_type, false_operand);

  /* If the result type is unconstrained, take the address of the operands
     and then dereference our result.  */
  if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
      || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
    {
      result_type = build_pointer_type (result_type);
      true_operand = build_unary_op (ADDR_EXPR, result_type, true_operand);
      false_operand = build_unary_op (ADDR_EXPR, result_type, false_operand);
      addr_p = true;
    }

  result = fold_build3 (COND_EXPR, result_type, condition_operand,
                        true_operand, false_operand);

  /* If we have a common SAVE_EXPR (possibly surrounded by arithmetic)
     in both arms, make sure it gets evaluated by moving it ahead of the
     conditional expression.  This is necessary because it is evaluated
     in only one place at run time and would otherwise be uninitialized
     in one of the arms.  */
  true_operand = skip_simple_arithmetic (true_operand);
  false_operand = skip_simple_arithmetic (false_operand);

  if (true_operand == false_operand && TREE_CODE (true_operand) == SAVE_EXPR)
    result = build2 (COMPOUND_EXPR, result_type, true_operand, result);

  if (addr_p)
    result = build_unary_op (INDIRECT_REF, NULL_TREE, result);

  return result;
}

/* Similar, but for RETURN_EXPR.  If RESULT_DECL is non-zero, build
   a RETURN_EXPR around the assignment of RET_VAL to RESULT_DECL.
   If RESULT_DECL is zero, build a bare RETURN_EXPR.  */

tree
build_return_expr (tree result_decl, tree ret_val)
{
  tree result_expr;

  if (result_decl)
    {
      /* The gimplifier explicitly enforces the following invariant:

              RETURN_EXPR
                  |
              MODIFY_EXPR
              /        \
             /          \
         RESULT_DECL    ...

         As a consequence, type-homogeneity dictates that we use the type
         of the RESULT_DECL as the operation type.  */

      tree operation_type = TREE_TYPE (result_decl);

      /* Convert the right operand to the operation type.  Note that
         it's the same transformation as in the MODIFY_EXPR case of
         build_binary_op with the additional guarantee that the type
         cannot involve a placeholder, since otherwise the function
         would use the "target pointer" return mechanism.  */

      if (operation_type != TREE_TYPE (ret_val))
        ret_val = convert (operation_type, ret_val);

      result_expr
        = build2 (MODIFY_EXPR, operation_type, result_decl, ret_val);
    }
  else
    result_expr = NULL_TREE;

  return build1 (RETURN_EXPR, void_type_node, result_expr);
}
\f
/* Build a CALL_EXPR to call FUNDECL with one argument, ARG.  Return
   the CALL_EXPR.  */

tree
build_call_1_expr (tree fundecl, tree arg)
{
  tree call = build_call_nary (TREE_TYPE (TREE_TYPE (fundecl)),
                               build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
                               1, arg);
  TREE_SIDE_EFFECTS (call) = 1;
  return call;
}

/* Build a CALL_EXPR to call FUNDECL with two arguments, ARG1 & ARG2.  Return
   the CALL_EXPR.  */

tree
build_call_2_expr (tree fundecl, tree arg1, tree arg2)
{
  tree call = build_call_nary (TREE_TYPE (TREE_TYPE (fundecl)),
                               build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
                               2, arg1, arg2);
  TREE_SIDE_EFFECTS (call) = 1;
  return call;
}

/* Likewise to call FUNDECL with no arguments.  */

tree
build_call_0_expr (tree fundecl)
{
  /* We rely on build_call_nary to compute TREE_SIDE_EFFECTS.  This makes
     it possible to propagate DECL_IS_PURE on parameterless functions.  */
  tree call = build_call_nary (TREE_TYPE (TREE_TYPE (fundecl)),
                               build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
                               0);
  return call;
}
\f
/* Call a function that raises an exception and pass the line number and file
   name, if requested.  MSG says which exception function to call.

   GNAT_NODE is the gnat node conveying the source location for which the
   error should be signaled, or Empty in which case the error is signaled on
   the current ref_file_name/input_line.

   KIND says which kind of exception this is for
   (N_Raise_{Constraint,Storage,Program}_Error).  */

tree
build_call_raise (int msg, Node_Id gnat_node, char kind)
{
  tree fndecl = gnat_raise_decls[msg];
  tree label = get_exception_label (kind);
  tree filename;
  int line_number;
  const char *str;
  int len;

  /* If this is to be done as a goto, handle that case.  */
  if (label)
    {
      Entity_Id local_raise = Get_Local_Raise_Call_Entity ();
      tree gnu_result = build1 (GOTO_EXPR, void_type_node, label);

      /* If Local_Raise is present, generate
         Local_Raise (exception'Identity);  */
      if (Present (local_raise))
        {
          tree gnu_local_raise
            = gnat_to_gnu_entity (local_raise, NULL_TREE, 0);
          tree gnu_exception_entity
            = gnat_to_gnu_entity (Get_RT_Exception_Entity (msg), NULL_TREE, 0);
          tree gnu_call
            = build_call_1_expr (gnu_local_raise,
                                 build_unary_op (ADDR_EXPR, NULL_TREE,
                                                 gnu_exception_entity));

          gnu_result = build2 (COMPOUND_EXPR, void_type_node,
                               gnu_call, gnu_result);
        }

      return gnu_result;
    }

  str
    = (Debug_Flag_NN || Exception_Locations_Suppressed)
      ? ""
      : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
        ? IDENTIFIER_POINTER
          (get_identifier (Get_Name_String
                           (Debug_Source_Name
                            (Get_Source_File_Index (Sloc (gnat_node))))))
        : ref_filename;

  len = strlen (str);
  filename = build_string (len, str);
  line_number
    = (gnat_node != Empty && Sloc (gnat_node) != No_Location)
      ? Get_Logical_Line_Number (Sloc (gnat_node)) : input_line;

  TREE_TYPE (filename)
    = build_array_type (char_type_node, build_index_type (size_int (len)));

  return
    build_call_2_expr (fndecl,
                       build1 (ADDR_EXPR, build_pointer_type (char_type_node),
                               filename),
                       build_int_cst (NULL_TREE, line_number));
}
\f
/* qsort comparer for the bit positions of two constructor elements
   for record components.  */
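
/* The DECL_UID tie-breaker below makes the ordering deterministic when two
   fields, e.g. zero-sized ones, share the same bit position; qsort itself
   is not a stable sort.  */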

static int
compare_elmt_bitpos (const PTR rt1, const PTR rt2)
{
  const_tree const elmt1 = * (const_tree const *) rt1;
  const_tree const elmt2 = * (const_tree const *) rt2;
  const_tree const field1 = TREE_PURPOSE (elmt1);
  const_tree const field2 = TREE_PURPOSE (elmt2);
  const int ret
    = tree_int_cst_compare (bit_position (field1), bit_position (field2));

  return ret ? ret : (int) (DECL_UID (field1) - DECL_UID (field2));
}

/* Return a CONSTRUCTOR of TYPE whose list is LIST.  */

tree
gnat_build_constructor (tree type, tree list)
{
  bool allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
  bool side_effects = false;
  tree elmt, result;
  int n_elmts;

  /* Scan the elements to see if they are all constant or if any has side
     effects, to let us set global flags on the resulting constructor.  Count
     the elements along the way for possible sorting purposes below.  */
  for (n_elmts = 0, elmt = list; elmt; elmt = TREE_CHAIN (elmt), n_elmts++)
    {
      tree obj = TREE_PURPOSE (elmt);
      tree val = TREE_VALUE (elmt);

      /* The predicate must be in keeping with output_constructor.  */
      if (!TREE_CONSTANT (val)
          || (TREE_CODE (type) == RECORD_TYPE
              && CONSTRUCTOR_BITFIELD_P (obj)
              && !initializer_constant_valid_for_bitfield_p (val))
          || !initializer_constant_valid_p (val, TREE_TYPE (val)))
        allconstant = false;

      if (TREE_SIDE_EFFECTS (val))
        side_effects = true;

      /* Propagate a NULL_EXPR from the size of the type.  We won't ever
         be executing the code we generate here in that case, but handle it
         specially to avoid the compiler blowing up.  */
      if (TREE_CODE (type) == RECORD_TYPE
          && (result = contains_null_expr (DECL_SIZE (obj))) != NULL_TREE)
        return build1 (NULL_EXPR, type, TREE_OPERAND (result, 0));
    }
1670
1671 /* For record types with constant components only, sort field list
1672 by increasing bit position. This is necessary to ensure the
1673 constructor can be output as static data. */
1674 if (allconstant && TREE_CODE (type) == RECORD_TYPE && n_elmts > 1)
1675 {
1676 /* Fill an array with an element tree per index, and ask qsort to order
1677 them according to what a bitpos comparison function says. */
1678 tree *gnu_arr = (tree *) alloca (sizeof (tree) * n_elmts);
1679 int i;
1680
1681 for (i = 0, elmt = list; elmt; elmt = TREE_CHAIN (elmt), i++)
1682 gnu_arr[i] = elmt;
1683
1684 qsort (gnu_arr, n_elmts, sizeof (tree), compare_elmt_bitpos);
1685
1686 /* Then reconstruct the list from the sorted array contents. */
1687 list = NULL_TREE;
1688 for (i = n_elmts - 1; i >= 0; i--)
1689 {
1690 TREE_CHAIN (gnu_arr[i]) = list;
1691 list = gnu_arr[i];
1692 }
1693 }
1694
1695 result = build_constructor_from_list (type, list);
1696 TREE_CONSTANT (result) = TREE_STATIC (result) = allconstant;
1697 TREE_SIDE_EFFECTS (result) = side_effects;
1698 TREE_READONLY (result) = TYPE_READONLY (type) || allconstant;
1699 return result;
1700 }
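
/* To illustrate the sorting done above: a LIST built in source order
   for a hypothetical record laid out as

     (B at bit 32) -> (C at bit 64) -> (A at bit 0)

   is rewritten into (A at bit 0) -> (B at bit 32) -> (C at bit 64),
   which is the sequential layout output_constructor expects when
   emitting the value as static data.  */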
1701 \f
1702 /* Return a COMPONENT_REF to access a field denoted either by COMPONENT,
1703 an IDENTIFIER_NODE giving the name of the field, or by FIELD, a
1704 FIELD_DECL for the field itself. Don't fold the result if NO_FOLD_P is true.
1705
1706 We also handle the fact that we might have been passed a pointer to the
1707 actual record and know how to look for fields in variant parts. */
1708
1709 static tree
1710 build_simple_component_ref (tree record_variable, tree component,
1711 tree field, bool no_fold_p)
1712 {
1713 tree record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_variable));
1714 tree ref, inner_variable;
1715
1716 gcc_assert ((TREE_CODE (record_type) == RECORD_TYPE
1717 || TREE_CODE (record_type) == UNION_TYPE
1718 || TREE_CODE (record_type) == QUAL_UNION_TYPE)
1719 && TYPE_SIZE (record_type)
1720 && (component != 0) != (field != 0));
1721
1722 /* If no field was specified, look for a field with the specified name
1723 in the current record only. */
1724 if (!field)
1725 for (field = TYPE_FIELDS (record_type); field;
1726 field = TREE_CHAIN (field))
1727 if (DECL_NAME (field) == component)
1728 break;
1729
1730 if (!field)
1731 return NULL_TREE;
1732
1733 /* If this field is not in the specified record, see if we can find
1734 something in the record whose original field is the same as this one. */
1735 if (DECL_CONTEXT (field) != record_type)
1736 /* Look for a field in RECORD_TYPE corresponding to this one. */
1737 {
1738 tree new_field;
1739
1740 /* First, loop through the normal components. */
1741
1742 for (new_field = TYPE_FIELDS (record_type); new_field;
1743 new_field = TREE_CHAIN (new_field))
1744 if (field == new_field
1745 || DECL_ORIGINAL_FIELD (new_field) == field
1746 || new_field == DECL_ORIGINAL_FIELD (field)
1747 || (DECL_ORIGINAL_FIELD (field)
1748 && (DECL_ORIGINAL_FIELD (field)
1749 == DECL_ORIGINAL_FIELD (new_field))))
1750 break;
1751
1752 /* Next, loop through the DECL_INTERNAL_P components if we haven't
1753 found the component in the first search. Doing this search in two
1754 steps is required to avoid hidden homonymous fields in the
1755 _Parent field. */
1756
1757 if (!new_field)
1758 for (new_field = TYPE_FIELDS (record_type); new_field;
1759 new_field = TREE_CHAIN (new_field))
1760 if (DECL_INTERNAL_P (new_field))
1761 {
1762 tree field_ref
1763 = build_simple_component_ref (record_variable,
1764 NULL_TREE, new_field, no_fold_p);
1765 ref = build_simple_component_ref (field_ref, NULL_TREE, field,
1766 no_fold_p);
1767
1768 if (ref)
1769 return ref;
1770 }
1771
1772 field = new_field;
1773 }
1774
1775 if (!field)
1776 return NULL_TREE;
1777
1778 /* If the field's offset has overflowed, do not attempt to access it
1779 as doing so may trigger sanity checks deeper in the back-end.
1780 Note that we don't need to warn since this will be done on trying
1781 to declare the object. */
1782 if (TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST
1783 && TREE_OVERFLOW (DECL_FIELD_OFFSET (field)))
1784 return NULL_TREE;
1785
1786 /* Look through conversion between type variants. Note that this
1787 is transparent as far as the field is concerned. */
1788 if (TREE_CODE (record_variable) == VIEW_CONVERT_EXPR
1789 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (record_variable, 0)))
1790 == record_type)
1791 inner_variable = TREE_OPERAND (record_variable, 0);
1792 else
1793 inner_variable = record_variable;
1794
1795 ref = build3 (COMPONENT_REF, TREE_TYPE (field), inner_variable, field,
1796 NULL_TREE);
1797
1798 if (TREE_READONLY (record_variable) || TREE_READONLY (field))
1799 TREE_READONLY (ref) = 1;
1800 if (TREE_THIS_VOLATILE (record_variable) || TREE_THIS_VOLATILE (field)
1801 || TYPE_VOLATILE (record_type))
1802 TREE_THIS_VOLATILE (ref) = 1;
1803
1804 if (no_fold_p)
1805 return ref;
1806
1807 /* The generic folder may punt in this case because the inner array type
1808 can be self-referential, but folding is in fact not problematic. */
1809 else if (TREE_CODE (record_variable) == CONSTRUCTOR
1810 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (record_variable)))
1811 {
1812 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (record_variable);
1813 unsigned HOST_WIDE_INT idx;
1814 tree index, value;
1815 FOR_EACH_CONSTRUCTOR_ELT (elts, idx, index, value)
1816 if (index == field)
1817 return value;
1818 return ref;
1819 }
1820
1821 else
1822 return fold (ref);
1823 }
1824 \f
1825 /* Like build_simple_component_ref, except that we give an error if the
1826 reference could not be found. */
1827
1828 tree
1829 build_component_ref (tree record_variable, tree component,
1830 tree field, bool no_fold_p)
1831 {
1832 tree ref = build_simple_component_ref (record_variable, component, field,
1833 no_fold_p);
1834
1835 if (ref)
1836 return ref;
1837
1838 /* If FIELD was specified, assume this is an invalid user field so raise
1839 Constraint_Error. Otherwise, we have no type to return so abort. */
1840 gcc_assert (field);
1841 return build1 (NULL_EXPR, TREE_TYPE (field),
1842 build_call_raise (CE_Discriminant_Check_Failed, Empty,
1843 N_Raise_Constraint_Error));
1844 }
1845 \f
1846 /* Helper for build_call_alloc_dealloc, with arguments to be interpreted
1847 identically. Process the case where a GNAT_PROC to call is provided. */
1848
1849 static inline tree
1850 build_call_alloc_dealloc_proc (tree gnu_obj, tree gnu_size, tree gnu_type,
1851 Entity_Id gnat_proc, Entity_Id gnat_pool)
1852 {
1853 tree gnu_proc = gnat_to_gnu (gnat_proc);
1854 tree gnu_proc_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_proc);
1855 tree gnu_call;
1856
1857 /* The storage pools are obviously always tagged types, but the
1858 secondary stack uses the same mechanism and is not tagged. */
1859 if (Is_Tagged_Type (Etype (gnat_pool)))
1860 {
1861 /* The size is the third formal parameter; the alignment parameter
1862 has the same type as the size. */
1863 Entity_Id gnat_size_type
1864 = Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc))));
1865 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
1866
1867 tree gnu_pool = gnat_to_gnu (gnat_pool);
1868 tree gnu_pool_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_pool);
1869 tree gnu_align = size_int (TYPE_ALIGN (gnu_type) / BITS_PER_UNIT);
1870
1871 gnu_size = convert (gnu_size_type, gnu_size);
1872 gnu_align = convert (gnu_size_type, gnu_align);
1873
1874 /* The first arg is always the address of the storage pool; next
1875 comes the address of the object, for a deallocator, then the
1876 size and alignment. */
1877 if (gnu_obj)
1878 gnu_call = build_call_nary (TREE_TYPE (TREE_TYPE (gnu_proc)),
1879 gnu_proc_addr, 4, gnu_pool_addr,
1880 gnu_obj, gnu_size, gnu_align);
1881 else
1882 gnu_call = build_call_nary (TREE_TYPE (TREE_TYPE (gnu_proc)),
1883 gnu_proc_addr, 3, gnu_pool_addr,
1884 gnu_size, gnu_align);
1885 }
1886
1887 /* Secondary stack case. */
1888 else
1889 {
1890 /* The size is the second parameter. */
1891 Entity_Id gnat_size_type
1892 = Etype (Next_Formal (First_Formal (gnat_proc)));
1893 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
1894
1895 gnu_size = convert (gnu_size_type, gnu_size);
1896
1897 /* The first arg is the address of the object, for a deallocator,
1898 then the size. */
1899 if (gnu_obj)
1900 gnu_call = build_call_nary (TREE_TYPE (TREE_TYPE (gnu_proc)),
1901 gnu_proc_addr, 2, gnu_obj, gnu_size);
1902 else
1903 gnu_call = build_call_nary (TREE_TYPE (TREE_TYPE (gnu_proc)),
1904 gnu_proc_addr, 1, gnu_size);
1905 }
1906
1907 TREE_SIDE_EFFECTS (gnu_call) = 1;
1908 return gnu_call;
1909 }
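
/* The formal positions assumed above line up with the user-defined
   storage pool operations of RM 13.11, whose (abridged) profile is

     procedure Allocate
       (Pool                     : in out Root_Storage_Pool;
        Storage_Address          : out Address;
        Size_In_Storage_Elements : Storage_Count;
        Alignment                : Storage_Count);

   making the size the third formal, whereas a secondary stack
   allocation routine of the form SS_Allocate (Addr, Storage_Size)
   has it as the second formal.  The exact runtime profiles quoted
   here are assumptions for illustration, not taken from this file.  */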
1910
1911 /* Helper for build_call_alloc_dealloc, to build and return an allocator for
1912 DATA_SIZE bytes aimed at containing a DATA_TYPE object, using the default
1913 __gnat_malloc allocator. Honor DATA_TYPE alignments greater than what the
1914 latter offers. */
1915
1916 static inline tree
1917 maybe_wrap_malloc (tree data_size, tree data_type, Node_Id gnat_node)
1918 {
1919 /* When the DATA_TYPE alignment is stricter than what malloc offers
1920 (super-aligned case), we allocate an "aligning" wrapper type and return
1921 the address of its single data field with the malloc's return value
1922 stored just in front. */
1923
1924 unsigned int data_align = TYPE_ALIGN (data_type);
1925 unsigned int default_allocator_alignment
1926 = get_target_default_allocator_alignment () * BITS_PER_UNIT;
1927
1928 tree aligning_type
1929 = ((data_align > default_allocator_alignment)
1930 ? make_aligning_type (data_type, data_align, data_size,
1931 default_allocator_alignment,
1932 POINTER_SIZE / BITS_PER_UNIT)
1933 : NULL_TREE);
1934
1935 tree size_to_malloc
1936 = aligning_type ? TYPE_SIZE_UNIT (aligning_type) : data_size;
1937
1938 tree malloc_ptr;
1939
1940 /* On VMS, allocate 32-bit memory if 64-bit memory is disabled, or if
1941 pointers are 64-bit but the access type is 32-bit or has Convention C. */
1942 if (TARGET_ABI_OPEN_VMS
1943 && (!TARGET_MALLOC64
1944 || (POINTER_SIZE == 64
1945 && (UI_To_Int (Esize (Etype (gnat_node))) == 32
1946 || Convention (Etype (gnat_node)) == Convention_C))))
1947 malloc_ptr = build_call_1_expr (malloc32_decl, size_to_malloc);
1948 else
1949 malloc_ptr = build_call_1_expr (malloc_decl, size_to_malloc);
1950
1951 if (aligning_type)
1952 {
1953 /* Latch malloc's return value and get a pointer to the aligning field
1954 first. */
1955 tree storage_ptr = save_expr (malloc_ptr);
1956
1957 tree aligning_record_addr
1958 = convert (build_pointer_type (aligning_type), storage_ptr);
1959
1960 tree aligning_record
1961 = build_unary_op (INDIRECT_REF, NULL_TREE, aligning_record_addr);
1962
1963 tree aligning_field
1964 = build_component_ref (aligning_record, NULL_TREE,
1965 TYPE_FIELDS (aligning_type), 0);
1966
1967 tree aligning_field_addr
1968 = build_unary_op (ADDR_EXPR, NULL_TREE, aligning_field);
1969
1970 /* Then arrange to store the allocator's return value ahead
1971 and return. */
1972 tree storage_ptr_slot_addr
1973 = build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
1974 convert (ptr_void_type_node, aligning_field_addr),
1975 size_int (-POINTER_SIZE / BITS_PER_UNIT));
1976
1977 tree storage_ptr_slot
1978 = build_unary_op (INDIRECT_REF, NULL_TREE,
1979 convert (build_pointer_type (ptr_void_type_node),
1980 storage_ptr_slot_addr));
1981
1982 return
1983 build2 (COMPOUND_EXPR, TREE_TYPE (aligning_field_addr),
1984 build_binary_op (MODIFY_EXPR, NULL_TREE,
1985 storage_ptr_slot, storage_ptr),
1986 aligning_field_addr);
1987 }
1988 else
1989 return malloc_ptr;
1990 }
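
/* A sketch of the super-aligned layout built above:

     storage_ptr (malloc's return value)
     v
     +-----------+------------------------+------------------------+
     | (padding) | saved storage_ptr slot | data field             |
     +-----------+------------------------+------------------------+
                                          ^
                                          aligning_field_addr, the
                                          value returned, aligned on
                                          DATA_ALIGN

   The original malloc result thus always sits one pointer-sized slot
   in front of the address handed out, which is the invariant that
   maybe_wrap_free relies on below.  */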
1991
1992 /* Helper for build_call_alloc_dealloc, to release a DATA_TYPE object
1993 designated by DATA_PTR using the __gnat_free entry point. */
1994
1995 static inline tree
1996 maybe_wrap_free (tree data_ptr, tree data_type)
1997 {
1998 /* In the regular alignment case, we pass the data pointer straight to free.
1999 In the superaligned case, we need to retrieve the initial allocator
2000 return value, stored in front of the data block at allocation time. */
2001
2002 unsigned int data_align = TYPE_ALIGN (data_type);
2003 unsigned int default_allocator_alignment
2004 = get_target_default_allocator_alignment () * BITS_PER_UNIT;
2005
2006 tree free_ptr;
2007
2008 if (data_align > default_allocator_alignment)
2009 {
2010 /* DATA_FRONT_PTR (void *)
2011 = (void *)DATA_PTR - sizeof (void *) */
2012 tree data_front_ptr
2013 = build_binary_op
2014 (POINTER_PLUS_EXPR, ptr_void_type_node,
2015 convert (ptr_void_type_node, data_ptr),
2016 size_int (-POINTER_SIZE / BITS_PER_UNIT));
2017
2018 /* FREE_PTR (void *) = *(void **)DATA_FRONT_PTR */
2019 free_ptr
2020 = build_unary_op
2021 (INDIRECT_REF, NULL_TREE,
2022 convert (build_pointer_type (ptr_void_type_node), data_front_ptr));
2023 }
2024 else
2025 free_ptr = data_ptr;
2026
2027 return build_call_1_expr (free_decl, free_ptr);
2028 }
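
/* Putting the two helpers together: for a super-aligned pointer P
   obtained from maybe_wrap_malloc, the tree built here behaves like
   the C fragment

     __gnat_free (*(void **) ((char *) P - sizeof (void *)));

   i.e. it releases the block through the malloc result saved in
   front of the data, not through P itself.  */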
2029
2030 /* Build a GCC tree to call an allocation or deallocation function.
2031 If GNU_OBJ is nonzero, it is an object to deallocate. Otherwise,
2032 generate an allocator.
2033
2034 GNU_SIZE is the number of bytes to allocate and GNU_TYPE is the contained
2035 object type, used to determine the to-be-honored address alignment.
2036 GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the storage
2037 pool to use. If not present, malloc and free are used. GNAT_NODE is used
2038 to provide an error location for restriction violation messages. */
2039
2040 tree
2041 build_call_alloc_dealloc (tree gnu_obj, tree gnu_size, tree gnu_type,
2042 Entity_Id gnat_proc, Entity_Id gnat_pool,
2043 Node_Id gnat_node)
2044 {
2045 gnu_size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (gnu_size, gnu_obj);
2046
2047 /* Explicit proc to call? It is assumed to deal with the type
2048 alignment constraints. */
2049 if (Present (gnat_proc))
2050 return build_call_alloc_dealloc_proc (gnu_obj, gnu_size, gnu_type,
2051 gnat_proc, gnat_pool);
2052
2053 /* Otherwise, object to "free" or "malloc" with possible special processing
2054 for alignments stricter than what the default allocator honors. */
2055 else if (gnu_obj)
2056 return maybe_wrap_free (gnu_obj, gnu_type);
2057 else
2058 {
2059 /* Assert that we can no longer be called with this special pool. */
2060 gcc_assert (gnat_pool != -1);
2061
2062 /* Check that we aren't violating the associated restriction. */
2063 if (!(Nkind (gnat_node) == N_Allocator && Comes_From_Source (gnat_node)))
2064 Check_No_Implicit_Heap_Alloc (gnat_node);
2065
2066 return maybe_wrap_malloc (gnu_size, gnu_type, gnat_node);
2067 }
2068 }
2069 \f
2070 /* Build a GCC tree to correspond to allocating an object of TYPE whose
2071 initial value is INIT, if INIT is nonzero. Convert the expression to
2072 RESULT_TYPE, which must be some type of pointer. Return the tree.
2073
2074 GNAT_PROC and GNAT_POOL optionally give the procedure to call and
2075 the storage pool to use. GNAT_NODE is used to provide an error
2076 location for restriction violation messages. If IGNORE_INIT_TYPE is
2077 true, ignore the type of INIT for the purpose of determining the size;
2078 this will cause the maximum size to be allocated if TYPE is of
2079 self-referential size. */
2080
2081 tree
2082 build_allocator (tree type, tree init, tree result_type, Entity_Id gnat_proc,
2083 Entity_Id gnat_pool, Node_Id gnat_node, bool ignore_init_type)
2084 {
2085 tree size = TYPE_SIZE_UNIT (type);
2086 tree result;
2087
2088 /* If an initializer is present and is a NULL_EXPR, just return a new one. */
2089 if (init && TREE_CODE (init) == NULL_EXPR)
2090 return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));
2091
2092 /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
2093 sizes of the object and its template. Allocate the whole thing and
2094 fill in the parts that are known. */
2095 else if (TYPE_FAT_OR_THIN_POINTER_P (result_type))
2096 {
2097 tree storage_type
2098 = build_unc_object_type_from_ptr (result_type, type,
2099 get_identifier ("ALLOC"));
2100 tree template_type = TREE_TYPE (TYPE_FIELDS (storage_type));
2101 tree storage_ptr_type = build_pointer_type (storage_type);
2102 tree storage;
2103 tree template_cons = NULL_TREE;
2104
2105 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (storage_type),
2106 init);
2107
2108 /* If the size overflows, pass -1 so the allocator will raise
2109 Storage_Error. */
2110 if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
2111 size = ssize_int (-1);
2112
2113 storage = build_call_alloc_dealloc (NULL_TREE, size, storage_type,
2114 gnat_proc, gnat_pool, gnat_node);
2115 storage = convert (storage_ptr_type, protect_multiple_eval (storage));
2116
2117 if (TREE_CODE (type) == RECORD_TYPE && TYPE_IS_PADDING_P (type))
2118 {
2119 type = TREE_TYPE (TYPE_FIELDS (type));
2120
2121 if (init)
2122 init = convert (type, init);
2123 }
2124
2125 /* If there is an initializing expression, make a constructor for
2126 the entire object including the bounds and copy it into the
2127 object. If there is no initializing expression, just set the
2128 bounds. */
2129 if (init)
2130 {
2131 template_cons = tree_cons (TREE_CHAIN (TYPE_FIELDS (storage_type)),
2132 init, NULL_TREE);
2133 template_cons = tree_cons (TYPE_FIELDS (storage_type),
2134 build_template (template_type, type,
2135 init),
2136 template_cons);
2137
2138 return convert
2139 (result_type,
2140 build2 (COMPOUND_EXPR, storage_ptr_type,
2141 build_binary_op
2142 (MODIFY_EXPR, storage_type,
2143 build_unary_op (INDIRECT_REF, NULL_TREE,
2144 convert (storage_ptr_type, storage)),
2145 gnat_build_constructor (storage_type, template_cons)),
2146 convert (storage_ptr_type, storage)));
2147 }
2148 else
2149 return build2
2150 (COMPOUND_EXPR, result_type,
2151 build_binary_op
2152 (MODIFY_EXPR, template_type,
2153 build_component_ref
2154 (build_unary_op (INDIRECT_REF, NULL_TREE,
2155 convert (storage_ptr_type, storage)),
2156 NULL_TREE, TYPE_FIELDS (storage_type), 0),
2157 build_template (template_type, type, NULL_TREE)),
2158 convert (result_type, convert (storage_ptr_type, storage)));
2159 }
2160
2161 /* If we have an initializing expression, see if its size is simpler
2162 than the size from the type. */
2163 if (!ignore_init_type && init && TYPE_SIZE_UNIT (TREE_TYPE (init))
2164 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init))) == INTEGER_CST
2165 || CONTAINS_PLACEHOLDER_P (size)))
2166 size = TYPE_SIZE_UNIT (TREE_TYPE (init));
2167
2168 /* If the size is still self-referential, reference the initializing
2169 expression, if it is present. If not, this must have been a
2170 call to allocate a library-level object, in which case we use
2171 the maximum size. */
2172 if (CONTAINS_PLACEHOLDER_P (size))
2173 {
2174 if (!ignore_init_type && init)
2175 size = substitute_placeholder_in_expr (size, init);
2176 else
2177 size = max_size (size, true);
2178 }
2179
2180 /* If the size overflows, pass -1 so the allocator will raise
2181 Storage_Error. */
2182 if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
2183 size = ssize_int (-1);
2184
2185 result = convert (result_type,
2186 build_call_alloc_dealloc (NULL_TREE, size, type,
2187 gnat_proc, gnat_pool,
2188 gnat_node));
2189
2190 /* If we have an initial value, put the new address into a SAVE_EXPR, assign
2191 the value, and return the address. Do this with a COMPOUND_EXPR. */
2192
2193 if (init)
2194 {
2195 result = save_expr (result);
2196 result
2197 = build2 (COMPOUND_EXPR, TREE_TYPE (result),
2198 build_binary_op
2199 (MODIFY_EXPR, NULL_TREE,
2200 build_unary_op (INDIRECT_REF,
2201 TREE_TYPE (TREE_TYPE (result)), result),
2202 init),
2203 result);
2204 }
2205
2206 return convert (result_type, result);
2207 }
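
/* For instance, for an Ada allocator of an unconstrained array type,
   say

     P := new String'("AB");

   the fat-or-thin-pointer branch above allocates storage for both the
   bounds template and the data, stores the template (here 1 .. 2)
   and the initializing expression through a single constructor
   assignment, and converts the storage address to RESULT_TYPE.  The
   Ada fragment is only illustrative.  */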
2208 \f
2209 /* Fill in a VMS descriptor for EXPR and return a constructor for it.
2210 GNAT_FORMAL is how we find the descriptor record. GNAT_ACTUAL is
2211 how we derive the source location to raise C_E on an out of range
2212 pointer. */
2213
2214 tree
2215 fill_vms_descriptor (tree expr, Entity_Id gnat_formal, Node_Id gnat_actual)
2216 {
2217 tree field;
2218 tree parm_decl = get_gnu_tree (gnat_formal);
2219 tree const_list = NULL_TREE;
2220 tree record_type = TREE_TYPE (TREE_TYPE (parm_decl));
2221 int do_range_check
2222 = strcmp ("MBO",
2223 IDENTIFIER_POINTER (DECL_NAME (TYPE_FIELDS (record_type))));
2224
2225 expr = maybe_unconstrained_array (expr);
2226 gnat_mark_addressable (expr);
2227
2228 for (field = TYPE_FIELDS (record_type); field; field = TREE_CHAIN (field))
2229 {
2230 tree conexpr = convert (TREE_TYPE (field),
2231 SUBSTITUTE_PLACEHOLDER_IN_EXPR
2232 (DECL_INITIAL (field), expr));
2233
2234 /* Check to ensure that only 32-bit pointers are passed in
2235 32-bit descriptors. */
2236 if (do_range_check
2237 && strcmp (IDENTIFIER_POINTER (DECL_NAME (field)), "POINTER") == 0)
2238 {
2239 tree pointer64type
2240 = build_pointer_type_for_mode (void_type_node, DImode, false);
2241 tree addr64expr = build_unary_op (ADDR_EXPR, pointer64type, expr);
2242 tree malloc64low
2243 = build_int_cstu (long_integer_type_node, 0x80000000);
2244
2245 add_stmt (build3 (COND_EXPR, void_type_node,
2246 build_binary_op (GE_EXPR, long_integer_type_node,
2247 convert (long_integer_type_node,
2248 addr64expr),
2249 malloc64low),
2250 build_call_raise (CE_Range_Check_Failed, gnat_actual,
2251 N_Raise_Constraint_Error),
2252 NULL_TREE));
2253 }
2254 const_list = tree_cons (field, conexpr, const_list);
2255 }
2256
2257 return gnat_build_constructor (record_type, nreverse (const_list));
2258 }
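
/* The check built above compares the 64-bit address of EXPR against
   2**31; in rough C terms

     if ((long long) &expr >= 0x80000000LL)
       raise Constraint_Error;       -- sketch, not valid C

   so an object that is not addressable through 32 bits is rejected
   rather than silently truncated into a 32-bit descriptor.  */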
2259
2260 /* Indicate that we need to take the address of EXPR_NODE and that it
2261 therefore should not be allocated in a register. Return true if successful. */
2262
2263 bool
2264 gnat_mark_addressable (tree expr_node)
2265 {
2266 while (1)
2267 switch (TREE_CODE (expr_node))
2268 {
2269 case ADDR_EXPR:
2270 case COMPONENT_REF:
2271 case ARRAY_REF:
2272 case ARRAY_RANGE_REF:
2273 case REALPART_EXPR:
2274 case IMAGPART_EXPR:
2275 case VIEW_CONVERT_EXPR:
2276 case NON_LVALUE_EXPR:
2277 CASE_CONVERT:
2278 expr_node = TREE_OPERAND (expr_node, 0);
2279 break;
2280
2281 case CONSTRUCTOR:
2282 TREE_ADDRESSABLE (expr_node) = 1;
2283 return true;
2284
2285 case VAR_DECL:
2286 case PARM_DECL:
2287 case RESULT_DECL:
2288 TREE_ADDRESSABLE (expr_node) = 1;
2289 return true;
2290
2291 case FUNCTION_DECL:
2292 TREE_ADDRESSABLE (expr_node) = 1;
2293 return true;
2294
2295 case CONST_DECL:
2296 return (DECL_CONST_CORRESPONDING_VAR (expr_node)
2297 && (gnat_mark_addressable
2298 (DECL_CONST_CORRESPONDING_VAR (expr_node))));
2299 default:
2300 return true;
2301 }
2302 }
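
/* For example, marking an expression such as REC.COMP (I) peels off
   the COMPONENT_REF and ARRAY_REF layers and finally sets
   TREE_ADDRESSABLE on the VAR_DECL underneath, ensuring the object
   is not allocated in a register (names here are illustrative).  */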