/****************************************************************************
 *                                                                          *
 *                         GNAT COMPILER COMPONENTS                         *
 *                                                                          *
 *                          C Implementation File                           *
 *                                                                          *
 *          Copyright (C) 1992-2012, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software;  you can  redistribute it  and/or modify it under *
 * terms of the  GNU General Public License as published  by the Free Soft- *
 * ware  Foundation;  either version 3,  or (at your option) any later ver- *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY;  without even the  implied warranty of MERCHANTABILITY *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for more details.  You should have received a copy of the GNU General    *
 * Public License along with GCC; see the file COPYING3.  If not see        *
 * <http://www.gnu.org/licenses/>.                                          *
 *                                                                          *
 * GNAT was originally developed  by the GNAT team at  New York University. *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 *                                                                          *
 ****************************************************************************/
#include "coretypes.h"
#include "diagnostic-core.h"
#include "common/common-target.h"
#include "langhooks.h"
#include "diagnostic.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "tree-iterator.h"

#ifndef MAX_BITS_PER_WORD
#define MAX_BITS_PER_WORD  BITS_PER_WORD
#endif
/* If nonzero, pretend we are allocating at global level.  */
int force_global;

/* The default alignment of "double" floating-point types, i.e. floating
   point types whose size is equal to 64 bits, or 0 if this alignment is
   not specifically capped.  */
int double_float_alignment;

/* The default alignment of "double" or larger scalar types, i.e. scalar
   types whose size is greater or equal to 64 bits, or 0 if this alignment
   is not specifically capped.  */
int double_scalar_alignment;

/* Tree nodes for the various types and decls we create.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons.  */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];

/* Likewise, but with extra info for each of the possible raise reasons.  */
tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];
/* Forward declarations for handlers of attributes.  */
static tree handle_const_attribute (tree *, tree, tree, int, bool *);
static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);

/* Fake handler for attributes we don't properly support, typically because
   they'd require dragging a lot of the common-c front-end circuitry.  */
static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
/* Table of machine-independent internal attributes for Ada.  We support
   this minimal set of attributes to accommodate the needs of builtins.  */
const struct attribute_spec gnat_internal_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  { "const",        0, 0,  true,  false, false, handle_const_attribute,
    false },
  { "nothrow",      0, 0,  true,  false, false, handle_nothrow_attribute,
    false },
  { "pure",         0, 0,  true,  false, false, handle_pure_attribute,
    false },
  { "no vops",      0, 0,  true,  false, false, handle_novops_attribute,
    false },
  { "nonnull",      0, -1, false, true,  true,  handle_nonnull_attribute,
    false },
  { "sentinel",     0, 1,  false, true,  true,  handle_sentinel_attribute,
    false },
  { "noreturn",     0, 0,  true,  false, false, handle_noreturn_attribute,
    false },
  { "leaf",         0, 0,  true,  false, false, handle_leaf_attribute,
    false },
  { "malloc",       0, 0,  true,  false, false, handle_malloc_attribute,
    false },
  { "type generic", 0, 0,  false, true,  true,  handle_type_generic_attribute,
    false },

  { "vector_size",  1, 1,  false, true,  false, handle_vector_size_attribute,
    false },
  { "vector_type",  0, 0,  false, true,  false, handle_vector_type_attribute,
    false },
  { "may_alias",    0, 0,  false, true,  false, NULL, false },

  /* ??? format and format_arg are heavy and not supported, which actually
     prevents support for stdio builtins, which we however declare as part
     of the common builtins.def contents.  */
  { "format",       3, 3,  false, true,  true,  fake_attribute_handler, false },
  { "format_arg",   1, 1,  false, true,  true,  fake_attribute_handler, false },

  { NULL,           0, 0,  false, false, false, NULL, false }
};
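/* A minimal illustrative sketch, not part of the original sources: every
   handler named in the table above follows the common GCC attribute-handler
   signature declared earlier in this file.  A hypothetical handler (name
   invented for illustration) would look roughly like:

     static tree
     handle_some_attribute (tree *node, tree name, tree args, int flags,
                            bool *no_add_attrs)
     {
       // Inspect or mark *NODE here; set *no_add_attrs = true to drop the
       // attribute instead of attaching it to the decl or type.
       return NULL_TREE;
     }

   i.e. it receives a pointer to the decl or type being decorated, the
   attribute's name and arguments, and reports via *NO_ADD_ATTRS whether the
   attribute itself should still be recorded.  */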
/* Associates a GNAT tree node to a GCC tree node.  It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'.  See documentation
   of `save_gnu_tree' for more info.  */
static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;

#define GET_GNU_TREE(GNAT_ENTITY) \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]

#define SET_GNU_TREE(GNAT_ENTITY,VAL) \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_GNU_TREE(GNAT_ENTITY) \
  (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* Associates a GNAT entity to a GCC tree node used as a dummy, if any.  */
static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;

#define GET_DUMMY_NODE(GNAT_ENTITY) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id]

#define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
  (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* This variable keeps a table for types for each precision so that we only
   allocate each of them once.  Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special.  Perhaps we should NOT share these types; we'll see how it
   goes later.  */
static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static GTY(()) tree float_types[NUM_MACHINE_MODES];
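/* A minimal illustrative sketch, not part of the original sources: the two
   GTY tables above are plain arrays of trees indexed by GNAT node id, so an
   association is a constant-time array access.  For example:

     SET_GNU_TREE (gnat_entity, gnu_decl);
     if (PRESENT_GNU_TREE (gnat_entity))
       gnu_decl = GET_GNU_TREE (gnat_entity);

   expands to accesses of the form
   associate_gnat_to_gnu[gnat_entity - First_Node_Id], with the arrays sized
   by max_gnat_nodes as stated in the GTY length annotations.  */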
/* For each binding contour we allocate a binding_level structure to indicate
   the binding depth.  */

struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
  /* The binding level containing this one (the enclosing binding level).  */
  struct gnat_binding_level *chain;
  /* The BLOCK node for this level.  */
  tree block;
  /* If nonzero, the setjmp buffer that needs to be updated for any
     variable-sized definition within this context.  */
  tree jmpbuf_decl;
};

/* The binding level currently in effect.  */
static GTY(()) struct gnat_binding_level *current_binding_level;

/* A chain of gnat_binding_level structures awaiting reuse.  */
static GTY((deletable)) struct gnat_binding_level *free_binding_level;

/* The context to be used for global declarations.  */
static GTY(()) tree global_context;

/* An array of global declarations.  */
static GTY(()) VEC(tree,gc) *global_decls;

/* An array of builtin function declarations.  */
static GTY(()) VEC(tree,gc) *builtin_decls;

/* An array of global renaming pointers.  */
static GTY(()) VEC(tree,gc) *global_renaming_pointers;

/* A chain of unused BLOCK nodes.  */
static GTY((deletable)) tree free_block_chain;
static tree merge_sizes (tree, tree, tree, bool, bool);
static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *);
static tree float_type_for_precision (int, enum machine_mode);
static tree convert_to_fat_pointer (tree, tree);
static tree convert_to_thin_pointer (tree, tree);
static bool potential_alignment_gap (tree, tree, tree);
static void process_attributes (tree, struct attrib *);
/* Initialize the association of GNAT nodes to GCC trees.  */

void
init_gnat_to_gnu (void)
{
  associate_gnat_to_gnu = ggc_alloc_cleared_vec_tree (max_gnat_nodes);
}

/* GNAT_ENTITY is a GNAT tree node for an entity.  Associate GNU_DECL, a GCC
   tree node, with GNAT_ENTITY.  If GNU_DECL is not a ..._DECL node, abort.
   If NO_CHECK is true, the latter check is suppressed.

   If GNU_DECL is zero, reset a previous association.  */

void
save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
{
  /* Check that GNAT_ENTITY is not already defined and that it is being set
     to something which is a decl.  If that is not the case, this usually
     means GNAT_ENTITY is defined twice, but occasionally is due to some
     Gigi problem.  */
  gcc_assert (!(gnu_decl
		&& (PRESENT_GNU_TREE (gnat_entity)
		    || (!no_check && !DECL_P (gnu_decl)))));

  SET_GNU_TREE (gnat_entity, gnu_decl);
}

/* GNAT_ENTITY is a GNAT tree node for an entity.  Return the GCC tree node
   that was associated with it.  If there is no such tree node, abort.

   In some cases, such as delayed elaboration or expressions that need to
   be elaborated only once, GNAT_ENTITY is really not an entity.  */

tree
get_gnu_tree (Entity_Id gnat_entity)
{
  gcc_assert (PRESENT_GNU_TREE (gnat_entity));
  return GET_GNU_TREE (gnat_entity);
}

/* Return nonzero if a GCC tree has been associated with GNAT_ENTITY.  */

bool
present_gnu_tree (Entity_Id gnat_entity)
{
  return PRESENT_GNU_TREE (gnat_entity);
}

/* Initialize the association of GNAT nodes to GCC trees as dummies.  */

void
init_dummy_type (void)
{
  dummy_node_table = ggc_alloc_cleared_vec_tree (max_gnat_nodes);
}
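/* A minimal illustrative sketch, not part of the original sources: the usual
   pattern in callers is to check for an existing association before
   translating an entity again, e.g.:

     if (present_gnu_tree (gnat_entity))
       gnu_decl = get_gnu_tree (gnat_entity);
     else
       {
         gnu_decl = <translate the entity>;
         save_gnu_tree (gnat_entity, gnu_decl, false);
       }

   so each GNAT entity is elaborated into a GCC tree at most once.  */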
/* Make a dummy type corresponding to GNAT_TYPE.  */

tree
make_dummy_type (Entity_Id gnat_type)
{
  Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
  tree gnu_type;

  /* If there is an equivalent type, get its underlying type.  */
  if (Present (gnat_underlying))
    gnat_underlying = Gigi_Equivalent_Type (Underlying_Type (gnat_underlying));

  /* If there was no equivalent type (can only happen when just annotating
     types) or underlying type, go back to the original type.  */
  if (No (gnat_underlying))
    gnat_underlying = gnat_type;

  /* If there is already a dummy type, use that one.  Else make one.  */
  if (PRESENT_DUMMY_NODE (gnat_underlying))
    return GET_DUMMY_NODE (gnat_underlying);

  /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
     an ENUMERAL_TYPE.  */
  gnu_type = make_node (Is_Record_Type (gnat_underlying)
			? tree_code_for_record_type (gnat_underlying)
			: ENUMERAL_TYPE);
  TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
  TYPE_DUMMY_P (gnu_type) = 1;
  TYPE_STUB_DECL (gnu_type)
    = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
  if (Is_By_Reference_Type (gnat_underlying))
    TYPE_BY_REFERENCE_P (gnu_type) = 1;

  SET_DUMMY_NODE (gnat_underlying, gnu_type);

  return gnu_type;
}

/* Return the dummy type that was made for GNAT_TYPE, if any.  */

tree
get_dummy_type (Entity_Id gnat_type)
{
  return GET_DUMMY_NODE (gnat_type);
}
/* Build dummy fat and thin pointer types whose designated type is specified
   by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter.  */

void
build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
{
  tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
  tree gnu_fat_type, fields, gnu_object_type;

  gnu_template_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
  TYPE_DUMMY_P (gnu_template_type) = 1;
  gnu_ptr_template = build_pointer_type (gnu_template_type);

  gnu_array_type = make_node (ENUMERAL_TYPE);
  TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
  TYPE_DUMMY_P (gnu_array_type) = 1;
  gnu_ptr_array = build_pointer_type (gnu_array_type);

  gnu_fat_type = make_node (RECORD_TYPE);
  /* Build a stub DECL to trigger the special processing for fat pointer types
     in gnat_pushdecl.  */
  TYPE_NAME (gnu_fat_type)
    = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
			     gnu_fat_type);
  fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
			      gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
  DECL_CHAIN (fields)
    = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
			 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
  finish_fat_pointer_type (gnu_fat_type, fields);
  SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
  /* Suppress debug info until after the type is completed.  */
  TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;

  gnu_object_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
  TYPE_DUMMY_P (gnu_object_type) = 1;

  TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
  TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
}
/* Return true if we are in the global binding level.  */

bool
global_bindings_p (void)
{
  return force_global || current_function_decl == NULL_TREE;
}
/* Enter a new binding level.  */

void
gnat_pushlevel (void)
{
  struct gnat_binding_level *newlevel = NULL;

  /* Reuse a struct for this binding level, if there is one.  */
  if (free_binding_level)
    {
      newlevel = free_binding_level;
      free_binding_level = free_binding_level->chain;
    }
  else
    newlevel = ggc_alloc_gnat_binding_level ();

  /* Use a free BLOCK, if any; otherwise, allocate one.  */
  if (free_block_chain)
    {
      newlevel->block = free_block_chain;
      free_block_chain = BLOCK_CHAIN (free_block_chain);
      BLOCK_CHAIN (newlevel->block) = NULL_TREE;
    }
  else
    newlevel->block = make_node (BLOCK);

  /* Point the BLOCK we just made to its parent.  */
  if (current_binding_level)
    BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;

  BLOCK_VARS (newlevel->block) = NULL_TREE;
  BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
  TREE_USED (newlevel->block) = 1;

  /* Add this level to the front of the chain (stack) of active levels.  */
  newlevel->chain = current_binding_level;
  newlevel->jmpbuf_decl = NULL_TREE;
  current_binding_level = newlevel;
}
/* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
   and point FNDECL to this BLOCK.  */

void
set_current_block_context (tree fndecl)
{
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  set_block_for_group (current_binding_level->block);
}

/* Set the jmpbuf_decl for the current binding level to DECL.  */

void
set_block_jmpbuf_decl (tree decl)
{
  current_binding_level->jmpbuf_decl = decl;
}

/* Get the jmpbuf_decl, if any, for the current binding level.  */

tree
get_block_jmpbuf_decl (void)
{
  return current_binding_level->jmpbuf_decl;
}
/* Exit a binding level.  Set any BLOCK into the current code group.  */

void
gnat_poplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
  BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));

  /* If this is a function-level BLOCK don't do anything.  Otherwise, if there
     are no variables free the block and merge its subblocks into those of its
     parent block.  Otherwise, add it to the list of its parent.  */
  if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
    ;
  else if (BLOCK_VARS (block) == NULL_TREE)
    {
      BLOCK_SUBBLOCKS (level->chain->block)
	= block_chainon (BLOCK_SUBBLOCKS (block),
			 BLOCK_SUBBLOCKS (level->chain->block));
      BLOCK_CHAIN (block) = free_block_chain;
      free_block_chain = block;
    }
  else
    {
      BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
      BLOCK_SUBBLOCKS (level->chain->block) = block;
      TREE_USED (block) = 1;
      set_block_for_group (block);
    }

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}
/* Exit a binding level and discard the associated BLOCK.  */

void
gnat_zaplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  BLOCK_CHAIN (block) = free_block_chain;
  free_block_chain = block;

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}
/* Record DECL as belonging to the current lexical scope and use GNAT_NODE
   for location information and flag propagation.  */

void
gnat_pushdecl (tree decl, Node_Id gnat_node)
{
  /* If DECL is public external or at top level, it has global context.  */
  if ((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || global_bindings_p ())
    {
      if (!global_context)
	global_context = build_translation_unit_decl (NULL_TREE);
      DECL_CONTEXT (decl) = global_context;
    }
  else
    {
      DECL_CONTEXT (decl) = current_function_decl;

      /* Functions imported in another function are not really nested.
	 For really nested functions mark them initially as needing
	 a static chain for uses of that flag before unnesting;
	 lower_nested_functions will then recompute it.  */
      if (TREE_CODE (decl) == FUNCTION_DECL && !TREE_PUBLIC (decl))
	DECL_STATIC_CHAIN (decl) = 1;
    }

  TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));

  /* Set the location of DECL and emit a declaration for it.  */
  if (Present (gnat_node))
    Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));

  add_decl_expr (decl, gnat_node);

  /* Put the declaration on the list.  The list of declarations is in reverse
     order.  The list will be reversed later.  Put global declarations in the
     globals list and local ones in the current block.  But skip TYPE_DECLs
     for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
     with the debugger and aren't needed anyway.  */
  if (!(TREE_CODE (decl) == TYPE_DECL
	&& TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
    {
      if (global_bindings_p ())
	{
	  VEC_safe_push (tree, gc, global_decls, decl);

	  if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
	    VEC_safe_push (tree, gc, builtin_decls, decl);
	}
      else if (!DECL_EXTERNAL (decl))
	{
	  DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
	  BLOCK_VARS (current_binding_level->block) = decl;
	}
    }

  /* For the declaration of a type, set its name if it either is not already
     set or if the previous type name was not derived from a source name.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same POINTER_TYPE node.  Code in the
     equivalent function of c-decl.c makes a copy of the type node here, but
     that may cause us trouble with incomplete types.  We make an exception
     for fat pointer types because the compiler automatically builds them
     for unconstrained array types and the debugger uses them to represent
     both these and pointers to these.  */
  if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
    {
      tree t = TREE_TYPE (decl);

      if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
	{
	  /* Array and pointer types aren't "tagged" types so we force the
	     type to be associated with its typedef in the DWARF back-end,
	     in order to make sure that the latter is always preserved.  */
	  if (!DECL_ARTIFICIAL (decl)
	      && (TREE_CODE (t) == ARRAY_TYPE
		  || TREE_CODE (t) == POINTER_TYPE))
	    {
	      tree tt = build_distinct_type_copy (t);
	      if (TREE_CODE (t) == POINTER_TYPE)
		TYPE_NEXT_PTR_TO (t) = tt;
	      TYPE_NAME (tt) = DECL_NAME (decl);
	      TYPE_STUB_DECL (tt) = TYPE_STUB_DECL (t);
	      DECL_ORIGINAL_TYPE (decl) = tt;
	    }
	}
      else if (TYPE_IS_FAT_POINTER_P (t))
	{
	  /* We need a variant for the placeholder machinery to work.  */
	  tree tt = build_variant_type_copy (t);
	  TYPE_NAME (tt) = decl;
	  TREE_USED (tt) = TREE_USED (t);
	  TREE_TYPE (decl) = tt;
	  if (DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
	    DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
	  else
	    DECL_ORIGINAL_TYPE (decl) = t;
	  DECL_ARTIFICIAL (decl) = 0;
	  t = NULL_TREE;
	}
      else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
	;
      else
	t = NULL_TREE;

      /* Propagate the name to all the anonymous variants.  This is needed
	 for the type qualifiers machinery to work properly.  */
      if (t)
	for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
	  if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
	    TYPE_NAME (t) = decl;
    }
}
/* Record TYPE as a builtin type for Ada.  NAME is the name of the type.
   ARTIFICIAL_P is true if it's a type that was generated by the compiler.  */

void
record_builtin_type (const char *name, tree type, bool artificial_p)
{
  tree type_decl = build_decl (input_location,
			       TYPE_DECL, get_identifier (name), type);
  DECL_ARTIFICIAL (type_decl) = artificial_p;
  TYPE_ARTIFICIAL (type) = artificial_p;
  gnat_pushdecl (type_decl, Empty);

  if (debug_hooks->type_decl)
    debug_hooks->type_decl (type_decl, false);
}
/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
   finish constructing the record type as a fat pointer type.  */

void
finish_fat_pointer_type (tree record_type, tree field_list)
{
  /* Make sure we can put it into a register.  */
  TYPE_ALIGN (record_type) = MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE);

  /* Show what it really is.  */
  TYPE_FAT_POINTER_P (record_type) = 1;

  /* Do not emit debug info for it since the types of its fields may still be
     incomplete at this point.  */
  finish_record_type (record_type, field_list, 0, false);

  /* Force type_contains_placeholder_p to return true on it.  Although the
     PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
     type but the representation of the unconstrained array.  */
  TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
}
/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
   finish constructing the record or union type.  If REP_LEVEL is zero, this
   record has no representation clause and so will be entirely laid out here.
   If REP_LEVEL is one, this record has a representation clause and has been
   laid out already; only set the sizes and alignment.  If REP_LEVEL is two,
   this record is derived from a parent record and thus inherits its layout;
   only make a pass on the fields to finalize them.  DEBUG_INFO_P is true if
   we need to write debug information about this type.  */

void
finish_record_type (tree record_type, tree field_list, int rep_level,
		    bool debug_info_p)
{
  enum tree_code code = TREE_CODE (record_type);
  tree name = TYPE_NAME (record_type);
  tree ada_size = bitsize_zero_node;
  tree size = bitsize_zero_node;
  bool had_size = TYPE_SIZE (record_type) != 0;
  bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
  bool had_align = TYPE_ALIGN (record_type) != 0;
  tree field;

  TYPE_FIELDS (record_type) = field_list;

  /* Always attach the TYPE_STUB_DECL for a record type.  It is required to
     generate debug info and have a parallel type.  */
  if (name && TREE_CODE (name) == TYPE_DECL)
    name = DECL_NAME (name);
  TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);

  /* Globally initialize the record first.  If this is a rep'ed record,
     that just means some initializations; otherwise, layout the record.  */
  if (rep_level > 0)
    {
      TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));

      if (!had_size_unit)
	TYPE_SIZE_UNIT (record_type) = size_zero_node;

      if (!had_size)
	TYPE_SIZE (record_type) = bitsize_zero_node;

      /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
	 out just like a UNION_TYPE, since the size will be fixed.  */
      else if (code == QUAL_UNION_TYPE)
	code = UNION_TYPE;
    }
  else
    {
      /* Ensure there isn't a size already set.  There can be in an error
	 case where there is a rep clause but all fields have errors and
	 no longer have a position.  */
      TYPE_SIZE (record_type) = 0;
      layout_type (record_type);
    }

  /* At this point, the position and size of each field is known.  It was
     either set before entry by a rep clause, or by laying out the type above.

     We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
     to compute the Ada size; the GCC size and alignment (for rep'ed records
     that are not padding types); and the mode (for rep'ed records).  We also
     clear the DECL_BIT_FIELD indication for the cases we know have not been
     handled yet, and adjust DECL_NONADDRESSABLE_P accordingly.  */

  if (code == QUAL_UNION_TYPE)
    field_list = nreverse (field_list);

  for (field = field_list; field; field = DECL_CHAIN (field))
    {
      tree type = TREE_TYPE (field);
      tree pos = bit_position (field);
      tree this_size = DECL_SIZE (field);
      tree this_ada_size;

      if (RECORD_OR_UNION_TYPE_P (type)
	  && !TYPE_FAT_POINTER_P (type)
	  && !TYPE_CONTAINS_TEMPLATE_P (type)
	  && TYPE_ADA_SIZE (type))
	this_ada_size = TYPE_ADA_SIZE (type);
      else
	this_ada_size = this_size;

      /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle.  */
      if (DECL_BIT_FIELD (field)
	  && operand_equal_p (this_size, TYPE_SIZE (type), 0))
	{
	  unsigned int align = TYPE_ALIGN (type);

	  /* In the general case, type alignment is required.  */
	  if (value_factor_p (pos, align))
	    {
	      /* The enclosing record type must be sufficiently aligned.
		 Otherwise, if no alignment was specified for it and it
		 has been laid out already, bump its alignment to the
		 desired one if this is compatible with its size.  */
	      if (TYPE_ALIGN (record_type) >= align)
		{
		  DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
		  DECL_BIT_FIELD (field) = 0;
		}
	      else if (!had_align
		       && rep_level == 0
		       && value_factor_p (TYPE_SIZE (record_type), align))
		{
		  TYPE_ALIGN (record_type) = align;
		  DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
		  DECL_BIT_FIELD (field) = 0;
		}
	    }

	  /* In the non-strict alignment case, only byte alignment is
	     required.  */
	  if (!STRICT_ALIGNMENT
	      && DECL_BIT_FIELD (field)
	      && value_factor_p (pos, BITS_PER_UNIT))
	    DECL_BIT_FIELD (field) = 0;
	}

      /* If we still have DECL_BIT_FIELD set at this point, we know that the
	 field is technically not addressable.  Except that it can actually
	 be addressed if it is BLKmode and happens to be properly aligned.  */
      if (DECL_BIT_FIELD (field)
	  && !(DECL_MODE (field) == BLKmode
	       && value_factor_p (pos, BITS_PER_UNIT)))
	DECL_NONADDRESSABLE_P (field) = 1;

      /* A type must be as aligned as its most aligned field that is not
	 a bit-field.  But this is already enforced by layout_type.  */
      if (rep_level > 0 && !DECL_BIT_FIELD (field))
	TYPE_ALIGN (record_type)
	  = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));

      switch (code)
	{
	case UNION_TYPE:
	  ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
	  size = size_binop (MAX_EXPR, size, this_size);
	  break;

	case QUAL_UNION_TYPE:
	  ada_size
	    = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			   this_ada_size, ada_size);
	  size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			      this_size, size);
	  break;

	case RECORD_TYPE:
	  /* Since we know here that all fields are sorted in order of
	     increasing bit position, the size of the record is one
	     higher than the ending bit of the last field processed
	     unless we have a rep clause, since in that case we might
	     have a field outside a QUAL_UNION_TYPE that has a higher ending
	     position.  So use a MAX in that case.  Also, if this field is a
	     QUAL_UNION_TYPE, we need to take into account the previous size in
	     the case of empty variants.  */
	  ada_size
	    = merge_sizes (ada_size, pos, this_ada_size,
			   TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
	  size
	    = merge_sizes (size, pos, this_size,
			   TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  if (code == QUAL_UNION_TYPE)
    nreverse (field_list);

  if (rep_level < 2)
    {
      /* If this is a padding record, we never want to make the size smaller
	 than what was specified in it, if any.  */
      if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
	size = TYPE_SIZE (record_type);

      /* Now set any of the values we've just computed that apply.  */
      if (!TYPE_FAT_POINTER_P (record_type)
	  && !TYPE_CONTAINS_TEMPLATE_P (record_type))
	SET_TYPE_ADA_SIZE (record_type, ada_size);

      if (rep_level > 0)
	{
	  tree size_unit = had_size_unit
			   ? TYPE_SIZE_UNIT (record_type)
			   : convert (sizetype,
				      size_binop (CEIL_DIV_EXPR, size,
						  bitsize_unit_node));
	  unsigned int align = TYPE_ALIGN (record_type);

	  TYPE_SIZE (record_type) = variable_size (round_up (size, align));
	  TYPE_SIZE_UNIT (record_type)
	    = variable_size (round_up (size_unit, align / BITS_PER_UNIT));

	  compute_record_mode (record_type);
	}
    }

  if (debug_info_p)
    rest_of_record_type_compilation (record_type);
}
/* Wrap up compilation of RECORD_TYPE, i.e. output all the debug information
   associated with it.  It need not be invoked directly in most cases since
   finish_record_type takes care of doing so, but this can be necessary if
   a parallel type is to be attached to the record type.  */

void
rest_of_record_type_compilation (tree record_type)
{
  tree field_list = TYPE_FIELDS (record_type);
  tree field;
  enum tree_code code = TREE_CODE (record_type);
  bool var_size = false;

  for (field = field_list; field; field = DECL_CHAIN (field))
    {
      /* We need to make an XVE/XVU record if any field has variable size,
	 whether or not the record does.  For example, if we have a union,
	 it may be that all fields, rounded up to the alignment, have the
	 same size, in which case we'll use that size.  But the debug
	 output routines (except Dwarf2) won't be able to output the fields,
	 so we need to make the special record.  */
      if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
	  /* If a field has a non-constant qualifier, the record will have
	     variable size too.  */
	  || (code == QUAL_UNION_TYPE
	      && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
	{
	  var_size = true;
	  break;
	}
    }

  /* If this record is of variable size, rename it so that the
     debugger knows it is and make a new, parallel, record
     that tells the debugger how the record is laid out.  See
     exp_dbug.ads.  But don't do this for records that are padding
     since they confuse GDB.  */
  if (var_size && !TYPE_IS_PADDING_P (record_type))
    {
      tree new_record_type
	= make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
		     ? UNION_TYPE : TREE_CODE (record_type));
      tree orig_name = TYPE_NAME (record_type), new_name;
      tree last_pos = bitsize_zero_node;
      tree old_field, prev_old_field = NULL_TREE;

      if (TREE_CODE (orig_name) == TYPE_DECL)
	orig_name = DECL_NAME (orig_name);

      new_name
	= concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
				  ? "XVU" : "XVE");
      TYPE_NAME (new_record_type) = new_name;
      TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
      TYPE_STUB_DECL (new_record_type)
	= create_type_stub_decl (new_name, new_record_type);
      DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
	= DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
      TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
      TYPE_SIZE_UNIT (new_record_type)
	= size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);

      /* Now scan all the fields, replacing each field with a new
	 field corresponding to the new encoding.  */
      for (old_field = TYPE_FIELDS (record_type); old_field;
	   old_field = DECL_CHAIN (old_field))
	{
	  tree field_type = TREE_TYPE (old_field);
	  tree field_name = DECL_NAME (old_field);
	  tree new_field;
	  tree curpos = bit_position (old_field);
	  bool var = false;
	  unsigned int align = 0;
	  tree pos;

	  /* See how the position was modified from the last position.

	     There are two basic cases we support: a value was added
	     to the last position or the last position was rounded to
	     a boundary and then something was added.  Check for the
	     first case first.  If not, see if there is any evidence
	     of rounding.  If so, round the last position and try
	     again.

	     If this is a union, the position can be taken as zero.  */

	  /* Some computations depend on the shape of the position expression,
	     so strip conversions to make sure it's exposed.  */
	  curpos = remove_conversions (curpos, true);

	  if (TREE_CODE (new_record_type) == UNION_TYPE)
	    pos = bitsize_zero_node, align = 0;
	  else
	    pos = compute_related_constant (curpos, last_pos);

	  if (!pos && TREE_CODE (curpos) == MULT_EXPR
	      && host_integerp (TREE_OPERAND (curpos, 1), 1))
	    {
	      tree offset = TREE_OPERAND (curpos, 0);
	      align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);

	      /* An offset which is a bitwise AND with a negative power of 2
		 means an alignment corresponding to this power of 2.  Note
		 that, as sizetype is sign-extended but nonetheless unsigned,
		 we don't directly use tree_int_cst_sgn.  */
	      offset = remove_conversions (offset, true);
	      if (TREE_CODE (offset) == BIT_AND_EXPR
		  && host_integerp (TREE_OPERAND (offset, 1), 0)
		  && TREE_INT_CST_HIGH (TREE_OPERAND (offset, 1)) < 0)
		{
		  unsigned int pow
		    = - tree_low_cst (TREE_OPERAND (offset, 1), 0);
		  if (exact_log2 (pow) > 0)
		    align *= pow;
		}

	      pos = compute_related_constant (curpos,
					      round_up (last_pos, align));
	    }
	  else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
		   && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
		   && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
		   && host_integerp (TREE_OPERAND
				     (TREE_OPERAND (curpos, 0), 1),
				     1))
	    {
	      align = tree_low_cst
		(TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
	      pos = compute_related_constant (curpos,
					      round_up (last_pos, align));
	    }
	  else if (potential_alignment_gap (prev_old_field, old_field,
					    pos))
	    {
	      align = TYPE_ALIGN (field_type);
	      pos = compute_related_constant (curpos,
					      round_up (last_pos, align));
	    }

	  /* If we can't compute a position, set it to zero.

	     ??? We really should abort here, but it's too much work
	     to get this correct for all cases.  */
	  if (!pos)
	    pos = bitsize_zero_node;

	  /* See if this type is variable-sized and make a pointer type
	     and indicate the indirection if so.  Beware that the debug
	     back-end may adjust the position computed above according
	     to the alignment of the field type, i.e. the pointer type
	     in this case, if we don't preventively counter that.  */
	  if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
	    {
	      field_type = build_pointer_type (field_type);
	      if (align != 0 && TYPE_ALIGN (field_type) > align)
		{
		  field_type = copy_node (field_type);
		  TYPE_ALIGN (field_type) = align;
		}
	      var = true;
	    }

	  /* Make a new field name, if necessary.  */
	  if (var || align != 0)
	    {
	      char suffix[16];

	      if (align != 0)
		sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
			 align / BITS_PER_UNIT);
	      else
		strcpy (suffix, "XVL");

	      field_name = concat_name (field_name, suffix);
	    }

	  new_field
	    = create_field_decl (field_name, field_type, new_record_type,
				 DECL_SIZE (old_field), pos, 0, 0);
	  DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
	  TYPE_FIELDS (new_record_type) = new_field;

	  /* If old_field is a QUAL_UNION_TYPE, take its size as being
	     zero.  The only time it's not the last field of the record
	     is when there are other components at fixed positions after
	     it (meaning there was a rep clause for every field) and we
	     want to be able to encode them.  */
	  last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
				 (TREE_CODE (TREE_TYPE (old_field))
				  == QUAL_UNION_TYPE)
				 ? bitsize_zero_node
				 : DECL_SIZE (old_field));
	  prev_old_field = old_field;
	}

      TYPE_FIELDS (new_record_type)
	= nreverse (TYPE_FIELDS (new_record_type));

      /* We used to explicitly invoke rest_of_type_decl_compilation on the
	 parallel type for the sake of STABS.  We don't do it any more, so
	 as to ensure that the parallel type be processed after the type
	 by the debug back-end and, thus, prevent it from interfering with
	 the processing of a recursive type.  */
      add_parallel_type (TYPE_STUB_DECL (record_type), new_record_type);
    }

  rest_of_type_decl_compilation (TYPE_STUB_DECL (record_type));
}
/* Append PARALLEL_TYPE on the chain of parallel types for DECL.  */

void
add_parallel_type (tree decl, tree parallel_type)
{
  tree d = decl;

  while (DECL_PARALLEL_TYPE (d))
    d = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (d));

  SET_DECL_PARALLEL_TYPE (d, parallel_type);
}
/* Utility function of above to merge LAST_SIZE, the previous size of a record
   with FIRST_BIT and SIZE that describe a field.  SPECIAL is true if this
   represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
   replace a value of zero with the old size.  If HAS_REP is true, we take the
   MAX of the end position of this field with LAST_SIZE.  In all other cases,
   we use FIRST_BIT plus SIZE.  Return an expression for the size.  */

static tree
merge_sizes (tree last_size, tree first_bit, tree size, bool special,
	     bool has_rep)
{
  tree type = TREE_TYPE (last_size);
  tree new_size;

  if (!special || TREE_CODE (size) != COND_EXPR)
    {
      new_size = size_binop (PLUS_EXPR, first_bit, size);
      if (has_rep)
	new_size = size_binop (MAX_EXPR, last_size, new_size);
    }
  else
    new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
			    integer_zerop (TREE_OPERAND (size, 1))
			    ? last_size : merge_sizes (last_size, first_bit,
						       TREE_OPERAND (size, 1),
						       1, has_rep),
			    integer_zerop (TREE_OPERAND (size, 2))
			    ? last_size : merge_sizes (last_size, first_bit,
						       TREE_OPERAND (size, 2),
						       1, has_rep));

  /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
     when fed through substitute_in_expr) into thinking that a constant
     size is not constant.  */
  while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
    new_size = TREE_OPERAND (new_size, 0);

  return new_size;
}
/* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
   related by the addition of a constant.  Return that constant if so.  */

static tree
compute_related_constant (tree op0, tree op1)
{
  tree op0_var, op1_var;
  tree op0_con = split_plus (op0, &op0_var);
  tree op1_con = split_plus (op1, &op1_var);
  tree result = size_binop (MINUS_EXPR, op0_con, op1_con);

  if (operand_equal_p (op0_var, op1_var, 0))
    return result;
  else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
    return result;
  else
    return 0;
}
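/* A minimal illustrative worked example, not part of the original sources:
   if OP0 is "N + 24" and OP1 is "N + 8" with the same variable part N,
   split_plus yields the constants 24 and 8, so the function returns the
   bitsize constant 16; if the variable parts differ and cannot be matched,
   it returns 0 to signal that no related constant was found.  */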
/* Utility function of above to split a tree OP which may be a sum, into a
   constant part, which is returned, and a variable part, which is stored
   in *PVAR.  *PVAR may be bitsize_zero_node.  All operations must be of
   sizetype.  */

static tree
split_plus (tree in, tree *pvar)
{
  /* Strip conversions in order to ease the tree traversal and maximize the
     potential for constant or plus/minus discovery.  We need to be careful
     to always return and set *pvar to bitsizetype trees, but it's worth
     the effort.  */
  in = remove_conversions (in, false);

  *pvar = convert (bitsizetype, in);

  if (TREE_CODE (in) == INTEGER_CST)
    {
      *pvar = bitsize_zero_node;
      return convert (bitsizetype, in);
    }
  else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
    {
      tree lhs_var, rhs_var;
      tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
      tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);

      if (lhs_var == TREE_OPERAND (in, 0)
	  && rhs_var == TREE_OPERAND (in, 1))
	return bitsize_zero_node;

      *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
      return size_binop (TREE_CODE (in), lhs_con, rhs_con);
    }
  else
    return bitsize_zero_node;
}
/* Return a FUNCTION_TYPE node.  RETURN_TYPE is the type returned by the
   subprogram.  If it is VOID_TYPE, then we are dealing with a procedure,
   otherwise we are dealing with a function.  PARAM_DECL_LIST is a list of
   PARM_DECL nodes that are the subprogram parameters.  CICO_LIST is the
   copy-in/copy-out list to be stored into the TYPE_CICO_LIST field.
   RETURN_UNCONSTRAINED_P is true if the function returns an unconstrained
   object.  RETURN_BY_DIRECT_REF_P is true if the function returns by direct
   reference.  RETURN_BY_INVISI_REF_P is true if the function returns by
   invisible reference.  */

tree
create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
		     bool return_unconstrained_p, bool return_by_direct_ref_p,
		     bool return_by_invisi_ref_p)
{
  /* A list of the data type nodes of the subprogram formal parameters.
     This list is generated by traversing the input list of PARM_DECL
     nodes.  */
  VEC(tree,gc) *param_type_list = NULL;
  tree t, type;

  for (t = param_decl_list; t; t = DECL_CHAIN (t))
    VEC_safe_push (tree, gc, param_type_list, TREE_TYPE (t));

  type = build_function_type_vec (return_type, param_type_list);

  /* TYPE may have been shared since GCC hashes types.  If it has a different
     CICO_LIST, make a copy.  Likewise for the various flags.  */
  if (!fntype_same_flags_p (type, cico_list, return_unconstrained_p,
			    return_by_direct_ref_p, return_by_invisi_ref_p))
    {
      type = copy_type (type);
      TYPE_CI_CO_LIST (type) = cico_list;
      TYPE_RETURN_UNCONSTRAINED_P (type) = return_unconstrained_p;
      TYPE_RETURN_BY_DIRECT_REF_P (type) = return_by_direct_ref_p;
      TREE_ADDRESSABLE (type) = return_by_invisi_ref_p;
    }

  return type;
}
/* Return a copy of TYPE but safe to modify in any way.  */

tree
copy_type (tree type)
{
  tree new_type = copy_node (type);

  /* Unshare the language-specific data.  */
  if (TYPE_LANG_SPECIFIC (type))
    {
      TYPE_LANG_SPECIFIC (new_type) = NULL;
      SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
    }

  /* And the contents of the language-specific slot if needed.  */
  if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
      && TYPE_RM_VALUES (type))
    {
      TYPE_RM_VALUES (new_type) = NULL_TREE;
      SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
      SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
      SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
    }

  /* copy_node clears this field instead of copying it, because it is
     aliased with TREE_CHAIN.  */
  TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);

  TYPE_POINTER_TO (new_type) = 0;
  TYPE_REFERENCE_TO (new_type) = 0;
  TYPE_MAIN_VARIANT (new_type) = new_type;
  TYPE_NEXT_VARIANT (new_type) = 0;

  return new_type;
}
/* Return a subtype of sizetype with range MIN to MAX and whose
   TYPE_INDEX_TYPE is INDEX.  GNAT_NODE is used for the position
   of the associated TYPE_DECL.  */

tree
create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
{
  /* First build a type for the desired range.  */
  tree type = build_nonshared_range_type (sizetype, min, max);

  /* Then set the index type.  */
  SET_TYPE_INDEX_TYPE (type, index);
  create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);

  return type;
}
/* Return a subtype of TYPE with range MIN to MAX.  If TYPE is NULL,
   sizetype is used.  */

tree
create_range_type (tree type, tree min, tree max)
{
  tree range_type;

  if (type == NULL_TREE)
    type = sizetype;

  /* First build a type with the base range.  */
  range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
					   TYPE_MAX_VALUE (type));

  /* Then set the actual range.  */
  SET_TYPE_RM_MIN_VALUE (range_type, convert (type, min));
  SET_TYPE_RM_MAX_VALUE (range_type, convert (type, max));

  return range_type;
}
/* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
   TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
   its data type.  */

tree
create_type_stub_decl (tree type_name, tree type)
{
  /* Using a named TYPE_DECL ensures that a type name marker is emitted in
     STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
     emitted in DWARF.  */
  tree type_decl = build_decl (input_location,
			       TYPE_DECL, type_name, type);
  DECL_ARTIFICIAL (type_decl) = 1;
  TYPE_ARTIFICIAL (type) = 1;

  return type_decl;
}
/* Return a TYPE_DECL node.  TYPE_NAME gives the name of the type and TYPE
   is a ..._TYPE node giving its data type.  ARTIFICIAL_P is true if this
   is a declaration that was generated by the compiler.  DEBUG_INFO_P is
   true if we need to write debug information about this type.  GNAT_NODE
   is used for the position of the decl.  */

tree
create_type_decl (tree type_name, tree type, struct attrib *attr_list,
		  bool artificial_p, bool debug_info_p, Node_Id gnat_node)
{
  enum tree_code code = TREE_CODE (type);
  bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
  tree type_decl;

  /* Only the builtin TYPE_STUB_DECL should be used for dummy types.  */
  gcc_assert (!TYPE_IS_DUMMY_P (type));

  /* If the type hasn't been named yet, we're naming it; preserve an existing
     TYPE_STUB_DECL that has been attached to it for some purpose.  */
  if (!named && TYPE_STUB_DECL (type))
    {
      type_decl = TYPE_STUB_DECL (type);
      DECL_NAME (type_decl) = type_name;
    }
  else
    type_decl = build_decl (input_location,
			    TYPE_DECL, type_name, type);

  DECL_ARTIFICIAL (type_decl) = artificial_p;
  TYPE_ARTIFICIAL (type) = artificial_p;

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (type_decl, gnat_node);

  process_attributes (type_decl, attr_list);

  /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
     This causes the name to be also viewed as a "tag" by the debug
     back-end, with the advantage that no DW_TAG_typedef is emitted
     for artificial "tagged" types in DWARF.  */
  if (!named)
    TYPE_STUB_DECL (type) = type_decl;

  /* Pass the type declaration to the debug back-end unless this is an
     UNCONSTRAINED_ARRAY_TYPE that the back-end does not support, or a
     type for which debugging information was not requested, or else an
     ENUMERAL_TYPE or RECORD_TYPE (except for fat pointers) which are
     handled separately.  And do not pass dummy types either.  */
  if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
    DECL_IGNORED_P (type_decl) = 1;
  else if (code != ENUMERAL_TYPE
	   && (code != RECORD_TYPE || TYPE_FAT_POINTER_P (type))
	   && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
		&& TYPE_IS_DUMMY_P (TREE_TYPE (type)))
	   && !(code == RECORD_TYPE
		&& TYPE_IS_DUMMY_P
		   (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))))))
    rest_of_type_decl_compilation (type_decl);

  return type_decl;
}
/* Return a VAR_DECL or CONST_DECL node.

   VAR_NAME gives the name of the variable.  ASM_NAME is its assembler name
   (if provided).  TYPE is its data type (a GCC ..._TYPE node).  VAR_INIT is
   the GCC tree for an optional initial expression; NULL_TREE if none.

   CONST_FLAG is true if this variable is constant, in which case we might
   return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.

   PUBLIC_FLAG is true if this is for a reference to a public entity or for a
   definition to be made visible outside of the current compilation unit, for
   instance variable definitions in a package specification.

   EXTERN_FLAG is true when processing an external variable declaration (as
   opposed to a definition: no storage is to be allocated for the variable).

   STATIC_FLAG is only relevant when not at top level.  In that case
   it indicates whether to always allocate storage to the variable.

   GNAT_NODE is used for the position of the decl.  */

tree
create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
		   bool const_flag, bool public_flag, bool extern_flag,
		   bool static_flag, bool const_decl_allowed_p,
		   struct attrib *attr_list, Node_Id gnat_node)
{
  /* Whether the initializer is a constant initializer.  At the global level
     or for an external object or an object to be allocated in static memory,
     we check that it is a valid constant expression for use in initializing
     a static variable; otherwise, we only check that it is constant.  */
  bool init_const
    = (var_init != 0
       && gnat_types_compatible_p (type, TREE_TYPE (var_init))
       && (global_bindings_p () || extern_flag || static_flag
	   ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
	   : TREE_CONSTANT (var_init)));

  /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
     case the initializer may be used in lieu of the DECL node (as done in
     Identifier_to_gnu).  This is useful to prevent the need of elaboration
     code when an identifier for which such a decl is made is in turn used as
     an initializer.  We used to rely on CONST vs VAR_DECL for this purpose,
     but extra constraints apply to this choice (see below) and are not
     relevant to the distinction we wish to make.  */
  bool constant_p = const_flag && init_const;

  /* The actual DECL node.  CONST_DECL was initially intended for enumerals
     and may be used for scalars in general but not for aggregates.  */
  tree var_decl
    = build_decl (input_location,
		  (constant_p && const_decl_allowed_p
		   && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
		  var_name, type);

  /* If this is external, throw away any initializations (they will be done
     elsewhere) unless this is a constant for which we would like to remain
     able to get the initializer.  If we are defining a global here, leave a
     constant initialization and save any variable elaborations for the
     elaboration routine.  If we are just annotating types, throw away the
     initialization if it isn't a constant.  */
  if ((extern_flag && !constant_p)
      || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
    var_init = NULL_TREE;

  /* At the global level, an initializer requiring code to be generated
     produces elaboration statements.  Check that such statements are allowed,
     that is, not violating a No_Elaboration_Code restriction.  */
  if (global_bindings_p () && var_init != 0 && !init_const)
    Check_Elaboration_Code_Allowed (gnat_node);

  DECL_INITIAL (var_decl) = var_init;
  TREE_READONLY (var_decl) = const_flag;
  DECL_EXTERNAL (var_decl) = extern_flag;
  TREE_PUBLIC (var_decl) = public_flag || extern_flag;
  TREE_CONSTANT (var_decl) = constant_p;
  TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
    = TYPE_VOLATILE (type);

  /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
     try to fiddle with DECL_COMMON.  However, on platforms that don't
     support global BSS sections, uninitialized global variables would
     go in DATA instead, thus increasing the size of the executable.  */
  if (!flag_no_common
      && TREE_CODE (var_decl) == VAR_DECL
      && TREE_PUBLIC (var_decl)
      && !have_global_bss_p ())
    DECL_COMMON (var_decl) = 1;

  /* At the global binding level, we need to allocate static storage for the
     variable if it isn't external.  Otherwise, we allocate automatic storage
     unless requested not to.  */
  TREE_STATIC (var_decl)
    = !extern_flag && (static_flag || global_bindings_p ());

  /* For an external constant whose initializer is not absolute, do not emit
     debug info.  In DWARF this would mean a global relocation in a read-only
     section which runs afoul of the PE-COFF run-time relocation mechanism.  */
  if (extern_flag
      && constant_p
      && var_init
      && initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
	   != null_pointer_node)
    DECL_IGNORED_P (var_decl) = 1;

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (var_decl, gnat_node);

  if (TREE_SIDE_EFFECTS (var_decl))
    TREE_ADDRESSABLE (var_decl) = 1;

  if (TREE_CODE (var_decl) == VAR_DECL)
    {
      if (asm_name)
	SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
      process_attributes (var_decl, attr_list);
      if (global_bindings_p ())
	rest_of_decl_compilation (var_decl, true, 0);
    }
  else
    expand_decl (var_decl);

  return var_decl;
}
/* Return true if TYPE, an aggregate type, contains (or is) an array.  */

static bool
aggregate_type_contains_array_p (tree type)
{
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree field;
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (AGGREGATE_TYPE_P (TREE_TYPE (field))
	      && aggregate_type_contains_array_p (TREE_TYPE (field)))
	    return true;
	return false;
      }

    case ARRAY_TYPE:
      return true;

    default:
      gcc_unreachable ();
    }
}
/* Return a FIELD_DECL node.  FIELD_NAME is the field's name, FIELD_TYPE is
   its type and RECORD_TYPE is the type of the enclosing record.  If SIZE is
   nonzero, it is the specified size of the field.  If POS is nonzero, it is
   the bit position.  PACKED is 1 if the enclosing record is packed, -1 if it
   has Component_Alignment of Storage_Unit.  If ADDRESSABLE is nonzero, it
   means we are allowed to take the address of the field; if it is negative,
   we should not make a bitfield, which is used by make_aligning_type.  */

tree
create_field_decl (tree field_name, tree field_type, tree record_type,
		   tree size, tree pos, int packed, int addressable)
{
  tree field_decl = build_decl (input_location,
				FIELD_DECL, field_name, field_type);

  DECL_CONTEXT (field_decl) = record_type;
  TREE_READONLY (field_decl) = TYPE_READONLY (field_type);

  /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
     byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
     Likewise for an aggregate without specified position that contains an
     array, because in this case slices of variable length of this array
     must be handled by GCC and variable-sized objects need to be aligned
     to at least a byte boundary.  */
  if (packed && (TYPE_MODE (field_type) == BLKmode
		 || (!pos
		     && AGGREGATE_TYPE_P (field_type)
		     && aggregate_type_contains_array_p (field_type))))
    DECL_ALIGN (field_decl) = BITS_PER_UNIT;

  /* If a size is specified, use it.  Otherwise, if the record type is packed
     compute a size to use, which may differ from the object's natural size.
     We always set a size in this case to trigger the checks for bitfield
     creation below, which is typically required when no position has been
     specified.  */
  if (size)
    size = convert (bitsizetype, size);
  else if (packed == 1)
    {
      size = rm_size (field_type);
      if (TYPE_MODE (field_type) == BLKmode)
	size = round_up (size, BITS_PER_UNIT);
    }

  /* If we may, according to ADDRESSABLE, make a bitfield if a size is
     specified for two reasons: first if the size differs from the natural
     size.  Second, if the alignment is insufficient.  There are a number of
     ways the latter can be true.

     We never make a bitfield if the type of the field has a nonconstant size,
     because no such entity requiring bitfield operations should reach here.

     We do *preventively* make a bitfield when there might be the need for it
     but we don't have all the necessary information to decide, as is the case
     of a field with no specified position in a packed record.

     We also don't look at STRICT_ALIGNMENT here, and rely on later processing
     in layout_decl or finish_record_type to clear the bit_field indication if
     it is in fact not needed.  */
  if (addressable >= 0
      && size
      && TREE_CODE (size) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
      && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
	  || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
	  || packed
	  || (TYPE_ALIGN (record_type) != 0
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
    {
      DECL_BIT_FIELD (field_decl) = 1;
      DECL_SIZE (field_decl) = size;
      if (!packed && !pos)
	{
	  if (TYPE_ALIGN (record_type) != 0
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))
	    DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
	  else
	    DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
	}
    }

  DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;

  /* Bump the alignment if need be, either for bitfield/packing purposes or
     to satisfy the type requirements if no such consideration applies.  When
     we get the alignment from the type, indicate if this is from an explicit
     user request, which prevents stor-layout from lowering it later on.  */
  {
    unsigned int bit_align
      = (DECL_BIT_FIELD (field_decl) ? 1
	 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);

    if (bit_align > DECL_ALIGN (field_decl))
      DECL_ALIGN (field_decl) = bit_align;
    else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
      {
	DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
	DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
      }
  }

  if (pos)
    {
      /* We need to pass in the alignment the DECL is known to have.
	 This is the lowest-order bit set in POS, but no more than
	 the alignment of the record, if one is specified.  Note
	 that an alignment of 0 is taken as infinite.  */
      unsigned int known_align;

      if (host_integerp (pos, 1))
	known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
      else
	known_align = BITS_PER_UNIT;

      if (TYPE_ALIGN (record_type)
	  && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
	known_align = TYPE_ALIGN (record_type);

      layout_decl (field_decl, known_align);
      SET_DECL_OFFSET_ALIGN (field_decl,
			     host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
			     : BITS_PER_UNIT);
      pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
		    &DECL_FIELD_BIT_OFFSET (field_decl),
		    DECL_OFFSET_ALIGN (field_decl), pos);
    }

  /* In addition to what our caller says, claim the field is addressable if we
     know that its type is not suitable.

     The field may also be "technically" nonaddressable, meaning that even if
     we attempt to take the field's address we will actually get the address
     of a copy.  This is the case for true bitfields, but the DECL_BIT_FIELD
     value we have at this point is not accurate enough, so we don't account
     for this here and let finish_record_type decide.  */
  if (!addressable && !type_for_nonaliased_component_p (field_type))
    addressable = 1;

  DECL_NONADDRESSABLE_P (field_decl) = !addressable;

  return field_decl;
}
/* Return a PARM_DECL node.  PARAM_NAME is the name of the parameter and
   PARAM_TYPE is its type.  READONLY is true if the parameter is readonly
   (either an In parameter or an address of a pass-by-ref parameter).  */

tree
create_param_decl (tree param_name, tree param_type, bool readonly)
{
  tree param_decl = build_decl (input_location,
                                PARM_DECL, param_name, param_type);

  /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
     can lead to various ABI violations.  */
  if (targetm.calls.promote_prototypes (NULL_TREE)
      && INTEGRAL_TYPE_P (param_type)
      && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
    {
      /* We have to be careful about biased types here.  Make a subtype
         of integer_type_node with the proper biasing.  */
      if (TREE_CODE (param_type) == INTEGER_TYPE
          && TYPE_BIASED_REPRESENTATION_P (param_type))
        {
          tree subtype
            = make_unsigned_type (TYPE_PRECISION (integer_type_node));
          TREE_TYPE (subtype) = integer_type_node;
          TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
          SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (param_type));
          SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (param_type));
          param_type = subtype;
        }
      else
        param_type = integer_type_node;
    }

  DECL_ARG_TYPE (param_decl) = param_type;
  TREE_READONLY (param_decl) = readonly;
  return param_decl;
}
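
/* For instance (illustrative sketch only, the identifier names below are
   made up), an "In" parameter of the standard integer type could be created
   and chained onto a parameter list with:

     tree gnu_param
       = create_param_decl (get_identifier ("x"), integer_type_node, true);
     DECL_CHAIN (gnu_param) = gnu_param_list;

   and the resulting list handed to create_subprog_decl below.  */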
/* Given a DECL and ATTR_LIST, process the listed attributes.  */

static void
process_attributes (tree decl, struct attrib *attr_list)
{
  for (; attr_list; attr_list = attr_list->next)
    switch (attr_list->type)
      {
      case ATTR_MACHINE_ATTRIBUTE:
        input_location = DECL_SOURCE_LOCATION (decl);
        decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
                                           NULL_TREE),
                         ATTR_FLAG_TYPE_IN_PLACE);
        break;

      case ATTR_LINK_ALIAS:
        if (! DECL_EXTERNAL (decl))
          {
            TREE_STATIC (decl) = 1;
            assemble_alias (decl, attr_list->name);
          }
        break;

      case ATTR_WEAK_EXTERNAL:
        if (SUPPORTS_WEAK)
          declare_weak (decl);
        else
          post_error ("?weak declarations not supported on this target",
                      attr_list->error_point);
        break;

      case ATTR_LINK_SECTION:
        if (targetm_common.have_named_sections)
          {
            DECL_SECTION_NAME (decl)
              = build_string (IDENTIFIER_LENGTH (attr_list->name),
                              IDENTIFIER_POINTER (attr_list->name));
            DECL_COMMON (decl) = 0;
          }
        else
          post_error ("?section attributes are not supported for this target",
                      attr_list->error_point);
        break;

      case ATTR_LINK_CONSTRUCTOR:
        DECL_STATIC_CONSTRUCTOR (decl) = 1;
        TREE_USED (decl) = 1;
        break;

      case ATTR_LINK_DESTRUCTOR:
        DECL_STATIC_DESTRUCTOR (decl) = 1;
        TREE_USED (decl) = 1;
        break;

      case ATTR_THREAD_LOCAL_STORAGE:
        DECL_TLS_MODEL (decl) = decl_default_tls_model (decl);
        DECL_COMMON (decl) = 0;
        break;
      }
}
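
/* Illustrative sketch only: a caller typically builds the attrib list ahead
   of time, e.g. for a Machine_Attribute pragma naming "noinline" it would
   fill in a node whose fields mirror the accesses above (the allocation and
   the gnat_error_point variable are hypothetical here):

     struct attrib *attr = XNEW (struct attrib);
     attr->type = ATTR_MACHINE_ATTRIBUTE;
     attr->name = get_identifier ("noinline");
     attr->args = NULL_TREE;
     attr->error_point = gnat_error_point;
     attr->next = attr_list;
     attr_list = attr;

   The whole list is then passed to one of the create_*_decl routines, which
   forward it to process_attributes.  */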
/* Record DECL as a global renaming pointer.  */

void
record_global_renaming_pointer (tree decl)
{
  gcc_assert (!DECL_LOOP_PARM_P (decl) && DECL_RENAMED_OBJECT (decl));
  VEC_safe_push (tree, gc, global_renaming_pointers, decl);
}

/* Invalidate the global renaming pointers.  */

void
invalidate_global_renaming_pointers (void)
{
  unsigned int i;
  tree iter;

  FOR_EACH_VEC_ELT (tree, global_renaming_pointers, i, iter)
    SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);

  VEC_free (tree, gc, global_renaming_pointers);
}
/* Return true if VALUE is known to be a multiple of FACTOR, which must be
   a power of 2.  */

static bool
value_factor_p (tree value, HOST_WIDE_INT factor)
{
  if (host_integerp (value, 1))
    return tree_low_cst (value, 1) % factor == 0;

  if (TREE_CODE (value) == MULT_EXPR)
    return (value_factor_p (TREE_OPERAND (value, 0), factor)
            || value_factor_p (TREE_OPERAND (value, 1), factor));

  return false;
}
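
/* For example, a constant bit position of 24 is a multiple of a factor of 8,
   so value_factor_p returns true for it, and a MULT_EXPR such as 8 * N is
   known to be a multiple of 8 whatever N is; anything else conservatively
   yields false.  */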
/* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
   unless we can prove these two fields are laid out in such a way that no gap
   exists between the end of PREV_FIELD and the beginning of CURR_FIELD.
   OFFSET is the distance in bits between the end of PREV_FIELD and the
   starting position of CURR_FIELD.  It is ignored if null.  */

static bool
potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
{
  /* If this is the first field of the record, there cannot be any gap.  */
  if (!prev_field)
    return false;

  /* If the previous field is a union type, then return false: the only
     time when such a field is not the last field of the record is when
     there are other components at fixed positions after it (meaning there
     was a rep clause for every field), in which case we don't want the
     alignment constraint to override them.  */
  if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
    return false;

  /* If the distance between the end of prev_field and the beginning of
     curr_field is constant, then there is a gap if the value of this
     constant is not null.  */
  if (offset && host_integerp (offset, 1))
    return !integer_zerop (offset);

  /* If the size and position of the previous field are constant,
     then check the sum of this size and position.  There will be a gap
     iff it is not a multiple of the current field alignment.  */
  if (host_integerp (DECL_SIZE (prev_field), 1)
      && host_integerp (bit_position (prev_field), 1))
    return ((tree_low_cst (bit_position (prev_field), 1)
             + tree_low_cst (DECL_SIZE (prev_field), 1))
            % DECL_ALIGN (curr_field) != 0);

  /* If both the position and size of the previous field are multiples
     of the current field alignment, there cannot be any gap.  */
  if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
      && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
    return false;

  /* Fallback: return that there may be a potential gap.  */
  return true;
}
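
/* For example, a 24-bit PREV_FIELD at bit position 0 followed by a
   CURR_FIELD requiring 32-bit alignment gives 0 + 24, which is not a
   multiple of 32, so a potential gap is reported; if PREV_FIELD were
   32 bits wide instead, the sum would be 32 and no gap would be assumed.  */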
/* Return a LABEL_DECL with LABEL_NAME.  GNAT_NODE is used for the position
   of the decl.  */

tree
create_label_decl (tree label_name, Node_Id gnat_node)
{
  tree label_decl
    = build_decl (input_location, LABEL_DECL, label_name, void_type_node);

  DECL_MODE (label_decl) = VOIDmode;

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (label_decl, gnat_node);

  return label_decl;
}
/* Return a FUNCTION_DECL node.  SUBPROG_NAME is the name of the subprogram,
   ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
   node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
   PARM_DECL nodes chained through the DECL_CHAIN field).

   INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, ARTIFICIAL_FLAG and ATTR_LIST are
   used to set the appropriate fields in the FUNCTION_DECL.  GNAT_NODE is
   used for the position of the decl.  */

tree
create_subprog_decl (tree subprog_name, tree asm_name, tree subprog_type,
                     tree param_decl_list, bool inline_flag, bool public_flag,
                     bool extern_flag, bool artificial_flag,
                     struct attrib *attr_list, Node_Id gnat_node)
{
  tree subprog_decl = build_decl (input_location, FUNCTION_DECL, subprog_name,
                                  subprog_type);
  tree result_decl = build_decl (input_location, RESULT_DECL, NULL_TREE,
                                 TREE_TYPE (subprog_type));
  DECL_ARGUMENTS (subprog_decl) = param_decl_list;

  /* If this is a non-inline function nested inside an inlined external
     function, we cannot honor both requests without cloning the nested
     function in the current unit since it is private to the other unit.
     We could inline the nested function as well but it's probably better
     to err on the side of too little inlining.  */
  if (!inline_flag
      && !public_flag
      && current_function_decl
      && DECL_DECLARED_INLINE_P (current_function_decl)
      && DECL_EXTERNAL (current_function_decl))
    DECL_DECLARED_INLINE_P (current_function_decl) = 0;

  DECL_ARTIFICIAL (subprog_decl) = artificial_flag;
  DECL_EXTERNAL (subprog_decl) = extern_flag;
  DECL_DECLARED_INLINE_P (subprog_decl) = inline_flag;
  DECL_NO_INLINE_WARNING_P (subprog_decl) = inline_flag && artificial_flag;

  TREE_PUBLIC (subprog_decl) = public_flag;
  TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
  TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
  TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);

  DECL_ARTIFICIAL (result_decl) = 1;
  DECL_IGNORED_P (result_decl) = 1;
  DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (subprog_type);
  DECL_RESULT (subprog_decl) = result_decl;

  if (asm_name)
    {
      SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);

      /* The expand_main_function circuitry expects "main_identifier_node" to
         designate the DECL_NAME of the 'main' entry point, in turn expected
         to be declared as the "main" function literally by default.  Ada
         program entry points are typically declared with a different name
         within the binder generated file, exported as 'main' to satisfy the
         system expectations.  Force main_identifier_node in this case.  */
      if (asm_name == main_identifier_node)
        DECL_NAME (subprog_decl) = main_identifier_node;
    }

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (subprog_decl, gnat_node);

  process_attributes (subprog_decl, attr_list);

  /* Output the assembler code and/or RTL for the declaration.  */
  rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);

  return subprog_decl;
}
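
/* Illustrative sketch only (the names below are made up): an imported C
   function "int puts (const char *)" could be declared with

     tree gnu_ftype
       = build_function_type_list (integer_type_node,
                                   build_pointer_type (char_type_node),
                                   NULL_TREE);
     tree gnu_fdecl
       = create_subprog_decl (get_identifier ("puts"), NULL_TREE, gnu_ftype,
                              NULL_TREE, false, true, true, true, NULL, Empty);

   i.e. not inlined, public, external and artificial, with no attributes.  */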
/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
   body.  This routine needs to be invoked before processing the declarations
   appearing in the subprogram.  */

void
begin_subprog_body (tree subprog_decl)
{
  tree param_decl;

  announce_function (subprog_decl);

  /* This function is being defined.  */
  TREE_STATIC (subprog_decl) = 1;

  current_function_decl = subprog_decl;

  /* Enter a new binding level and show that all the parameters belong to
     this function.  */
  gnat_pushlevel ();

  for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
       param_decl = DECL_CHAIN (param_decl))
    DECL_CONTEXT (param_decl) = subprog_decl;

  make_decl_rtl (subprog_decl);
}
/* Finish translating the current subprogram and set its BODY.  */

void
end_subprog_body (tree body)
{
  tree fndecl = current_function_decl;

  /* Attach the BLOCK for this level to the function and pop the level.  */
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  gnat_poplevel ();

  /* Mark the RESULT_DECL as being in this subprogram.  */
  DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;

  /* The body should be a BIND_EXPR whose BLOCK is the top-level one.  */
  if (TREE_CODE (body) == BIND_EXPR)
    {
      BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
      DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
    }

  DECL_SAVED_TREE (fndecl) = body;

  current_function_decl = decl_function_context (fndecl);
}
/* Wrap up compilation of SUBPROG_DECL, a subprogram body.  */

void
rest_of_subprog_body_compilation (tree subprog_decl)
{
  /* We cannot track the location of errors past this point.  */
  error_gnat_node = Empty;

  /* If we're only annotating types, don't actually compile this function.  */
  if (type_annotate_only)
    return;

  /* Dump functions before gimplification.  */
  dump_function (TDI_original, subprog_decl);

  /* ??? This special handling of nested functions is probably obsolete.  */
  if (!decl_function_context (subprog_decl))
    cgraph_finalize_function (subprog_decl, false);
  else
    /* Register this function with cgraph just far enough to get it
       added to our parent's nested function list.  */
    (void) cgraph_get_create_node (subprog_decl);
}
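
/* Illustrative sketch of the expected calling sequence for a subprogram body
   (the gnu_* names are made up):

     begin_subprog_body (gnu_subprog_decl);
     ... translate the declarations and statements, producing gnu_body ...
     end_subprog_body (gnu_body);
     rest_of_subprog_body_compilation (gnu_subprog_decl);
   */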
tree
gnat_builtin_function (tree decl)
{
  gnat_pushdecl (decl, Empty);
  return decl;
}
/* Return an integer type with the number of bits of precision given by
   PRECISION.  UNSIGNEDP is nonzero if the type is unsigned; otherwise
   it is a signed type.  */

tree
gnat_type_for_size (unsigned precision, int unsignedp)
{
  tree t;
  char type_name[20];

  if (precision <= 2 * MAX_BITS_PER_WORD
      && signed_and_unsigned_types[precision][unsignedp])
    return signed_and_unsigned_types[precision][unsignedp];

  if (unsignedp)
    t = make_unsigned_type (precision);
  else
    t = make_signed_type (precision);

  if (precision <= 2 * MAX_BITS_PER_WORD)
    signed_and_unsigned_types[precision][unsignedp] = t;

  if (!TYPE_NAME (t))
    {
      sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
      TYPE_NAME (t) = get_identifier (type_name);
    }

  return t;
}
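
/* For example, gnat_type_for_size (8, 1) returns (and caches) an unsigned
   8-bit integer type which, if still anonymous, is named "UNSIGNED_8",
   while gnat_type_for_size (32, 0) yields a signed 32-bit type named
   "SIGNED_32".  */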
/* Likewise for floating-point types.  */

tree
float_type_for_precision (int precision, enum machine_mode mode)
{
  tree t;
  char type_name[20];

  if (float_types[(int) mode])
    return float_types[(int) mode];

  float_types[(int) mode] = t = make_node (REAL_TYPE);
  TYPE_PRECISION (t) = precision;
  layout_type (t);

  gcc_assert (TYPE_MODE (t) == mode);

  if (!TYPE_NAME (t))
    {
      sprintf (type_name, "FLOAT_%d", precision);
      TYPE_NAME (t) = get_identifier (type_name);
    }

  return t;
}
/* Return a data type that has machine mode MODE.  UNSIGNEDP selects
   an unsigned type; otherwise a signed type is returned.  */

tree
gnat_type_for_mode (enum machine_mode mode, int unsignedp)
{
  if (mode == BLKmode)
    return NULL_TREE;

  if (mode == VOIDmode)
    return void_type_node;

  if (COMPLEX_MODE_P (mode))
    return NULL_TREE;

  if (SCALAR_FLOAT_MODE_P (mode))
    return float_type_for_precision (GET_MODE_PRECISION (mode), mode);

  if (SCALAR_INT_MODE_P (mode))
    return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);

  if (VECTOR_MODE_P (mode))
    {
      enum machine_mode inner_mode = GET_MODE_INNER (mode);
      tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
      if (inner_type)
        return build_vector_type_for_mode (inner_type, mode);
    }

  return NULL_TREE;
}
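
/* For example, gnat_type_for_mode (SImode, 0) returns a 32-bit signed
   integer type on the usual targets, and a vector mode such as V4SImode is
   mapped to a vector type built over the type of its inner mode; modes with
   no sensible Ada counterpart (BLKmode, complex modes) yield NULL_TREE.  */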
/* Return the unsigned version of a TYPE_NODE, a scalar type.  */

tree
gnat_unsigned_type (tree type_node)
{
  tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);

  if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
    {
      type = copy_node (type);
      TREE_TYPE (type) = type_node;
    }
  else if (TREE_TYPE (type_node)
           && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
           && TYPE_MODULAR_P (TREE_TYPE (type_node)))
    {
      type = copy_node (type);
      TREE_TYPE (type) = TREE_TYPE (type_node);
    }

  return type;
}
/* Return the signed version of a TYPE_NODE, a scalar type.  */

tree
gnat_signed_type (tree type_node)
{
  tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);

  if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
    {
      type = copy_node (type);
      TREE_TYPE (type) = type_node;
    }
  else if (TREE_TYPE (type_node)
           && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
           && TYPE_MODULAR_P (TREE_TYPE (type_node)))
    {
      type = copy_node (type);
      TREE_TYPE (type) = TREE_TYPE (type_node);
    }

  return type;
}
/* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
   transparently converted to each other.  */

int
gnat_types_compatible_p (tree t1, tree t2)
{
  enum tree_code code;

  /* This is the default criterion.  */
  if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
    return 1;

  /* We only check structural equivalence here.  */
  if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
    return 0;

  /* Vector types are also compatible if they have the same number of subparts
     and the same form of (scalar) element type.  */
  if (code == VECTOR_TYPE
      && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
      && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
      && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
    return 1;

  /* Array types are also compatible if they are constrained and have the same
     domain(s) and the same component type.  */
  if (code == ARRAY_TYPE
      && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
          || (TYPE_DOMAIN (t1)
              && TYPE_DOMAIN (t2)
              && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
                                     TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
              && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
                                     TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
      && (TREE_TYPE (t1) == TREE_TYPE (t2)
          || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
              && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))))
    return 1;

  /* Padding record types are also compatible if they pad the same type
     and have the same constant size.  */
  if (code == RECORD_TYPE
      && TYPE_PADDING_P (t1) && TYPE_PADDING_P (t2)
      && TREE_TYPE (TYPE_FIELDS (t1)) == TREE_TYPE (TYPE_FIELDS (t2))
      && tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2)))
    return 1;

  return 0;
}
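
/* For example, two separately elaborated constrained subtypes of the same
   String type with identical constant bounds get distinct ARRAY_TYPE nodes,
   yet they compare compatible here because their domains have equal constant
   bounds and the component types match.  */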
/* Return true if EXPR is a useless type conversion.  */

bool
gnat_useless_type_conversion (tree expr)
{
  if (CONVERT_EXPR_P (expr)
      || TREE_CODE (expr) == VIEW_CONVERT_EXPR
      || TREE_CODE (expr) == NON_LVALUE_EXPR)
    return gnat_types_compatible_p (TREE_TYPE (expr),
                                    TREE_TYPE (TREE_OPERAND (expr, 0)));

  return false;
}
/* Return true if T, a FUNCTION_TYPE, has the specified list of flags.  */

bool
fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
                     bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
{
  return TYPE_CI_CO_LIST (t) == cico_list
         && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
         && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
         && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
}
/* EXP is an expression for the size of an object.  If this size contains
   discriminant references, replace them with the maximum (if MAX_P) or
   minimum (if !MAX_P) possible value of the discriminant.  */

tree
max_size (tree exp, bool max_p)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
    case tcc_constant:
      return exp;

    case tcc_vl_exp:
      if (code == CALL_EXPR)
        {
          tree t, *argarray;
          int n, i;

          t = maybe_inline_call_in_expr (exp);
          if (t)
            return max_size (t, max_p);

          n = call_expr_nargs (exp);
          gcc_assert (n > 0);
          argarray = XALLOCAVEC (tree, n);
          for (i = 0; i < n; i++)
            argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
          return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
        }
      break;

    case tcc_reference:
      /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
         modify.  Otherwise, we treat it like a variable.  */
      if (!CONTAINS_PLACEHOLDER_P (exp))
        return exp;

      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      return
        max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);

    case tcc_comparison:
      return max_p ? size_one_node : size_zero_node;

    case tcc_unary:
    case tcc_binary:
    case tcc_expression:
      switch (TREE_CODE_LENGTH (code))
        {
        case 1:
          if (code == SAVE_EXPR)
            return exp;
          else if (code == NON_LVALUE_EXPR)
            return max_size (TREE_OPERAND (exp, 0), max_p);
          else
            return
              fold_build1 (code, type,
                           max_size (TREE_OPERAND (exp, 0),
                                     code == NEGATE_EXPR ? !max_p : max_p));

        case 2:
          if (code == COMPOUND_EXPR)
            return max_size (TREE_OPERAND (exp, 1), max_p);

          {
            tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
            tree rhs = max_size (TREE_OPERAND (exp, 1),
                                 code == MINUS_EXPR ? !max_p : max_p);

            /* Special-case wanting the maximum value of a MIN_EXPR.
               In that case, if one side overflows, return the other.
               sizetype is signed, but we know sizes are non-negative.
               Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
               overflowing and the RHS a variable.  */
            if (max_p
                && code == MIN_EXPR
                && TREE_CODE (rhs) == INTEGER_CST
                && TREE_OVERFLOW (rhs))
              return lhs;
            else if (max_p
                     && code == MIN_EXPR
                     && TREE_CODE (lhs) == INTEGER_CST
                     && TREE_OVERFLOW (lhs))
              return rhs;
            else if ((code == MINUS_EXPR || code == PLUS_EXPR)
                     && TREE_CODE (lhs) == INTEGER_CST
                     && TREE_OVERFLOW (lhs)
                     && !TREE_CONSTANT (rhs))
              return lhs;
            else
              return fold_build2 (code, type, lhs, rhs);
          }

        case 3:
          if (code == COND_EXPR)
            return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
                                max_size (TREE_OPERAND (exp, 1), max_p),
                                max_size (TREE_OPERAND (exp, 2), max_p));
        }

      /* Other tree classes cannot happen.  */
    default:
      break;
    }

  gcc_unreachable ();
}
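
/* For example, given a discriminated record with D : Natural range 0 .. 100
   and a component String (1 .. D), the size expression contains a reference
   to D through a PLACEHOLDER_EXPR; calling max_size on it with MAX_P set
   substitutes the upper bound 100 and folds, yielding the largest size an
   object of the type may have.  */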
/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
   EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
   Return a constructor for the template.  */

tree
build_template (tree template_type, tree array_type, tree expr)
{
  VEC(constructor_elt,gc) *template_elts = NULL;
  tree bound_list = NULL_TREE;
  tree field;

  while (TREE_CODE (array_type) == RECORD_TYPE
         && (TYPE_PADDING_P (array_type)
             || TYPE_JUSTIFIED_MODULAR_P (array_type)))
    array_type = TREE_TYPE (TYPE_FIELDS (array_type));

  if (TREE_CODE (array_type) == ARRAY_TYPE
      || (TREE_CODE (array_type) == INTEGER_TYPE
          && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
    bound_list = TYPE_ACTUAL_BOUNDS (array_type);

  /* First make the list for a CONSTRUCTOR for the template.  Go down the
     field list of the template instead of the type chain because this
     array might be an Ada array of arrays and we can't tell where the
     nested arrays stop being the underlying object.  */
  for (field = TYPE_FIELDS (template_type); field;
       (bound_list
        ? (bound_list = TREE_CHAIN (bound_list))
        : (array_type = TREE_TYPE (array_type))),
       field = DECL_CHAIN (DECL_CHAIN (field)))
    {
      tree bounds, min, max;

      /* If we have a bound list, get the bounds from there.  Likewise
         for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
         DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
         This will give us a maximum range.  */
      if (bound_list)
        bounds = TREE_VALUE (bound_list);
      else if (TREE_CODE (array_type) == ARRAY_TYPE)
        bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
      else if (expr && TREE_CODE (expr) == PARM_DECL
               && DECL_BY_COMPONENT_PTR_P (expr))
        bounds = TREE_TYPE (field);
      else
        gcc_unreachable ();

      min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
      max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));

      /* If either MIN or MAX involves a PLACEHOLDER_EXPR, we must
         substitute it from OBJECT.  */
      min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
      max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);

      CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
      CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
    }

  return gnat_build_constructor (template_type, template_elts);
}
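
/* The constructor built above has one (lower bound, upper bound) pair of
   template fields per array dimension, each bound converted to the type of
   the corresponding template field, so e.g. a two-dimensional array yields
   four elements in the template.  */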
/* Helper routine to make a descriptor field.  FIELD_LIST is the list of decls
   being built; the new decl is chained on to the front of the list.  */

static tree
make_descriptor_field (const char *name, tree type, tree rec_type,
                       tree initial, tree field_list)
{
  tree field
    = create_field_decl (get_identifier (name), type, rec_type, NULL_TREE,
                         NULL_TREE, 0, 0);

  DECL_INITIAL (field) = initial;
  DECL_CHAIN (field) = field_list;
  return field;
}
2432 descriptor type, and the GCC type of an object. Each FIELD_DECL in the
2433 type contains in its DECL_INITIAL the expression to use when a constructor
2434 is made for the type. GNAT_ENTITY is an entity used to print out an error
2435 message if the mechanism cannot be applied to an object of that type and
2436 also for the name. */
2439 build_vms_descriptor32 (tree type
, Mechanism_Type mech
, Entity_Id gnat_entity
)
2441 tree record_type
= make_node (RECORD_TYPE
);
2442 tree pointer32_type
, pointer64_type
;
2443 tree field_list
= NULL_TREE
;
2444 int klass
, ndim
, i
, dtype
= 0;
2445 tree inner_type
, tem
;
2448 /* If TYPE is an unconstrained array, use the underlying array type. */
2449 if (TREE_CODE (type
) == UNCONSTRAINED_ARRAY_TYPE
)
2450 type
= TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type
))));
2452 /* If this is an array, compute the number of dimensions in the array,
2453 get the index types, and point to the inner type. */
2454 if (TREE_CODE (type
) != ARRAY_TYPE
)
2457 for (ndim
= 1, inner_type
= type
;
2458 TREE_CODE (TREE_TYPE (inner_type
)) == ARRAY_TYPE
2459 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type
));
2460 ndim
++, inner_type
= TREE_TYPE (inner_type
))
2463 idx_arr
= XALLOCAVEC (tree
, ndim
);
2465 if (mech
!= By_Descriptor_NCA
&& mech
!= By_Short_Descriptor_NCA
2466 && TREE_CODE (type
) == ARRAY_TYPE
&& TYPE_CONVENTION_FORTRAN_P (type
))
2467 for (i
= ndim
- 1, inner_type
= type
;
2469 i
--, inner_type
= TREE_TYPE (inner_type
))
2470 idx_arr
[i
] = TYPE_DOMAIN (inner_type
);
2472 for (i
= 0, inner_type
= type
;
2474 i
++, inner_type
= TREE_TYPE (inner_type
))
2475 idx_arr
[i
] = TYPE_DOMAIN (inner_type
);
2477 /* Now get the DTYPE value. */
2478 switch (TREE_CODE (type
))
2483 if (TYPE_VAX_FLOATING_POINT_P (type
))
2484 switch (tree_low_cst (TYPE_DIGITS_VALUE (type
), 1))
2497 switch (GET_MODE_BITSIZE (TYPE_MODE (type
)))
2500 dtype
= TYPE_UNSIGNED (type
) ? 2 : 6;
2503 dtype
= TYPE_UNSIGNED (type
) ? 3 : 7;
2506 dtype
= TYPE_UNSIGNED (type
) ? 4 : 8;
2509 dtype
= TYPE_UNSIGNED (type
) ? 5 : 9;
2512 dtype
= TYPE_UNSIGNED (type
) ? 25 : 26;
2518 dtype
= GET_MODE_BITSIZE (TYPE_MODE (type
)) == 32 ? 52 : 53;
2522 if (TREE_CODE (TREE_TYPE (type
)) == INTEGER_TYPE
2523 && TYPE_VAX_FLOATING_POINT_P (type
))
2524 switch (tree_low_cst (TYPE_DIGITS_VALUE (type
), 1))
2536 dtype
= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type
))) == 32 ? 54: 55;
2547 /* Get the CLASS value. */
2550 case By_Descriptor_A
:
2551 case By_Short_Descriptor_A
:
2554 case By_Descriptor_NCA
:
2555 case By_Short_Descriptor_NCA
:
2558 case By_Descriptor_SB
:
2559 case By_Short_Descriptor_SB
:
2563 case By_Short_Descriptor
:
2564 case By_Descriptor_S
:
2565 case By_Short_Descriptor_S
:
2571 /* Make the type for a descriptor for VMS. The first four fields are the
2572 same for all types. */
2574 = make_descriptor_field ("LENGTH", gnat_type_for_size (16, 1), record_type
,
2575 size_in_bytes ((mech
== By_Descriptor_A
2576 || mech
== By_Short_Descriptor_A
)
2577 ? inner_type
: type
),
2580 = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1), record_type
,
2581 size_int (dtype
), field_list
);
2583 = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1), record_type
,
2584 size_int (klass
), field_list
);
2586 pointer32_type
= build_pointer_type_for_mode (type
, SImode
, false);
2587 pointer64_type
= build_pointer_type_for_mode (type
, DImode
, false);
2589 /* Ensure that only 32-bit pointers are passed in 32-bit descriptors. Note
2590 that we cannot build a template call to the CE routine as it would get a
2591 wrong source location; instead we use a second placeholder for it. */
2592 tem
= build_unary_op (ADDR_EXPR
, pointer64_type
,
2593 build0 (PLACEHOLDER_EXPR
, type
));
2594 tem
= build3 (COND_EXPR
, pointer32_type
,
2596 ? build_binary_op (GE_EXPR
, boolean_type_node
, tem
,
2597 build_int_cstu (pointer64_type
, 0x80000000))
2598 : boolean_false_node
,
2599 build0 (PLACEHOLDER_EXPR
, void_type_node
),
2600 convert (pointer32_type
, tem
));
2603 = make_descriptor_field ("POINTER", pointer32_type
, record_type
, tem
,
2609 case By_Short_Descriptor
:
2610 case By_Descriptor_S
:
2611 case By_Short_Descriptor_S
:
2614 case By_Descriptor_SB
:
2615 case By_Short_Descriptor_SB
:
2617 = make_descriptor_field ("SB_L1", gnat_type_for_size (32, 1),
2619 (TREE_CODE (type
) == ARRAY_TYPE
2620 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type
))
2624 = make_descriptor_field ("SB_U1", gnat_type_for_size (32, 1),
2626 (TREE_CODE (type
) == ARRAY_TYPE
2627 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type
))
2632 case By_Descriptor_A
:
2633 case By_Short_Descriptor_A
:
2634 case By_Descriptor_NCA
:
2635 case By_Short_Descriptor_NCA
:
2637 = make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
2638 record_type
, size_zero_node
, field_list
);
2641 = make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
2642 record_type
, size_zero_node
, field_list
);
2645 = make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
2647 size_int ((mech
== By_Descriptor_NCA
2648 || mech
== By_Short_Descriptor_NCA
)
2650 /* Set FL_COLUMN, FL_COEFF, and
2652 : (TREE_CODE (type
) == ARRAY_TYPE
2653 && TYPE_CONVENTION_FORTRAN_P
2659 = make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
2660 record_type
, size_int (ndim
), field_list
);
2663 = make_descriptor_field ("ARSIZE", gnat_type_for_size (32, 1),
2664 record_type
, size_in_bytes (type
),
2667 /* Now build a pointer to the 0,0,0... element. */
2668 tem
= build0 (PLACEHOLDER_EXPR
, type
);
2669 for (i
= 0, inner_type
= type
; i
< ndim
;
2670 i
++, inner_type
= TREE_TYPE (inner_type
))
2671 tem
= build4 (ARRAY_REF
, TREE_TYPE (inner_type
), tem
,
2672 convert (TYPE_DOMAIN (inner_type
), size_zero_node
),
2673 NULL_TREE
, NULL_TREE
);
2676 = make_descriptor_field ("A0", pointer32_type
, record_type
,
2677 build1 (ADDR_EXPR
, pointer32_type
, tem
),
2680 /* Next come the addressing coefficients. */
2681 tem
= size_one_node
;
2682 for (i
= 0; i
< ndim
; i
++)
2686 = size_binop (MULT_EXPR
, tem
,
2687 size_binop (PLUS_EXPR
,
2688 size_binop (MINUS_EXPR
,
2689 TYPE_MAX_VALUE (idx_arr
[i
]),
2690 TYPE_MIN_VALUE (idx_arr
[i
])),
2693 fname
[0] = ((mech
== By_Descriptor_NCA
||
2694 mech
== By_Short_Descriptor_NCA
) ? 'S' : 'M');
2695 fname
[1] = '0' + i
, fname
[2] = 0;
2697 = make_descriptor_field (fname
, gnat_type_for_size (32, 1),
2698 record_type
, idx_length
, field_list
);
2700 if (mech
== By_Descriptor_NCA
|| mech
== By_Short_Descriptor_NCA
)
2704 /* Finally here are the bounds. */
2705 for (i
= 0; i
< ndim
; i
++)
2709 fname
[0] = 'L', fname
[1] = '0' + i
, fname
[2] = 0;
2711 = make_descriptor_field (fname
, gnat_type_for_size (32, 1),
2712 record_type
, TYPE_MIN_VALUE (idx_arr
[i
]),
2717 = make_descriptor_field (fname
, gnat_type_for_size (32, 1),
2718 record_type
, TYPE_MAX_VALUE (idx_arr
[i
]),
2724 post_error ("unsupported descriptor type for &", gnat_entity
);
2727 TYPE_NAME (record_type
) = create_concat_name (gnat_entity
, "DESC");
2728 finish_record_type (record_type
, nreverse (field_list
), 0, false);
2732 /* Build a 64-bit VMS descriptor from a Mechanism_Type, which must specify a
2733 descriptor type, and the GCC type of an object. Each FIELD_DECL in the
2734 type contains in its DECL_INITIAL the expression to use when a constructor
2735 is made for the type. GNAT_ENTITY is an entity used to print out an error
2736 message if the mechanism cannot be applied to an object of that type and
2737 also for the name. */
2740 build_vms_descriptor (tree type
, Mechanism_Type mech
, Entity_Id gnat_entity
)
2742 tree record_type
= make_node (RECORD_TYPE
);
2743 tree pointer64_type
;
2744 tree field_list
= NULL_TREE
;
2745 int klass
, ndim
, i
, dtype
= 0;
2746 tree inner_type
, tem
;
2749 /* If TYPE is an unconstrained array, use the underlying array type. */
2750 if (TREE_CODE (type
) == UNCONSTRAINED_ARRAY_TYPE
)
2751 type
= TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type
))));
2753 /* If this is an array, compute the number of dimensions in the array,
2754 get the index types, and point to the inner type. */
2755 if (TREE_CODE (type
) != ARRAY_TYPE
)
2758 for (ndim
= 1, inner_type
= type
;
2759 TREE_CODE (TREE_TYPE (inner_type
)) == ARRAY_TYPE
2760 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type
));
2761 ndim
++, inner_type
= TREE_TYPE (inner_type
))
2764 idx_arr
= XALLOCAVEC (tree
, ndim
);
2766 if (mech
!= By_Descriptor_NCA
2767 && TREE_CODE (type
) == ARRAY_TYPE
&& TYPE_CONVENTION_FORTRAN_P (type
))
2768 for (i
= ndim
- 1, inner_type
= type
;
2770 i
--, inner_type
= TREE_TYPE (inner_type
))
2771 idx_arr
[i
] = TYPE_DOMAIN (inner_type
);
2773 for (i
= 0, inner_type
= type
;
2775 i
++, inner_type
= TREE_TYPE (inner_type
))
2776 idx_arr
[i
] = TYPE_DOMAIN (inner_type
);
2778 /* Now get the DTYPE value. */
2779 switch (TREE_CODE (type
))
2784 if (TYPE_VAX_FLOATING_POINT_P (type
))
2785 switch (tree_low_cst (TYPE_DIGITS_VALUE (type
), 1))
2798 switch (GET_MODE_BITSIZE (TYPE_MODE (type
)))
2801 dtype
= TYPE_UNSIGNED (type
) ? 2 : 6;
2804 dtype
= TYPE_UNSIGNED (type
) ? 3 : 7;
2807 dtype
= TYPE_UNSIGNED (type
) ? 4 : 8;
2810 dtype
= TYPE_UNSIGNED (type
) ? 5 : 9;
2813 dtype
= TYPE_UNSIGNED (type
) ? 25 : 26;
2819 dtype
= GET_MODE_BITSIZE (TYPE_MODE (type
)) == 32 ? 52 : 53;
2823 if (TREE_CODE (TREE_TYPE (type
)) == INTEGER_TYPE
2824 && TYPE_VAX_FLOATING_POINT_P (type
))
2825 switch (tree_low_cst (TYPE_DIGITS_VALUE (type
), 1))
2837 dtype
= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type
))) == 32 ? 54: 55;
2848 /* Get the CLASS value. */
2851 case By_Descriptor_A
:
2854 case By_Descriptor_NCA
:
2857 case By_Descriptor_SB
:
2861 case By_Descriptor_S
:
2867 /* Make the type for a 64-bit descriptor for VMS. The first six fields
2868 are the same for all types. */
2870 = make_descriptor_field ("MBO", gnat_type_for_size (16, 1),
2871 record_type
, size_int (1), field_list
);
2873 = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1),
2874 record_type
, size_int (dtype
), field_list
);
2876 = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1),
2877 record_type
, size_int (klass
), field_list
);
2879 = make_descriptor_field ("MBMO", gnat_type_for_size (32, 1),
2880 record_type
, ssize_int (-1), field_list
);
2882 = make_descriptor_field ("LENGTH", gnat_type_for_size (64, 1),
2884 size_in_bytes (mech
== By_Descriptor_A
2885 ? inner_type
: type
),
2888 pointer64_type
= build_pointer_type_for_mode (type
, DImode
, false);
2891 = make_descriptor_field ("POINTER", pointer64_type
, record_type
,
2892 build_unary_op (ADDR_EXPR
, pointer64_type
,
2893 build0 (PLACEHOLDER_EXPR
, type
)),
2899 case By_Descriptor_S
:
2902 case By_Descriptor_SB
:
2904 = make_descriptor_field ("SB_L1", gnat_type_for_size (64, 1),
2906 (TREE_CODE (type
) == ARRAY_TYPE
2907 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type
))
2911 = make_descriptor_field ("SB_U1", gnat_type_for_size (64, 1),
2913 (TREE_CODE (type
) == ARRAY_TYPE
2914 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type
))
2919 case By_Descriptor_A
:
2920 case By_Descriptor_NCA
:
2922 = make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
2923 record_type
, size_zero_node
, field_list
);
2926 = make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
2927 record_type
, size_zero_node
, field_list
);
2929 dtype
= (mech
== By_Descriptor_NCA
2931 /* Set FL_COLUMN, FL_COEFF, and
2933 : (TREE_CODE (type
) == ARRAY_TYPE
2934 && TYPE_CONVENTION_FORTRAN_P (type
)
2937 = make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
2938 record_type
, size_int (dtype
),
2942 = make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
2943 record_type
, size_int (ndim
), field_list
);
2946 = make_descriptor_field ("MBZ", gnat_type_for_size (32, 1),
2947 record_type
, size_int (0), field_list
);
2949 = make_descriptor_field ("ARSIZE", gnat_type_for_size (64, 1),
2950 record_type
, size_in_bytes (type
),
2953 /* Now build a pointer to the 0,0,0... element. */
2954 tem
= build0 (PLACEHOLDER_EXPR
, type
);
2955 for (i
= 0, inner_type
= type
; i
< ndim
;
2956 i
++, inner_type
= TREE_TYPE (inner_type
))
2957 tem
= build4 (ARRAY_REF
, TREE_TYPE (inner_type
), tem
,
2958 convert (TYPE_DOMAIN (inner_type
), size_zero_node
),
2959 NULL_TREE
, NULL_TREE
);
2962 = make_descriptor_field ("A0", pointer64_type
, record_type
,
2963 build1 (ADDR_EXPR
, pointer64_type
, tem
),
2966 /* Next come the addressing coefficients. */
2967 tem
= size_one_node
;
2968 for (i
= 0; i
< ndim
; i
++)
2972 = size_binop (MULT_EXPR
, tem
,
2973 size_binop (PLUS_EXPR
,
2974 size_binop (MINUS_EXPR
,
2975 TYPE_MAX_VALUE (idx_arr
[i
]),
2976 TYPE_MIN_VALUE (idx_arr
[i
])),
2979 fname
[0] = (mech
== By_Descriptor_NCA
? 'S' : 'M');
2980 fname
[1] = '0' + i
, fname
[2] = 0;
2982 = make_descriptor_field (fname
, gnat_type_for_size (64, 1),
2983 record_type
, idx_length
, field_list
);
2985 if (mech
== By_Descriptor_NCA
)
2989 /* Finally here are the bounds. */
2990 for (i
= 0; i
< ndim
; i
++)
2994 fname
[0] = 'L', fname
[1] = '0' + i
, fname
[2] = 0;
2996 = make_descriptor_field (fname
, gnat_type_for_size (64, 1),
2998 TYPE_MIN_VALUE (idx_arr
[i
]), field_list
);
3002 = make_descriptor_field (fname
, gnat_type_for_size (64, 1),
3004 TYPE_MAX_VALUE (idx_arr
[i
]), field_list
);
3009 post_error ("unsupported descriptor type for &", gnat_entity
);
3012 TYPE_NAME (record_type
) = create_concat_name (gnat_entity
, "DESC64");
3013 finish_record_type (record_type
, nreverse (field_list
), 0, false);
3017 /* Fill in a VMS descriptor of GNU_TYPE for GNU_EXPR and return the result.
3018 GNAT_ACTUAL is the actual parameter for which the descriptor is built. */
3021 fill_vms_descriptor (tree gnu_type
, tree gnu_expr
, Node_Id gnat_actual
)
3023 VEC(constructor_elt
,gc
) *v
= NULL
;
3026 gnu_expr
= maybe_unconstrained_array (gnu_expr
);
3027 gnu_expr
= gnat_protect_expr (gnu_expr
);
3028 gnat_mark_addressable (gnu_expr
);
3030 /* We may need to substitute both GNU_EXPR and a CALL_EXPR to the raise CE
3031 routine in case we have a 32-bit descriptor. */
3032 gnu_expr
= build2 (COMPOUND_EXPR
, void_type_node
,
3033 build_call_raise (CE_Range_Check_Failed
, gnat_actual
,
3034 N_Raise_Constraint_Error
),
3037 for (field
= TYPE_FIELDS (gnu_type
); field
; field
= DECL_CHAIN (field
))
3040 = convert (TREE_TYPE (field
),
3041 SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_INITIAL (field
),
3043 CONSTRUCTOR_APPEND_ELT (v
, field
, value
);
3046 return gnat_build_constructor (gnu_type
, v
);
3049 /* Convert GNU_EXPR, a pointer to a 64bit VMS descriptor, to GNU_TYPE, a
3050 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3051 which the VMS descriptor is passed. */
3054 convert_vms_descriptor64 (tree gnu_type
, tree gnu_expr
, Entity_Id gnat_subprog
)
3056 tree desc_type
= TREE_TYPE (TREE_TYPE (gnu_expr
));
3057 tree desc
= build1 (INDIRECT_REF
, desc_type
, gnu_expr
);
3058 /* The CLASS field is the 3rd field in the descriptor. */
3059 tree klass
= DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type
)));
3060 /* The POINTER field is the 6th field in the descriptor. */
3061 tree pointer
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (klass
)));
3063 /* Retrieve the value of the POINTER field. */
3065 = build3 (COMPONENT_REF
, TREE_TYPE (pointer
), desc
, pointer
, NULL_TREE
);
3067 if (POINTER_TYPE_P (gnu_type
))
3068 return convert (gnu_type
, gnu_expr64
);
3070 else if (TYPE_IS_FAT_POINTER_P (gnu_type
))
3072 tree p_array_type
= TREE_TYPE (TYPE_FIELDS (gnu_type
));
3073 tree p_bounds_type
= TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (gnu_type
)));
3074 tree template_type
= TREE_TYPE (p_bounds_type
);
3075 tree min_field
= TYPE_FIELDS (template_type
);
3076 tree max_field
= DECL_CHAIN (TYPE_FIELDS (template_type
));
3077 tree template_tree
, template_addr
, aflags
, dimct
, t
, u
;
3078 /* See the head comment of build_vms_descriptor. */
3079 int iklass
= TREE_INT_CST_LOW (DECL_INITIAL (klass
));
3080 tree lfield
, ufield
;
3081 VEC(constructor_elt
,gc
) *v
;
3083 /* Convert POINTER to the pointer-to-array type. */
3084 gnu_expr64
= convert (p_array_type
, gnu_expr64
);
3088 case 1: /* Class S */
3089 case 15: /* Class SB */
3090 /* Build {1, LENGTH} template; LENGTH64 is the 5th field. */
3091 v
= VEC_alloc (constructor_elt
, gc
, 2);
3092 t
= DECL_CHAIN (DECL_CHAIN (klass
));
3093 t
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3094 CONSTRUCTOR_APPEND_ELT (v
, min_field
,
3095 convert (TREE_TYPE (min_field
),
3097 CONSTRUCTOR_APPEND_ELT (v
, max_field
,
3098 convert (TREE_TYPE (max_field
), t
));
3099 template_tree
= gnat_build_constructor (template_type
, v
);
3100 template_addr
= build_unary_op (ADDR_EXPR
, NULL_TREE
, template_tree
);
3102 /* For class S, we are done. */
3106 /* Test that we really have a SB descriptor, like DEC Ada. */
3107 t
= build3 (COMPONENT_REF
, TREE_TYPE (klass
), desc
, klass
, NULL
);
3108 u
= convert (TREE_TYPE (klass
), DECL_INITIAL (klass
));
3109 u
= build_binary_op (EQ_EXPR
, boolean_type_node
, t
, u
);
3110 /* If so, there is already a template in the descriptor and
3111 it is located right after the POINTER field. The fields are
3112 64bits so they must be repacked. */
3113 t
= DECL_CHAIN (pointer
);
3114 lfield
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3115 lfield
= convert (TREE_TYPE (TYPE_FIELDS (template_type
)), lfield
);
3118 ufield
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3120 (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type
))), ufield
);
3122 /* Build the template in the form of a constructor. */
3123 v
= VEC_alloc (constructor_elt
, gc
, 2);
3124 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (template_type
), lfield
);
3125 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (template_type
)),
3127 template_tree
= gnat_build_constructor (template_type
, v
);
3129 /* Otherwise use the {1, LENGTH} template we build above. */
3130 template_addr
= build3 (COND_EXPR
, p_bounds_type
, u
,
3131 build_unary_op (ADDR_EXPR
, p_bounds_type
,
3136 case 4: /* Class A */
3137 /* The AFLAGS field is the 3rd field after the pointer in the
3139 t
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer
)));
3140 aflags
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3141 /* The DIMCT field is the next field in the descriptor after
3144 dimct
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3145 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3146 or FL_COEFF or FL_BOUNDS not set. */
3147 u
= build_int_cst (TREE_TYPE (aflags
), 192);
3148 u
= build_binary_op (TRUTH_OR_EXPR
, boolean_type_node
,
3149 build_binary_op (NE_EXPR
, boolean_type_node
,
3151 convert (TREE_TYPE (dimct
),
3153 build_binary_op (NE_EXPR
, boolean_type_node
,
3154 build2 (BIT_AND_EXPR
,
3158 /* There is already a template in the descriptor and it is located
3159 in block 3. The fields are 64bits so they must be repacked. */
3160 t
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN
3162 lfield
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3163 lfield
= convert (TREE_TYPE (TYPE_FIELDS (template_type
)), lfield
);
3166 ufield
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3168 (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type
))), ufield
);
3170 /* Build the template in the form of a constructor. */
3171 v
= VEC_alloc (constructor_elt
, gc
, 2);
3172 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (template_type
), lfield
);
3173 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (template_type
)),
3175 template_tree
= gnat_build_constructor (template_type
, v
);
3176 template_tree
= build3 (COND_EXPR
, template_type
, u
,
3177 build_call_raise (CE_Length_Check_Failed
, Empty
,
3178 N_Raise_Constraint_Error
),
3181 = build_unary_op (ADDR_EXPR
, p_bounds_type
, template_tree
);
3184 case 10: /* Class NCA */
3186 post_error ("unsupported descriptor type for &", gnat_subprog
);
3187 template_addr
= integer_zero_node
;
3191 /* Build the fat pointer in the form of a constructor. */
3192 v
= VEC_alloc (constructor_elt
, gc
, 2);
3193 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (gnu_type
), gnu_expr64
);
3194 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (gnu_type
)),
3196 return gnat_build_constructor (gnu_type
, v
);
3203 /* Convert GNU_EXPR, a pointer to a 32bit VMS descriptor, to GNU_TYPE, a
3204 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3205 which the VMS descriptor is passed. */
3208 convert_vms_descriptor32 (tree gnu_type
, tree gnu_expr
, Entity_Id gnat_subprog
)
3210 tree desc_type
= TREE_TYPE (TREE_TYPE (gnu_expr
));
3211 tree desc
= build1 (INDIRECT_REF
, desc_type
, gnu_expr
);
3212 /* The CLASS field is the 3rd field in the descriptor. */
3213 tree klass
= DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type
)));
3214 /* The POINTER field is the 4th field in the descriptor. */
3215 tree pointer
= DECL_CHAIN (klass
);
3217 /* Retrieve the value of the POINTER field. */
3219 = build3 (COMPONENT_REF
, TREE_TYPE (pointer
), desc
, pointer
, NULL_TREE
);
3221 if (POINTER_TYPE_P (gnu_type
))
3222 return convert (gnu_type
, gnu_expr32
);
3224 else if (TYPE_IS_FAT_POINTER_P (gnu_type
))
3226 tree p_array_type
= TREE_TYPE (TYPE_FIELDS (gnu_type
));
3227 tree p_bounds_type
= TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (gnu_type
)));
3228 tree template_type
= TREE_TYPE (p_bounds_type
);
3229 tree min_field
= TYPE_FIELDS (template_type
);
3230 tree max_field
= DECL_CHAIN (TYPE_FIELDS (template_type
));
3231 tree template_tree
, template_addr
, aflags
, dimct
, t
, u
;
3232 /* See the head comment of build_vms_descriptor. */
3233 int iklass
= TREE_INT_CST_LOW (DECL_INITIAL (klass
));
3234 VEC(constructor_elt
,gc
) *v
;
3236 /* Convert POINTER to the pointer-to-array type. */
3237 gnu_expr32
= convert (p_array_type
, gnu_expr32
);
3241 case 1: /* Class S */
3242 case 15: /* Class SB */
3243 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
3244 v
= VEC_alloc (constructor_elt
, gc
, 2);
3245 t
= TYPE_FIELDS (desc_type
);
3246 t
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3247 CONSTRUCTOR_APPEND_ELT (v
, min_field
,
3248 convert (TREE_TYPE (min_field
),
3250 CONSTRUCTOR_APPEND_ELT (v
, max_field
,
3251 convert (TREE_TYPE (max_field
), t
));
3252 template_tree
= gnat_build_constructor (template_type
, v
);
3253 template_addr
= build_unary_op (ADDR_EXPR
, NULL_TREE
, template_tree
);
3255 /* For class S, we are done. */
3259 /* Test that we really have a SB descriptor, like DEC Ada. */
3260 t
= build3 (COMPONENT_REF
, TREE_TYPE (klass
), desc
, klass
, NULL
);
3261 u
= convert (TREE_TYPE (klass
), DECL_INITIAL (klass
));
3262 u
= build_binary_op (EQ_EXPR
, boolean_type_node
, t
, u
);
3263 /* If so, there is already a template in the descriptor and
3264 it is located right after the POINTER field. */
3265 t
= DECL_CHAIN (pointer
);
3267 = build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3268 /* Otherwise use the {1, LENGTH} template we build above. */
3269 template_addr
= build3 (COND_EXPR
, p_bounds_type
, u
,
3270 build_unary_op (ADDR_EXPR
, p_bounds_type
,
3275 case 4: /* Class A */
3276 /* The AFLAGS field is the 7th field in the descriptor. */
3277 t
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer
)));
3278 aflags
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3279 /* The DIMCT field is the 8th field in the descriptor. */
3281 dimct
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3282 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3283 or FL_COEFF or FL_BOUNDS not set. */
3284 u
= build_int_cst (TREE_TYPE (aflags
), 192);
3285 u
= build_binary_op (TRUTH_OR_EXPR
, boolean_type_node
,
3286 build_binary_op (NE_EXPR
, boolean_type_node
,
3288 convert (TREE_TYPE (dimct
),
3290 build_binary_op (NE_EXPR
, boolean_type_node
,
3291 build2 (BIT_AND_EXPR
,
3295 /* There is already a template in the descriptor and it is
3296 located at the start of block 3 (12th field). */
3297 t
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (t
))));
3299 = build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3300 template_tree
= build3 (COND_EXPR
, TREE_TYPE (t
), u
,
3301 build_call_raise (CE_Length_Check_Failed
, Empty
,
3302 N_Raise_Constraint_Error
),
3305 = build_unary_op (ADDR_EXPR
, p_bounds_type
, template_tree
);
3308 case 10: /* Class NCA */
3310 post_error ("unsupported descriptor type for &", gnat_subprog
);
3311 template_addr
= integer_zero_node
;
3315 /* Build the fat pointer in the form of a constructor. */
3316 v
= VEC_alloc (constructor_elt
, gc
, 2);
3317 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (gnu_type
), gnu_expr32
);
3318 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (gnu_type
)),
3321 return gnat_build_constructor (gnu_type
, v
);
3328 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
3329 pointer or fat pointer type. GNU_EXPR_ALT_TYPE is the alternate (32-bit)
3330 pointer type of GNU_EXPR. BY_REF is true if the result is to be used by
3331 reference. GNAT_SUBPROG is the subprogram to which the VMS descriptor is
3335 convert_vms_descriptor (tree gnu_type
, tree gnu_expr
, tree gnu_expr_alt_type
,
3336 bool by_ref
, Entity_Id gnat_subprog
)
3338 tree desc_type
= TREE_TYPE (TREE_TYPE (gnu_expr
));
3339 tree desc
= build1 (INDIRECT_REF
, desc_type
, gnu_expr
);
3340 tree mbo
= TYPE_FIELDS (desc_type
);
3341 const char *mbostr
= IDENTIFIER_POINTER (DECL_NAME (mbo
));
3342 tree mbmo
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (mbo
)));
3343 tree real_type
, is64bit
, gnu_expr32
, gnu_expr64
;
3346 real_type
= TREE_TYPE (gnu_type
);
3348 real_type
= gnu_type
;
3350 /* If the field name is not MBO, it must be 32-bit and no alternate.
3351 Otherwise primary must be 64-bit and alternate 32-bit. */
3352 if (strcmp (mbostr
, "MBO") != 0)
3354 tree ret
= convert_vms_descriptor32 (real_type
, gnu_expr
, gnat_subprog
);
3356 ret
= build_unary_op (ADDR_EXPR
, gnu_type
, ret
);
3360 /* Build the test for 64-bit descriptor. */
3361 mbo
= build3 (COMPONENT_REF
, TREE_TYPE (mbo
), desc
, mbo
, NULL_TREE
);
3362 mbmo
= build3 (COMPONENT_REF
, TREE_TYPE (mbmo
), desc
, mbmo
, NULL_TREE
);
3364 = build_binary_op (TRUTH_ANDIF_EXPR
, boolean_type_node
,
3365 build_binary_op (EQ_EXPR
, boolean_type_node
,
3366 convert (integer_type_node
, mbo
),
3368 build_binary_op (EQ_EXPR
, boolean_type_node
,
3369 convert (integer_type_node
, mbmo
),
3370 integer_minus_one_node
));
3372 /* Build the 2 possible end results. */
3373 gnu_expr64
= convert_vms_descriptor64 (real_type
, gnu_expr
, gnat_subprog
);
3375 gnu_expr64
= build_unary_op (ADDR_EXPR
, gnu_type
, gnu_expr64
);
3376 gnu_expr
= fold_convert (gnu_expr_alt_type
, gnu_expr
);
3377 gnu_expr32
= convert_vms_descriptor32 (real_type
, gnu_expr
, gnat_subprog
);
3379 gnu_expr32
= build_unary_op (ADDR_EXPR
, gnu_type
, gnu_expr32
);
3381 return build3 (COND_EXPR
, gnu_type
, is64bit
, gnu_expr64
, gnu_expr32
);
/* Build a type to be used to represent an aliased object whose nominal type
   is an unconstrained array.  This consists of a RECORD_TYPE containing a
   field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
   If ARRAY_TYPE is that of an unconstrained array, this is used to represent
   an arbitrary unconstrained object.  Use NAME as the name of the record.
   DEBUG_INFO_P is true if we need to write debug information for the type.  */

tree
build_unc_object_type (tree template_type, tree object_type, tree name,
                       bool debug_info_p)
{
  tree type = make_node (RECORD_TYPE);
  tree template_field
    = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
                         NULL_TREE, NULL_TREE, 0, 1);
  tree array_field
    = create_field_decl (get_identifier ("ARRAY"), object_type, type,
                         NULL_TREE, NULL_TREE, 0, 1);

  TYPE_NAME (type) = name;
  TYPE_CONTAINS_TEMPLATE_P (type) = 1;
  DECL_CHAIN (template_field) = array_field;
  finish_record_type (type, template_field, 0, true);

  /* Declare it now since it will never be declared otherwise.  This is
     necessary to ensure that its subtrees are properly marked.  */
  create_type_decl (name, type, NULL, true, debug_info_p, Empty);

  return type;
}
/* Same, taking a thin or fat pointer type instead of a template type.  */

tree
build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
                                tree name, bool debug_info_p)
{
  tree template_type;

  gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));

  template_type
    = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
       ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
       : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));

  return
    build_unc_object_type (template_type, object_type, name, debug_info_p);
}
/* Shift the component offsets within an unconstrained object TYPE to make it
   suitable for use as a designated type for thin pointers.  */

void
shift_unc_components_for_thin_pointers (tree type)
{
  /* Thin pointer values designate the ARRAY data of an unconstrained object,
     allocated past the BOUNDS template.  The designated type is adjusted to
     have ARRAY at position zero and the template at a negative offset, so
     that COMPONENT_REFs on (*thin_ptr) designate the proper location.  */

  tree bounds_field = TYPE_FIELDS (type);
  tree array_field  = DECL_CHAIN (TYPE_FIELDS (type));

  DECL_FIELD_OFFSET (bounds_field)
    = size_binop (MINUS_EXPR, size_zero_node, byte_position (array_field));

  DECL_FIELD_OFFSET (array_field) = size_zero_node;
  DECL_FIELD_BIT_OFFSET (array_field) = bitsize_zero_node;
}
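
/* Illustrative Ada-like pseudo-notation of the resulting object layout once
   the shift has been applied for thin pointers:

     BOUNDS : the bounds template   -- now at a negative offset
     ARRAY  : the array data        -- now at offset zero

   so that a thin pointer designates the ARRAY data directly while the bounds
   remain reachable at a fixed negative offset from it.  */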
3455 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
   In the normal case this is just two adjustments, but we have more to
   do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE.  */

void
update_pointer_to (tree old_type, tree new_type)
{
  tree ptr = TYPE_POINTER_TO (old_type);
  tree ref = TYPE_REFERENCE_TO (old_type);
  tree t;

  /* If this is the main variant, process all the other variants first.  */
  if (TYPE_MAIN_VARIANT (old_type) == old_type)
    for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
      update_pointer_to (t, new_type);

  /* If no pointers and no references, we are done.  */
  if (!ptr && !ref)
    return;

  /* Merge the old type qualifiers in the new type.

     Each old variant has qualifiers for specific reasons, and the new
     designated type as well.  Each set of qualifiers represents useful
     information grabbed at some point, and merging the two simply unifies
     these inputs into the final type description.

     Consider for instance a volatile type frozen after an access to constant
     type designating it; after the designated type's freeze, we get here with
     a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
     when the access type was processed.  We will make a volatile and readonly
     designated type, because that's what it really is.

     We might also get here for a non-dummy OLD_TYPE variant with different
     qualifiers than those of NEW_TYPE, for instance in some cases of pointers
     to private record type elaboration (see the comments around the call to
     this routine in gnat_to_gnu_entity <E_Access_Type>).  We have to merge
     the qualifiers in those cases too, to avoid accidentally discarding the
     initial set, and will often end up with OLD_TYPE == NEW_TYPE then.  */
  new_type
    = build_qualified_type (new_type,
			    TYPE_QUALS (old_type) | TYPE_QUALS (new_type));

  /* If old type and new type are identical, there is nothing to do.  */
  if (old_type == new_type)
    return;

  /* Otherwise, first handle the simple case.  */
  if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
    {
      tree new_ptr, new_ref;

      /* If pointer or reference already points to new type, nothing to do.
	 This can happen as update_pointer_to can be invoked multiple times
	 on the same couple of types because of the type variants.  */
      if ((ptr && TREE_TYPE (ptr) == new_type)
	  || (ref && TREE_TYPE (ref) == new_type))
	return;

      /* Chain PTR and its variants at the end.  */
      new_ptr = TYPE_POINTER_TO (new_type);
      if (new_ptr)
	{
	  while (TYPE_NEXT_PTR_TO (new_ptr))
	    new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
	  TYPE_NEXT_PTR_TO (new_ptr) = ptr;
	}
      else
	TYPE_POINTER_TO (new_type) = ptr;

      /* Now adjust them.  */
      for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
	for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
	  {
	    TREE_TYPE (t) = new_type;
	    if (TYPE_NULL_BOUNDS (t))
	      TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
	  }

      /* If we have adjusted named types, finalize them.  This is necessary
	 since we had forced a DWARF typedef for them in gnat_pushdecl.  */
      for (ptr = TYPE_POINTER_TO (old_type); ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
	if (TYPE_NAME (ptr) && TREE_CODE (TYPE_NAME (ptr)) == TYPE_DECL)
	  rest_of_type_decl_compilation (TYPE_NAME (ptr));

      /* Chain REF and its variants at the end.  */
      new_ref = TYPE_REFERENCE_TO (new_type);
      if (new_ref)
	{
	  while (TYPE_NEXT_REF_TO (new_ref))
	    new_ref = TYPE_NEXT_REF_TO (new_ref);
	  TYPE_NEXT_REF_TO (new_ref) = ref;
	}
      else
	TYPE_REFERENCE_TO (new_type) = ref;

      /* Now adjust them.  */
      for (; ref; ref = TYPE_NEXT_REF_TO (ref))
	for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
	  TREE_TYPE (t) = new_type;

      TYPE_POINTER_TO (old_type) = NULL_TREE;
      TYPE_REFERENCE_TO (old_type) = NULL_TREE;
    }

  /* Now deal with the unconstrained array case.  In this case the pointer
     is actually a record where both fields are pointers to dummy nodes.
     Turn them into pointers to the correct types using update_pointer_to.
     Likewise for the pointer to the object record (thin pointer).  */
  else
    {
      tree new_ptr = TYPE_POINTER_TO (new_type);

      gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));

      /* If PTR already points to NEW_TYPE, nothing to do.  This can happen
	 since update_pointer_to can be invoked multiple times on the same
	 couple of types because of the type variants.  */
      if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
	return;

      update_pointer_to
	(TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
	 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));

      update_pointer_to
	(TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
	 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));

      update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
			 TYPE_OBJECT_RECORD_TYPE (new_type));

      TYPE_POINTER_TO (old_type) = NULL_TREE;
    }
}
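
/* Illustrative sketch (not part of the original source): the rechaining done
   above can be pictured on a plain singly-linked list.  The node type, the
   field names and the retarget_chain function below are invented for
   illustration only; they merely mimic how PTR and its TYPE_NEXT_PTR_TO
   chain are hooked onto the end of NEW_TYPE's existing chain and then
   redirected.  The sketch is fenced out of the build on purpose.  */
#if 0
#include <stdio.h>
#include <stddef.h>

struct ptr_node
{
  int designated;           /* Stand-in for TREE_TYPE.  */
  struct ptr_node *next;    /* Stand-in for TYPE_NEXT_PTR_TO.  */
};

/* Append the OLD chain at the end of *HEAD and make every node in the
   combined chain designate NEW_DESIGNATED.  */
static void
retarget_chain (struct ptr_node **head, struct ptr_node *old,
		int new_designated)
{
  struct ptr_node *n;

  if (*head)
    {
      for (n = *head; n->next; n = n->next)
	;
      n->next = old;
    }
  else
    *head = old;

  for (n = *head; n; n = n->next)
    n->designated = new_designated;
}

int
main (void)
{
  struct ptr_node b = { 1, NULL };
  struct ptr_node a = { 1, &b };
  struct ptr_node *head = NULL;
  struct ptr_node *n;

  retarget_chain (&head, &a, 2);
  for (n = head; n; n = n->next)
    printf ("%d\n", n->designated);   /* Prints 2 twice.  */
  return 0;
}
#endif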

/* Convert EXPR, a pointer to a constrained array, into a pointer to an
   unconstrained one.  This involves making or finding a template.  */

static tree
convert_to_fat_pointer (tree type, tree expr)
{
  tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
  tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
  tree etype = TREE_TYPE (expr);
  tree template_tree;
  VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 2);

  /* If EXPR is null, make a fat pointer that contains a null pointer to the
     array (compare_fat_pointers ensures that this is the full discriminant)
     and a valid pointer to the bounds.  This latter property is necessary
     since the compiler can hoist the load of the bounds done through it.  */
  if (integer_zerop (expr))
    {
      tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
      tree null_bounds, t;

      if (TYPE_NULL_BOUNDS (ptr_template_type))
	null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
      else
	{
	  /* The template type can still be dummy at this point so we build an
	     empty constructor.  The middle-end will fill it in with zeros.  */
	  t = build_constructor (template_type, NULL);
	  TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
	  null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
	  SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
	}

      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
			      fold_convert (p_array_type, null_pointer_node));
      CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
      t = build_constructor (type, v);
      /* Do not set TREE_CONSTANT so as to force T to static memory.  */
      TREE_CONSTANT (t) = 0;
      TREE_STATIC (t) = 1;

      return t;
    }

  /* If EXPR is a thin pointer, make template and data from the record.  */
  else if (TYPE_IS_THIN_POINTER_P (etype))
    {
      tree fields = TYPE_FIELDS (TREE_TYPE (etype));

      expr = gnat_protect_expr (expr);
      if (TREE_CODE (expr) == ADDR_EXPR)
	expr = TREE_OPERAND (expr, 0);
      else
	expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);

      template_tree = build_component_ref (expr, NULL_TREE, fields, false);
      expr = build_unary_op (ADDR_EXPR, NULL_TREE,
			     build_component_ref (expr, NULL_TREE,
						  DECL_CHAIN (fields), false));
    }

  /* Otherwise, build the constructor for the template.  */
  else
    template_tree = build_template (template_type, TREE_TYPE (etype), expr);

  /* The final result is a constructor for the fat pointer.

     If EXPR is an argument of a foreign convention subprogram, the type it
     points to is directly the component type.  In this case, the expression
     type may not match the corresponding FIELD_DECL type at this point, so we
     call "convert" here to fix that up if necessary.  This type consistency is
     required, for instance because it ensures that possible later folding of
     COMPONENT_REFs against this constructor always yields something of the
     same type as the initial reference.

     Note that the call to "build_template" above is still fine because it
     will only refer to the provided TEMPLATE_TYPE in this case.  */
  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
			  convert (p_array_type, expr));
  CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
			  build_unary_op (ADDR_EXPR, NULL_TREE,
					  template_tree));
  return gnat_build_constructor (type, v);
}
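
/* Illustrative sketch (not part of the original source): an Ada fat pointer
   is essentially a two-field record pairing the data pointer with a pointer
   to a bounds "template".  The struct and helper names below are invented
   for illustration; they only model the shape built by the constructor
   above.  Fenced out of the build on purpose.  */
#if 0
#include <stdio.h>

struct bounds { int first, last; };              /* Stand-in for the template.  */
struct fat_pointer { int *data; struct bounds *bounds; };

static struct fat_pointer
make_fat_pointer (int *data, struct bounds *b)
{
  struct fat_pointer fp;
  fp.data = data;     /* First field: pointer to the array.  */
  fp.bounds = b;      /* Second field: pointer to the bounds.  */
  return fp;
}

int
main (void)
{
  int a[5] = { 1, 2, 3, 4, 5 };
  struct bounds b = { 1, 5 };
  struct fat_pointer fp = make_fat_pointer (a, &b);
  printf ("length = %d\n", fp.bounds->last - fp.bounds->first + 1);
  return 0;
}
#endif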

/* Convert to a thin pointer type, TYPE.  The only thing we know how to
   convert is something that is a fat pointer, so convert EXPR to it first
   if it is not already a fat pointer.  */

static tree
convert_to_thin_pointer (tree type, tree expr)
{
  if (!TYPE_IS_FAT_POINTER_P (TREE_TYPE (expr)))
    expr
      = convert_to_fat_pointer
	(TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);

  /* We get the pointer to the data and use a NOP_EXPR to make it the
     proper GCC type.  */
  expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
			      false);
  expr = build1 (NOP_EXPR, type, expr);

  return expr;
}
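
/* Illustrative sketch (not part of the original source): a thin pointer can
   be modelled as a single pointer to a record holding the bounds template
   immediately followed by the data, so that one word suffices to reach both.
   The types below are invented for illustration and fenced out of the
   build.  */
#if 0
#include <stdio.h>

struct thin_bounds { int first, last; };
struct unconstrained_object           /* Stand-in for the object record.  */
{
  struct thin_bounds template_part;   /* Bounds stored just before the data.  */
  int data[5];
};

int
main (void)
{
  struct unconstrained_object obj = { { 1, 5 }, { 1, 2, 3, 4, 5 } };
  struct unconstrained_object *thin = &obj;   /* One word: the thin pointer.  */

  /* Both the bounds and the data are reachable through the single pointer.  */
  printf ("first = %d, data[0] = %d\n",
	  thin->template_part.first, thin->data[0]);
  return 0;
}
#endif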

/* Create an expression whose value is that of EXPR,
   converted to type TYPE.  The TREE_TYPE of the value
   is always TYPE.  This function implements all reasonable
   conversions; callers should filter out those that are
   not permitted by the language being compiled.  */

tree
convert (tree type, tree expr)
{
  tree etype = TREE_TYPE (expr);
  enum tree_code ecode = TREE_CODE (etype);
  enum tree_code code = TREE_CODE (type);

  /* If the expression is already of the right type, we are done.  */
  if (etype == type)
    return expr;

  /* If both input and output have padding and are of variable size, do this
     as an unchecked conversion.  Likewise if one is a mere variant of the
     other, so we avoid a pointless unpad/repad sequence.  */
  else if (code == RECORD_TYPE && ecode == RECORD_TYPE
	   && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
	   && (!TREE_CONSTANT (TYPE_SIZE (type))
	       || !TREE_CONSTANT (TYPE_SIZE (etype))
	       || gnat_types_compatible_p (type, etype)
	       || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
		  == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
    ;

  /* If the output type has padding, convert to the inner type and make a
     constructor to build the record, unless a variable size is involved.  */
  else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
    {
      VEC(constructor_elt,gc) *v;

      /* If we previously converted from another type and our type is
	 of variable size, remove the conversion to avoid the need for
	 variable-sized temporaries.  Likewise for a conversion between
	 original and packable version.  */
      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
	  && (!TREE_CONSTANT (TYPE_SIZE (type))
	      || (ecode == RECORD_TYPE
		  && TYPE_NAME (etype)
		     == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
	expr = TREE_OPERAND (expr, 0);

      /* If we are just removing the padding from expr, convert the original
	 object if we have variable size in order to avoid the need for some
	 variable-sized temporaries.  Likewise if the padding is a variant
	 of the other, so we avoid a pointless unpad/repad sequence.  */
      if (TREE_CODE (expr) == COMPONENT_REF
	  && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
	  && (!TREE_CONSTANT (TYPE_SIZE (type))
	      || gnat_types_compatible_p (type,
					  TREE_TYPE (TREE_OPERAND (expr, 0)))
	      || (ecode == RECORD_TYPE
		  && TYPE_NAME (etype)
		     == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
	return convert (type, TREE_OPERAND (expr, 0));

      /* If the inner type is of self-referential size and the expression type
	 is a record, do this as an unchecked conversion.  But first pad the
	 expression if possible to have the same size on both sides.  */
      if (ecode == RECORD_TYPE
	  && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
	{
	  if (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST)
	    expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
					    false, false, false, true),
			    expr);
	  return unchecked_convert (type, expr, false);
	}

      /* If we are converting between array types with variable size, do the
	 final conversion as an unchecked conversion, again to avoid the need
	 for some variable-sized temporaries.  If valid, this conversion is
	 very likely purely technical and without real effects.  */
      if (ecode == ARRAY_TYPE
	  && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
	  && !TREE_CONSTANT (TYPE_SIZE (etype))
	  && !TREE_CONSTANT (TYPE_SIZE (type)))
	return unchecked_convert (type,
				  convert (TREE_TYPE (TYPE_FIELDS (type)),
					   expr),
				  false);

      v = VEC_alloc (constructor_elt, gc, 1);
      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
			      convert (TREE_TYPE (TYPE_FIELDS (type)), expr));
      return gnat_build_constructor (type, v);
    }

  /* If the input type has padding, remove it and convert to the output type.
     The conditions ordering is arranged to ensure that the output type is not
     a padding type here, as it is not clear whether the conversion would
     always be correct if this was to happen.  */
  else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
    {
      tree unpadded;

      /* If we have just converted to this padded type, just get the
	 inner expression.  */
      if (TREE_CODE (expr) == CONSTRUCTOR
	  && !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (expr))
	  && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->index
	     == TYPE_FIELDS (etype))
	unpadded
	  = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->value;

      /* Otherwise, build an explicit component reference.  */
      else
	unpadded
	  = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);

      return convert (type, unpadded);
    }

  /* If the input is a biased type, adjust first.  */
  if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
    return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
				       fold_convert (TREE_TYPE (etype),
						     expr),
				       TYPE_MIN_VALUE (etype)));

  /* If the input is a justified modular type, we need to extract the actual
     object before converting it to any other type with the exceptions of an
     unconstrained array or of a mere type variant.  It is useful to avoid the
     extraction and conversion in the type variant case because it could end
     up replacing a VAR_DECL expr by a constructor and we might be about to
     take the address of the result.  */
  if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
      && code != UNCONSTRAINED_ARRAY_TYPE
      && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
    return convert (type, build_component_ref (expr, NULL_TREE,
						TYPE_FIELDS (etype), false));

  /* If converting to a type that contains a template, convert to the data
     type and then build the template.  */
  if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
    {
      tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
      VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 2);

      /* If the source already has a template, get a reference to the
	 associated array only, as we are going to rebuild a template
	 for the target type anyway.  */
      expr = maybe_unconstrained_array (expr);

      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
			      build_template (TREE_TYPE (TYPE_FIELDS (type)),
					      obj_type, NULL_TREE));
      CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
			      convert (obj_type, expr));
      return gnat_build_constructor (type, v);
    }

  /* There are some cases of expressions that we process specially.  */
  switch (TREE_CODE (expr))
    {
    case NULL_EXPR:
      /* Just set its type here.  For TRANSFORM_EXPR, we will do the actual
	 conversion in gnat_expand_expr.  NULL_EXPR does not represent
	 an actual value, so no conversion is needed.  */
      expr = copy_node (expr);
      TREE_TYPE (expr) = type;
      return expr;

    case STRING_CST:
      /* If we are converting a STRING_CST to another constrained array type,
	 just make a new one in the proper type.  */
      if (code == ecode && AGGREGATE_TYPE_P (etype)
	  && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
	       && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
	{
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  return expr;
	}
      break;

    case VECTOR_CST:
      /* If we are converting a VECTOR_CST to a mere variant type, just make
	 a new one in the proper type.  */
      if (code == ecode && gnat_types_compatible_p (type, etype))
	{
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  return expr;
	}

    case CONSTRUCTOR:
      /* If we are converting a CONSTRUCTOR to a mere variant type, just make
	 a new one in the proper type.  */
      if (code == ecode && gnat_types_compatible_p (type, etype))
	{
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  return expr;
	}

      /* Likewise for a conversion between original and packable version, or
	 conversion between types of the same size and with the same list of
	 fields, but we have to work harder to preserve type consistency.  */
      if (code == ecode
	  && code == RECORD_TYPE
	  && (TYPE_NAME (type) == TYPE_NAME (etype)
	      || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
	{
	  VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
	  unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
	  VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, len);
	  tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
	  unsigned HOST_WIDE_INT idx;
	  tree index, value;

	  /* Whether we need to clear TREE_CONSTANT et al. on the output
	     constructor when we convert in place.  */
	  bool clear_constant = false;

	  FOR_EACH_CONSTRUCTOR_ELT (e, idx, index, value)
	    {
	      constructor_elt *elt;
	      /* We expect only simple constructors.  */
	      if (!SAME_FIELD_P (index, efield))
		break;
	      /* The field must be the same.  */
	      if (!SAME_FIELD_P (efield, field))
		break;
	      elt = VEC_quick_push (constructor_elt, v, NULL);
	      elt->index = field;
	      elt->value = convert (TREE_TYPE (field), value);

	      /* If packing has made this field a bitfield and the input
		 value couldn't be emitted statically any more, we need to
		 clear TREE_CONSTANT on our output.  */
	      if (!clear_constant
		  && TREE_CONSTANT (expr)
		  && !CONSTRUCTOR_BITFIELD_P (efield)
		  && CONSTRUCTOR_BITFIELD_P (field)
		  && !initializer_constant_valid_for_bitfield_p (value))
		clear_constant = true;

	      efield = DECL_CHAIN (efield);
	      field = DECL_CHAIN (field);
	    }

	  /* If we have been able to match and convert all the input fields
	     to their output type, convert in place now.  We'll fallback to a
	     view conversion downstream otherwise.  */
	  if (idx == len)
	    {
	      expr = copy_node (expr);
	      TREE_TYPE (expr) = type;
	      CONSTRUCTOR_ELTS (expr) = v;
	      if (clear_constant)
		TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
	      return expr;
	    }
	}

      /* Likewise for a conversion between array type and vector type with a
	 compatible representative array.  */
      else if (code == VECTOR_TYPE
	       && ecode == ARRAY_TYPE
	       && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
					   etype))
	{
	  VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
	  unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
	  VEC(constructor_elt,gc) *v;
	  unsigned HOST_WIDE_INT ix;
	  tree value;

	  /* Build a VECTOR_CST from a *constant* array constructor.  */
	  if (TREE_CONSTANT (expr))
	    {
	      bool constant_p = true;

	      /* Iterate through elements and check if all constructor
		 elements are *_CSTs.  */
	      FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
		if (!CONSTANT_CLASS_P (value))
		  {
		    constant_p = false;
		    break;
		  }

	      if (constant_p)
		return build_vector_from_ctor (type,
					       CONSTRUCTOR_ELTS (expr));
	    }

	  /* Otherwise, build a regular vector constructor.  */
	  v = VEC_alloc (constructor_elt, gc, len);
	  FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
	    {
	      constructor_elt *elt = VEC_quick_push (constructor_elt, v, NULL);
	      elt->index = NULL_TREE;
	      elt->value = value;
	    }
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  CONSTRUCTOR_ELTS (expr) = v;
	  return expr;
	}
      break;

    case UNCONSTRAINED_ARRAY_REF:
      /* First retrieve the underlying array.  */
      expr = maybe_unconstrained_array (expr);
      etype = TREE_TYPE (expr);
      ecode = TREE_CODE (etype);
      break;

    case VIEW_CONVERT_EXPR:
      {
	/* GCC 4.x is very sensitive to type consistency overall, and view
	   conversions thus are very frequent.  Even though just "convert"ing
	   the inner operand to the output type is fine in most cases, it
	   might expose unexpected input/output type mismatches in special
	   circumstances so we avoid such recursive calls when we can.  */
	tree op0 = TREE_OPERAND (expr, 0);

	/* If we are converting back to the original type, we can just
	   lift the input conversion.  This is a common occurrence with
	   switches back-and-forth amongst type variants.  */
	if (type == TREE_TYPE (op0))
	  return op0;

	/* Otherwise, if we're converting between two aggregate or vector
	   types, we might be allowed to substitute the VIEW_CONVERT_EXPR
	   target type in place or to just convert the inner expression.  */
	if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
	    || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
	  {
	    /* If we are converting between mere variants, we can just
	       substitute the VIEW_CONVERT_EXPR in place.  */
	    if (gnat_types_compatible_p (type, etype))
	      return build1 (VIEW_CONVERT_EXPR, type, op0);

	    /* Otherwise, we may just bypass the input view conversion unless
	       one of the types is a fat pointer, which is handled by
	       specialized code below which relies on exact type matching.  */
	    else if (!TYPE_IS_FAT_POINTER_P (type)
		     && !TYPE_IS_FAT_POINTER_P (etype))
	      return convert (type, op0);
	  }
      }
      break;

    default:
      break;
    }

  /* Check for converting to a pointer to an unconstrained array.  */
  if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
    return convert_to_fat_pointer (type, expr);

  /* If we are converting between two aggregate or vector types that are mere
     variants, just make a VIEW_CONVERT_EXPR.  Likewise when we are converting
     to a vector type from its representative array type.  */
  else if ((code == ecode
	    && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
	    && gnat_types_compatible_p (type, etype))
	   || (code == VECTOR_TYPE
	       && ecode == ARRAY_TYPE
	       && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
					   etype)))
    return build1 (VIEW_CONVERT_EXPR, type, expr);

  /* If we are converting between tagged types, try to upcast properly.  */
  else if (ecode == RECORD_TYPE && code == RECORD_TYPE
	   && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type))
    {
      tree child_etype = etype;
      do {
	tree field = TYPE_FIELDS (child_etype);
	if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
	  return build_component_ref (expr, NULL_TREE, field, false);
	child_etype = TREE_TYPE (field);
      } while (TREE_CODE (child_etype) == RECORD_TYPE);
    }

  /* If we are converting from a smaller form of record type back to it, just
     make a VIEW_CONVERT_EXPR.  But first pad the expression to have the same
     size on both sides.  */
  else if (ecode == RECORD_TYPE && code == RECORD_TYPE
	   && smaller_form_type_p (etype, type))
    {
      expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
				      false, false, false, true),
		      expr);
      return build1 (VIEW_CONVERT_EXPR, type, expr);
    }

  /* In all other cases of related types, make a NOP_EXPR.  */
  else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
    return fold_convert (type, expr);

  switch (code)
    {
    case VOID_TYPE:
      return fold_build1 (CONVERT_EXPR, type, expr);

    case INTEGER_TYPE:
      if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
	  && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
	      || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
	return unchecked_convert (type, expr, false);
      else if (TYPE_BIASED_REPRESENTATION_P (type))
	return fold_convert (type,
			     fold_build2 (MINUS_EXPR, TREE_TYPE (type),
					  convert (TREE_TYPE (type), expr),
					  TYPE_MIN_VALUE (type)));

      /* ... fall through ... */

    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      /* If we are converting an additive expression to an integer type
	 with lower precision, be wary of the optimization that can be
	 applied by convert_to_integer.  There are 2 problematic cases:
	   - if the first operand was originally of a biased type,
	     because we could be recursively called to convert it
	     to an intermediate type and thus rematerialize the
	     additive operator endlessly,
	   - if the expression contains a placeholder, because an
	     intermediate conversion that changes the sign could
	     be inserted and thus introduce an artificial overflow
	     at compile time when the placeholder is substituted.  */
      if (code == INTEGER_TYPE
	  && ecode == INTEGER_TYPE
	  && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
	  && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
	{
	  tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);

	  if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
	       && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
	      || CONTAINS_PLACEHOLDER_P (expr))
	    return build1 (NOP_EXPR, type, expr);
	}

      return fold (convert_to_integer (type, expr));

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* If converting between two pointers to records denoting
	 both a template and type, adjust if needed to account
	 for any differing offsets, since one might be negative.  */
      if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
	{
	  tree bit_diff
	    = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
			   bit_position (TYPE_FIELDS (TREE_TYPE (type))));
	  tree byte_diff
	    = size_binop (CEIL_DIV_EXPR, bit_diff, sbitsize_unit_node);
	  expr = build1 (NOP_EXPR, type, expr);
	  TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
	  if (integer_zerop (byte_diff))
	    return expr;

	  return build_binary_op (POINTER_PLUS_EXPR, type, expr,
				  fold (convert (sizetype, byte_diff)));
	}

      /* If converting to a thin pointer, handle specially.  */
      if (TYPE_IS_THIN_POINTER_P (type)
	  && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
	return convert_to_thin_pointer (type, expr);

      /* If converting fat pointer to normal pointer, get the pointer to the
	 array and then convert it.  */
      else if (TYPE_IS_FAT_POINTER_P (etype))
	expr
	  = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);

      return fold (convert_to_pointer (type, expr));

    case REAL_TYPE:
      return fold (convert_to_real (type, expr));

    case RECORD_TYPE:
      if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
	{
	  VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 1);

	  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
				  convert (TREE_TYPE (TYPE_FIELDS (type)),
					   expr));
	  return gnat_build_constructor (type, v);
	}

      /* ... fall through ... */

    case ARRAY_TYPE:
      /* In these cases, assume the front-end has validated the conversion.
	 If the conversion is valid, it will be a bit-wise conversion, so
	 it can be viewed as an unchecked conversion.  */
      return unchecked_convert (type, expr, false);

    case UNION_TYPE:
      /* This is either a conversion between a tagged type and some
	 subtype, which we have to mark as a UNION_TYPE because of
	 overlapping fields, or a conversion of an Unchecked_Union.  */
      return unchecked_convert (type, expr, false);

    case UNCONSTRAINED_ARRAY_TYPE:
      /* If the input is a VECTOR_TYPE, convert to the representative
	 array type first.  */
      if (ecode == VECTOR_TYPE)
	{
	  expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
	  etype = TREE_TYPE (expr);
	  ecode = TREE_CODE (etype);
	}

      /* If EXPR is a constrained array, take its address, convert it to a
	 fat pointer, and then dereference it.  Likewise if EXPR is a
	 record containing both a template and a constrained array.
	 Note that a record representing a justified modular type
	 always represents a packed constrained array.  */
      if (ecode == ARRAY_TYPE
	  || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
	  || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
	  || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
	return
	  build_unary_op
	    (INDIRECT_REF, NULL_TREE,
	     convert_to_fat_pointer (TREE_TYPE (type),
				     build_unary_op (ADDR_EXPR,
						     NULL_TREE, expr)));

      /* Do something very similar for converting one unconstrained
	 array to another.  */
      else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
	return
	  build_unary_op (INDIRECT_REF, NULL_TREE,
			  convert (TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR,
						   NULL_TREE, expr)));

    case COMPLEX_TYPE:
      return fold (convert_to_complex (type, expr));

    default:
      gcc_unreachable ();
    }
}
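
/* Illustrative sketch (not part of the original source): the biased
   representation handled above stores VALUE - TYPE'First in the object, so
   converting to a regular integer adds the bias back and converting from
   one subtracts it.  The names below are invented for illustration and the
   code is fenced out of the build.  */
#if 0
#include <stdio.h>

#define BIAS 100   /* Stand-in for TYPE_MIN_VALUE of the biased subtype.  */

static unsigned int
encode_biased (int value)
{
  return (unsigned int) (value - BIAS);   /* What the biased object holds.  */
}

static int
decode_biased (unsigned int repr)
{
  return (int) repr + BIAS;               /* Recover the logical value.  */
}

int
main (void)
{
  unsigned int repr = encode_biased (103);                /* Stored as 3.  */
  printf ("%u -> %d\n", repr, decode_biased (repr));      /* "3 -> 103".  */
  return 0;
}
#endif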

/* Create an expression whose value is that of EXPR converted to the common
   index type, which is sizetype.  EXPR is supposed to be in the base type
   of the GNAT index type.  Calling it is equivalent to doing

     convert (sizetype, expr)

   but we try to distribute the type conversion with the knowledge that EXPR
   cannot overflow in its type.  This is a best-effort approach and we fall
   back to the above expression as soon as difficulties are encountered.

   This is necessary to overcome issues that arise when the GNAT base index
   type and the GCC common index type (sizetype) don't have the same size,
   which is quite frequent on 64-bit architectures.  In this case, and if
   the GNAT base index type is signed but the iteration type of the loop has
   been forced to unsigned, the loop scalar evolution engine cannot compute
   a simple evolution for the general induction variables associated with the
   array indices, because it will preserve the wrap-around semantics in the
   unsigned type of their "inner" part.  As a result, many loop optimizations
   are blocked.

   The solution is to use a special (basic) induction variable that is at
   least as large as sizetype, and to express the aforementioned general
   induction variables in terms of this induction variable, eliminating
   the problematic intermediate truncation to the GNAT base index type.
   This is possible as long as the original expression doesn't overflow
   and if the middle-end hasn't introduced artificial overflows in the
   course of the various simplifications it can make to the expression.  */

tree
convert_to_index_type (tree expr)
{
  enum tree_code code = TREE_CODE (expr);
  tree type = TREE_TYPE (expr);

  /* If the type is unsigned, overflow is allowed so we cannot be sure that
     EXPR doesn't overflow.  Keep it simple if optimization is disabled.  */
  if (TYPE_UNSIGNED (type) || !optimize)
    return convert (sizetype, expr);

  switch (code)
    {
    case VAR_DECL:
      /* The main effect of the function: replace a loop parameter with its
	 associated special induction variable.  */
      if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
	expr = DECL_INDUCTION_VAR (expr);
      break;

    CASE_CONVERT:
      {
	tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
	/* Bail out as soon as we suspect some sort of type frobbing.  */
	if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
	    || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
	  break;
      }

      /* ... fall through ... */

    case NON_LVALUE_EXPR:
      return fold_build1 (code, sizetype,
			  convert_to_index_type (TREE_OPERAND (expr, 0)));

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      return fold_build2 (code, sizetype,
			  convert_to_index_type (TREE_OPERAND (expr, 0)),
			  convert_to_index_type (TREE_OPERAND (expr, 1)));

    case COMPOUND_EXPR:
      return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
			  convert_to_index_type (TREE_OPERAND (expr, 1)));

    case COND_EXPR:
      return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
			  convert_to_index_type (TREE_OPERAND (expr, 1)),
			  convert_to_index_type (TREE_OPERAND (expr, 2)));

    default:
      break;
    }

  return convert (sizetype, expr);
}
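
/* Illustrative sketch (not part of the original source): the distribution
   performed above relies on the fact that, as long as the narrow computation
   cannot overflow, widening before or after the arithmetic yields the same
   value, e.g. (long) (a + b) == (long) a + (long) b.  The demo below is
   invented for illustration and fenced out of the build.  */
#if 0
#include <stdio.h>

int
main (void)
{
  int a = 1000, b = 2000;                    /* No overflow in int here.  */
  long widened_after = (long) (a + b);       /* Convert the whole expression.  */
  long widened_before = (long) a + (long) b; /* Distribute the conversion.  */
  printf ("%ld %ld\n", widened_after, widened_before);   /* Both print 3000.  */
  return 0;
}
#endif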

/* Remove all conversions that are done in EXP.  This includes converting
   from a padded type or to a justified modular type.  If TRUE_ADDRESS
   is true, always return the address of the containing object even if
   the address is not bit-aligned.  */

tree
remove_conversions (tree exp, bool true_address)
{
  switch (TREE_CODE (exp))
    {
    case CONSTRUCTOR:
      if (true_address
	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
	  && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
	return
	  remove_conversions (VEC_index (constructor_elt,
					 CONSTRUCTOR_ELTS (exp), 0)->value,
			      true);
      break;

    case COMPONENT_REF:
      if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
	return remove_conversions (TREE_OPERAND (exp, 0), true_address);
      break;

    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      return remove_conversions (TREE_OPERAND (exp, 0), true_address);

    default:
      break;
    }

  return exp;
}

/* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
   refers to the underlying array.  If it has TYPE_CONTAINS_TEMPLATE_P,
   likewise return an expression pointing to the underlying array.  */

tree
maybe_unconstrained_array (tree exp)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE (type))
    {
    case UNCONSTRAINED_ARRAY_TYPE:
      if (code == UNCONSTRAINED_ARRAY_REF)
	{
	  const bool read_only = TREE_READONLY (exp);
	  const bool no_trap = TREE_THIS_NOTRAP (exp);

	  exp = TREE_OPERAND (exp, 0);
	  type = TREE_TYPE (exp);

	  if (TREE_CODE (exp) == COND_EXPR)
	    {
	      tree op1
		= build_unary_op (INDIRECT_REF, NULL_TREE,
				  build_component_ref (TREE_OPERAND (exp, 1),
						       NULL_TREE,
						       TYPE_FIELDS (type),
						       false));
	      tree op2
		= build_unary_op (INDIRECT_REF, NULL_TREE,
				  build_component_ref (TREE_OPERAND (exp, 2),
						       NULL_TREE,
						       TYPE_FIELDS (type),
						       false));

	      exp = build3 (COND_EXPR,
			    TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
			    TREE_OPERAND (exp, 0), op1, op2);
	    }
	  else
	    {
	      exp = build_unary_op (INDIRECT_REF, NULL_TREE,
				    build_component_ref (exp, NULL_TREE,
							 TYPE_FIELDS (type),
							 false));
	      TREE_READONLY (exp) = read_only;
	      TREE_THIS_NOTRAP (exp) = no_trap;
	    }
	}

      else if (code == NULL_EXPR)
	exp = build1 (NULL_EXPR,
		      TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
		      TREE_OPERAND (exp, 0));
      break;

    case RECORD_TYPE:
      /* If this is a padded type and it contains a template, convert to the
	 unpadded type first.  */
      if (TYPE_PADDING_P (type)
	  && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
	  && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
	{
	  exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
	  type = TREE_TYPE (exp);
	}

      if (TYPE_CONTAINS_TEMPLATE_P (type))
	{
	  exp = build_component_ref (exp, NULL_TREE,
				     DECL_CHAIN (TYPE_FIELDS (type)),
				     false);
	  type = TREE_TYPE (exp);

	  /* If the array type is padded, convert to the unpadded type.  */
	  if (TYPE_IS_PADDING_P (type))
	    exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
	}
      break;

    default:
      break;
    }

  return exp;
}

/* If EXP's type is a VECTOR_TYPE, return EXP converted to the associated
   TYPE_REPRESENTATIVE_ARRAY.  */

tree
maybe_vector_array (tree exp)
{
  tree etype = TREE_TYPE (exp);

  if (VECTOR_TYPE_P (etype))
    exp = convert (TYPE_REPRESENTATIVE_ARRAY (etype), exp);

  return exp;
}

/* Return true if EXPR is an expression that can be folded as an operand
   of a VIEW_CONVERT_EXPR.  See ada-tree.h for a complete rationale.  */

static bool
can_fold_for_view_convert_p (tree expr)
{
  tree t1, t2;

  /* The folder will fold NOP_EXPRs between integral types with the same
     precision (in the middle-end's sense).  We cannot allow it if the
     types don't have the same precision in the Ada sense as well.  */
  if (TREE_CODE (expr) != NOP_EXPR)
    return true;

  t1 = TREE_TYPE (expr);
  t2 = TREE_TYPE (TREE_OPERAND (expr, 0));

  /* Defer to the folder for non-integral conversions.  */
  if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
    return true;

  /* Only fold conversions that preserve both precisions.  */
  if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
      && operand_equal_p (rm_size (t1), rm_size (t2), 0))
    return true;

  return false;
}

/* Return an expression that does an unchecked conversion of EXPR to TYPE.
   If NOTRUNC_P is true, truncation operations should be suppressed.

   Special care is required with (source or target) integral types whose
   precision is not equal to their size, to make sure we fetch or assign
   the value bits whose location might depend on the endianness, e.g.

     Rmsize : constant := 8;
     subtype Int is Integer range 0 .. 2 ** Rmsize - 1;

     type Bit_Array is array (1 .. Rmsize) of Boolean;
     pragma Pack (Bit_Array);

     function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);

     Value : Int := 2#1000_0001#;
     Vbits : Bit_Array := To_Bit_Array (Value);

   we expect the 8 bits at Vbits'Address to always contain Value, while
   their original location depends on the endianness, at Value'Address
   on a little-endian architecture but not on a big-endian one.  */

tree
unchecked_convert (tree type, tree expr, bool notrunc_p)
{
  tree etype = TREE_TYPE (expr);
  enum tree_code ecode = TREE_CODE (etype);
  enum tree_code code = TREE_CODE (type);
  int c;

  /* If the expression is already of the right type, we are done.  */
  if (etype == type)
    return expr;

  /* If both types are integral, just do a normal conversion.
     Likewise for a conversion to an unconstrained array.  */
  if ((((INTEGRAL_TYPE_P (type)
	 && !(code == INTEGER_TYPE && TYPE_VAX_FLOATING_POINT_P (type)))
	|| (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
	|| (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
       && ((INTEGRAL_TYPE_P (etype)
	    && !(ecode == INTEGER_TYPE && TYPE_VAX_FLOATING_POINT_P (etype)))
	   || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
	   || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
      || code == UNCONSTRAINED_ARRAY_TYPE)
    {
      if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
	{
	  tree ntype = copy_type (etype);
	  TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
	  TYPE_MAIN_VARIANT (ntype) = ntype;
	  expr = build1 (NOP_EXPR, ntype, expr);
	}

      if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
	{
	  tree rtype = copy_type (type);
	  TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
	  TYPE_MAIN_VARIANT (rtype) = rtype;
	  expr = convert (rtype, expr);
	  expr = build1 (NOP_EXPR, type, expr);
	}
      else
	expr = convert (type, expr);
    }

  /* If we are converting to an integral type whose precision is not equal
     to its size, first unchecked convert to a record type that contains a
     field of the given precision.  Then extract the field.  */
  else if (INTEGRAL_TYPE_P (type)
	   && TYPE_RM_SIZE (type)
	   && 0 != compare_tree_int (TYPE_RM_SIZE (type),
				     GET_MODE_BITSIZE (TYPE_MODE (type))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
      tree field_type, field;

      if (TYPE_UNSIGNED (type))
	field_type = make_unsigned_type (prec);
      else
	field_type = make_signed_type (prec);
      SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));

      field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
				 NULL_TREE, NULL_TREE, 1, 0);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      expr = unchecked_convert (rec_type, expr, notrunc_p);
      expr = build_component_ref (expr, NULL_TREE, field, false);
      expr = fold_build1 (NOP_EXPR, type, expr);
    }

  /* Similarly if we are converting from an integral type whose precision is
     not equal to its size, first copy into a field of the given precision
     and unchecked convert the record type.  */
  else if (INTEGRAL_TYPE_P (etype)
	   && TYPE_RM_SIZE (etype)
	   && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
				     GET_MODE_BITSIZE (TYPE_MODE (etype))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
      VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 1);
      tree field_type, field;

      if (TYPE_UNSIGNED (etype))
	field_type = make_unsigned_type (prec);
      else
	field_type = make_signed_type (prec);
      SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));

      field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
				 NULL_TREE, NULL_TREE, 1, 0);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      expr = fold_build1 (NOP_EXPR, field_type, expr);
      CONSTRUCTOR_APPEND_ELT (v, field, expr);
      expr = gnat_build_constructor (rec_type, v);
      expr = unchecked_convert (type, expr, notrunc_p);
    }

  /* If we are converting from a scalar type to a type with a different size,
     we need to pad to have the same size on both sides.

     ??? We cannot do it unconditionally because unchecked conversions are
     used liberally by the front-end to implement polymorphism, e.g. in:

       S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
       return p___size__4 (p__object!(S191s.all));

     so we skip all expressions that are references.  */
  else if (!REFERENCE_CLASS_P (expr)
	   && !AGGREGATE_TYPE_P (etype)
	   && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
	   && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
    {
      if (c < 0)
	{
	  expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
					  false, false, false, true),
			  expr);
	  expr = unchecked_convert (type, expr, notrunc_p);
	}
      else
	{
	  tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
					  false, false, false, true);
	  expr = unchecked_convert (rec_type, expr, notrunc_p);
	  expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (rec_type),
				      false);
	}
    }

  /* We have a special case when we are converting between two unconstrained
     array types.  In that case, take the address, convert the fat pointer
     types, and dereference.  */
  else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
    expr = build_unary_op (INDIRECT_REF, NULL_TREE,
			   build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR, NULL_TREE,
						   expr)));

  /* Another special case is when we are converting to a vector type from its
     representative array type; this is a regular conversion.  */
  else if (code == VECTOR_TYPE
	   && ecode == ARRAY_TYPE
	   && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
				       etype))
    expr = convert (type, expr);

  else
    {
      expr = maybe_unconstrained_array (expr);
      etype = TREE_TYPE (expr);
      ecode = TREE_CODE (etype);
      if (can_fold_for_view_convert_p (expr))
	expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
      else
	expr = build1 (VIEW_CONVERT_EXPR, type, expr);
    }

  /* If the result is an integral type whose precision is not equal to its
     size, sign- or zero-extend the result.  We need not do this if the input
     is an integral type of the same precision and signedness or if the output
     is a biased type or if both the input and output are unsigned.  */
  if (!notrunc_p
      && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
      && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
      && 0 != compare_tree_int (TYPE_RM_SIZE (type),
				GET_MODE_BITSIZE (TYPE_MODE (type)))
      && !(INTEGRAL_TYPE_P (etype)
	   && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
	   && operand_equal_p (TYPE_RM_SIZE (type),
			       (TYPE_RM_SIZE (etype) != 0
				? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
			       0))
      && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
    {
      tree base_type
	= gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
      tree shift_expr
	= convert (base_type,
		   size_binop (MINUS_EXPR,
			       bitsize_int
			       (GET_MODE_BITSIZE (TYPE_MODE (type))),
			       TYPE_RM_SIZE (type)));
      expr
	= convert (type,
		   build_binary_op (RSHIFT_EXPR, base_type,
				    build_binary_op (LSHIFT_EXPR, base_type,
						     convert (base_type, expr),
						     shift_expr),
				    shift_expr));
    }

  /* An unchecked conversion should never raise Constraint_Error.  The code
     below assumes that GCC's conversion routines overflow the same way that
     the underlying hardware does.  This is probably true.  In the rare case
     when it is false, we can rely on the fact that such conversions are
     erroneous anyway.  */
  if (TREE_CODE (expr) == INTEGER_CST)
    TREE_OVERFLOW (expr) = 0;

  /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
     show no longer constant.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
      && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
			   OEP_ONLY_CONST))
    TREE_CONSTANT (expr) = 0;

  return expr;
}
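
/* Illustrative sketch (not part of the original source): an unchecked
   conversion is a bit-wise copy, which in plain C is what memcpy between
   objects of the same size gives you; the byte that ends up holding the
   value bits depends on the endianness, which is exactly the concern
   addressed by the RM-size handling above.  Invented names, fenced out of
   the build.  */
#if 0
#include <stdio.h>
#include <string.h>

int
main (void)
{
  unsigned int value = 0x81;       /* Like Value := 2#1000_0001#.  */
  unsigned char bytes[sizeof value];

  memcpy (bytes, &value, sizeof value);   /* Bit-wise "conversion".  */

  /* On a little-endian target bytes[0] is 0x81; on a big-endian one it is
     bytes[sizeof value - 1] that holds 0x81.  */
  printf ("first byte = 0x%02x\n", bytes[0]);
  return 0;
}
#endif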

/* Return the appropriate GCC tree code for the specified GNAT_TYPE,
   the latter being a record type as predicated by Is_Record_Type.  */

enum tree_code
tree_code_for_record_type (Entity_Id gnat_type)
{
  Node_Id component_list, component;

  /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
     fields are all in the variant part.  Otherwise, return RECORD_TYPE.  */
  if (!Is_Unchecked_Union (gnat_type))
    return RECORD_TYPE;

  gnat_type = Implementation_Base_Type (gnat_type);
  component_list
    = Component_List (Type_Definition (Declaration_Node (gnat_type)));

  for (component = First_Non_Pragma (Component_Items (component_list));
       Present (component);
       component = Next_Non_Pragma (component))
    if (Ekind (Defining_Entity (component)) == E_Component)
      return RECORD_TYPE;

  return UNION_TYPE;
}

/* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
   size is equal to 64 bits, or an array of such a type.  Set ALIGN_CLAUSE
   according to the presence of an alignment clause on the type or, if it
   is an array, on the component type.  */

bool
is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
{
  gnat_type = Underlying_Type (gnat_type);

  *align_clause = Present (Alignment_Clause (gnat_type));

  if (Is_Array_Type (gnat_type))
    {
      gnat_type = Underlying_Type (Component_Type (gnat_type));
      if (Present (Alignment_Clause (gnat_type)))
	*align_clause = true;
    }

  if (!Is_Floating_Point_Type (gnat_type))
    return false;

  if (UI_To_Int (Esize (gnat_type)) != 64)
    return false;

  return true;
}

/* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
   size is greater or equal to 64 bits, or an array of such a type.  Set
   ALIGN_CLAUSE according to the presence of an alignment clause on the
   type or, if it is an array, on the component type.  */

bool
is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
{
  gnat_type = Underlying_Type (gnat_type);

  *align_clause = Present (Alignment_Clause (gnat_type));

  if (Is_Array_Type (gnat_type))
    {
      gnat_type = Underlying_Type (Component_Type (gnat_type));
      if (Present (Alignment_Clause (gnat_type)))
	*align_clause = true;
    }

  if (!Is_Scalar_Type (gnat_type))
    return false;

  if (UI_To_Int (Esize (gnat_type)) < 64)
    return false;

  return true;
}

/* Return true if GNU_TYPE is suitable as the type of a non-aliased
   component of an aggregate type.  */

bool
type_for_nonaliased_component_p (tree gnu_type)
{
  /* If the type is passed by reference, we may have pointers to the
     component so it cannot be made non-aliased.  */
  if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
    return false;

  /* We used to say that any component of aggregate type is aliased
     because the front-end may take 'Reference of it.  The front-end
     has been enhanced in the meantime so as to use a renaming instead
     in most cases, but the back-end can probably take the address of
     such a component too so we go for the conservative stance.

     For instance, we might need the address of any array type, even
     if normally passed by copy, to construct a fat pointer if the
     component is used as an actual for an unconstrained formal.

     Likewise for record types: even if a specific record subtype is
     passed by copy, the parent type might be passed by ref (e.g. if
     it's of variable size) and we might take the address of a child
     component to pass to a parent formal.  We have no way to check
     for such conditions here.  */
  if (AGGREGATE_TYPE_P (gnu_type))
    return false;

  return true;
}

/* Return true if TYPE is a smaller form of ORIG_TYPE.  */

static bool
smaller_form_type_p (tree type, tree orig_type)
{
  tree size, osize;

  /* We're not interested in variants here.  */
  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
    return false;

  /* Like a variant, a packable version keeps the original TYPE_NAME.  */
  if (TYPE_NAME (type) != TYPE_NAME (orig_type))
    return false;

  size = TYPE_SIZE (type);
  osize = TYPE_SIZE (orig_type);

  if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
    return false;

  return tree_int_cst_lt (size, osize) != 0;
}

/* Perform final processing on global variables.  */

static GTY (()) tree dummy_global;

void
gnat_write_global_declarations (void)
{
  unsigned int i;
  tree iter;

  /* If we have declared types as used at the global level, insert them in
     the global hash table.  We use a dummy variable for this purpose.  */
  if (!VEC_empty (tree, types_used_by_cur_var_decl))
    {
      struct varpool_node *node;
      dummy_global
	= build_decl (BUILTINS_LOCATION, VAR_DECL, NULL_TREE, void_type_node);
      TREE_STATIC (dummy_global) = 1;
      TREE_ASM_WRITTEN (dummy_global) = 1;
      node = varpool_node (dummy_global);
      node->force_output = 1;
      varpool_mark_needed_node (node);

      while (!VEC_empty (tree, types_used_by_cur_var_decl))
	{
	  tree t = VEC_pop (tree, types_used_by_cur_var_decl);
	  types_used_by_var_decl_insert (t, dummy_global);
	}
    }

  /* Output debug information for all global type declarations first.  This
     ensures that global types whose compilation hasn't been finalized yet,
     for example pointers to Taft amendment types, have their compilation
     finalized in the right context.  */
  FOR_EACH_VEC_ELT (tree, global_decls, i, iter)
    if (TREE_CODE (iter) == TYPE_DECL)
      debug_hooks->global_decl (iter);

  /* Proceed to optimize and emit assembly.
     FIXME: shouldn't be the front end's responsibility to call this.  */
  cgraph_finalize_compilation_unit ();

  /* After cgraph has had a chance to emit everything that's going to
     be emitted, output debug information for the rest of globals.  */
  if (!seen_error ())
    {
      timevar_push (TV_SYMOUT);
      FOR_EACH_VEC_ELT (tree, global_decls, i, iter)
	if (TREE_CODE (iter) != TYPE_DECL)
	  debug_hooks->global_decl (iter);
      timevar_pop (TV_SYMOUT);
    }
}

/* ************************************************************************
 * *                          GCC builtins support                        *
 * ************************************************************************ */

/* The general scheme is fairly simple:

   For each builtin function/type to be declared, gnat_install_builtins calls
   internal facilities which eventually get to gnat_push_decl, which in turn
   tracks the so declared builtin function decls in the 'builtin_decls' global
   datastructure.  When an Intrinsic subprogram declaration is processed, we
   search this global datastructure to retrieve the associated BUILT_IN DECL
   node.  */

/* Search the chain of currently available builtin declarations for a node
   corresponding to function NAME (an IDENTIFIER_NODE).  Return the first node
   found, if any, or NULL_TREE otherwise.  */
tree
builtin_decl_for (tree name)
{
  unsigned i;
  tree decl;

  FOR_EACH_VEC_ELT (tree, builtin_decls, i, decl)
    if (DECL_NAME (decl) == name)
      return decl;

  return NULL_TREE;
}

/* The code below eventually exposes gnat_install_builtins, which declares
   the builtin types and functions we might need, either internally or as
   user accessible facilities.

   ??? This is a first implementation shot, still in rough shape.  It is
   heavily inspired from the "C" family implementation, with chunks copied
   verbatim from there.

   Two obvious TODO candidates are
   o Use a more efficient name/decl mapping scheme
   o Devise a middle-end infrastructure to avoid having to copy
     pieces between front-ends.  */

/* ----------------------------------------------------------------------- *
 *                        BUILTIN ELEMENTARY TYPES                         *
 * ----------------------------------------------------------------------- */

/* Standard data types to be used in builtin argument declarations.  */

enum c_tree_index
{
  CTI_SIGNED_SIZE_TYPE, /* For format checking only.  */
  CTI_STRING_TYPE,
  CTI_CONST_STRING_TYPE,

  CTI_MAX
};

static tree c_global_trees[CTI_MAX];

#define signed_size_type_node	c_global_trees[CTI_SIGNED_SIZE_TYPE]
#define string_type_node	c_global_trees[CTI_STRING_TYPE]
#define const_string_type_node	c_global_trees[CTI_CONST_STRING_TYPE]

/* ??? In addition to some attribute handlers, we currently don't support a
   (small) number of builtin-types, which in turn inhibits support for a
   number of builtin functions.  */
#define wint_type_node    void_type_node
#define intmax_type_node  void_type_node
#define uintmax_type_node void_type_node

/* Build the void_list_node (void_type_node having been created).  */

static tree
build_void_list_node (void)
{
  tree t = build_tree_list (NULL_TREE, void_type_node);

  return t;
}

/* Used to help initialize the builtin-types.def table.  When a type of
   the correct size doesn't exist, use error_mark_node instead of NULL.
   The latter results in segfaults even when a decl using the type doesn't
   get invoked.  */

static tree
builtin_type_for_size (int size, bool unsignedp)
{
  tree type = gnat_type_for_size (size, unsignedp);
  return type ? type : error_mark_node;
}

/* Build/push the elementary type decls that builtin functions/types
   need.  */

static void
install_builtin_elementary_types (void)
{
  signed_size_type_node = gnat_signed_type (size_type_node);
  pid_type_node = integer_type_node;
  void_list_node = build_void_list_node ();

  string_type_node = build_pointer_type (char_type_node);
  const_string_type_node
    = build_pointer_type (build_qualified_type
			  (char_type_node, TYPE_QUAL_CONST));
}

/* ----------------------------------------------------------------------- *
 *                          BUILTIN FUNCTION TYPES                         *
 * ----------------------------------------------------------------------- */

/* Now, builtin function types per se.  */

enum c_builtin_type
{
#define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
#define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
#define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
#define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
#define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
#define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
#define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
#define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
#define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
#define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
#define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
#define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
#define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
#define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
#define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG6) \
  NAME,
#define DEF_POINTER_TYPE(NAME, TYPE) NAME,
#include "builtin-types.def"
#undef DEF_PRIMITIVE_TYPE
#undef DEF_FUNCTION_TYPE_0
#undef DEF_FUNCTION_TYPE_1
#undef DEF_FUNCTION_TYPE_2
#undef DEF_FUNCTION_TYPE_3
#undef DEF_FUNCTION_TYPE_4
#undef DEF_FUNCTION_TYPE_5
#undef DEF_FUNCTION_TYPE_6
#undef DEF_FUNCTION_TYPE_7
#undef DEF_FUNCTION_TYPE_VAR_0
#undef DEF_FUNCTION_TYPE_VAR_1
#undef DEF_FUNCTION_TYPE_VAR_2
#undef DEF_FUNCTION_TYPE_VAR_3
#undef DEF_FUNCTION_TYPE_VAR_4
#undef DEF_FUNCTION_TYPE_VAR_5
#undef DEF_POINTER_TYPE
  BT_LAST
};

typedef enum c_builtin_type builtin_type;

/* A temporary array used in communication with def_fn_type.  */
static GTY(()) tree builtin_types[(int) BT_LAST + 1];

/* A helper function for install_builtin_types.  Build function type
   for DEF with return type RET and N arguments.  If VAR is true, then the
   function should be variadic after those N arguments.

   Takes special care not to ICE if any of the types involved are
   error_mark_node, which indicates that said type is not in fact available
   (see builtin_type_for_size).  In which case the function type as a whole
   should be error_mark_node.  */

static void
def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
{
  tree t;
  tree *args = XALLOCAVEC (tree, n);
  va_list list;
  int i;

  va_start (list, n);
  for (i = 0; i < n; ++i)
    {
      builtin_type a = (builtin_type) va_arg (list, int);
      t = builtin_types[a];
      if (t == error_mark_node)
	goto egress;
      args[i] = t;
    }

  t = builtin_types[ret];
  if (t == error_mark_node)
    goto egress;
  if (var)
    t = build_varargs_function_type_array (t, n, args);
  else
    t = build_function_type_array (t, n, args);

 egress:
  builtin_types[def] = t;
  va_end (list);
}
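
/* Illustrative sketch (not part of the original source): the va_arg scheme
   used above, reduced to a self-contained variadic helper that collects N
   integer arguments into an array and bails out on a sentinel value, much
   like def_fn_type bails out on error_mark_node.  Invented names, fenced
   out of the build.  */
#if 0
#include <stdarg.h>
#include <stdio.h>

#define SENTINEL (-1)   /* Stand-in for error_mark_node.  */

/* Return the number of arguments actually collected into ARGS.  */
static int
collect_args (int *args, int n, ...)
{
  va_list list;
  int i, count = 0;

  va_start (list, n);
  for (i = 0; i < n; ++i)
    {
      int a = va_arg (list, int);
      if (a == SENTINEL)
	break;
      args[count++] = a;
    }
  va_end (list);
  return count;
}

int
main (void)
{
  int args[3];
  int count = collect_args (args, 3, 10, 20, SENTINEL);
  printf ("%d\n", count);   /* Prints 2.  */
  return 0;
}
#endif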

/* Build the builtin function types and install them in the builtin_types
   array for later use in builtin function decls.  */

static void
install_builtin_function_types (void)
{
  tree va_list_ref_type_node;
  tree va_list_arg_type_node;

  if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
    {
      va_list_arg_type_node = va_list_ref_type_node =
	build_pointer_type (TREE_TYPE (va_list_type_node));
    }
  else
    {
      va_list_arg_type_node = va_list_type_node;
      va_list_ref_type_node = build_reference_type (va_list_type_node);
    }

#define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
  builtin_types[ENUM] = VALUE;
#define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
  def_fn_type (ENUM, RETURN, 0, 0);
#define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
  def_fn_type (ENUM, RETURN, 0, 1, ARG1);
#define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
  def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
#define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
  def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
#define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
  def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
#define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
  def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
#define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6)					\
  def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
#define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6, ARG7)					\
  def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
#define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
  def_fn_type (ENUM, RETURN, 1, 0);
#define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
  def_fn_type (ENUM, RETURN, 1, 1, ARG1);
#define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
  def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
#define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
  def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
#define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
  def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
#define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
  def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
#define DEF_POINTER_TYPE(ENUM, TYPE) \
  builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);

#include "builtin-types.def"

#undef DEF_PRIMITIVE_TYPE
#undef DEF_FUNCTION_TYPE_1
#undef DEF_FUNCTION_TYPE_2
#undef DEF_FUNCTION_TYPE_3
#undef DEF_FUNCTION_TYPE_4
#undef DEF_FUNCTION_TYPE_5
#undef DEF_FUNCTION_TYPE_6
#undef DEF_FUNCTION_TYPE_VAR_0
#undef DEF_FUNCTION_TYPE_VAR_1
#undef DEF_FUNCTION_TYPE_VAR_2
#undef DEF_FUNCTION_TYPE_VAR_3
#undef DEF_FUNCTION_TYPE_VAR_4
#undef DEF_FUNCTION_TYPE_VAR_5
#undef DEF_POINTER_TYPE
  builtin_types[(int) BT_LAST] = NULL_TREE;
}
/* ----------------------------------------------------------------------- *
 *                           BUILTIN ATTRIBUTES                             *
 * ----------------------------------------------------------------------- */

enum built_in_attribute
{
#define DEF_ATTR_NULL_TREE(ENUM) ENUM,
#define DEF_ATTR_INT(ENUM, VALUE) ENUM,
#define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
  ATTR_LAST
};

static GTY(()) tree built_in_attributes[(int) ATTR_LAST];

static void
install_builtin_attributes (void)
{
  /* Fill in the built_in_attributes array.  */
#define DEF_ATTR_NULL_TREE(ENUM) \
  built_in_attributes[(int) ENUM] = NULL_TREE;
#define DEF_ATTR_INT(ENUM, VALUE) \
  built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
#define DEF_ATTR_IDENT(ENUM, STRING) \
  built_in_attributes[(int) ENUM] = get_identifier (STRING);
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
  built_in_attributes[(int) ENUM] \
    = tree_cons (built_in_attributes[(int) PURPOSE], \
                 built_in_attributes[(int) VALUE], \
                 built_in_attributes[(int) CHAIN]);
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
}
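
/* Illustration only (editorial example; consult builtin-attrs.def for the
   actual entries): a definition along the lines of

     DEF_ATTR_TREE_LIST (ATTR_NOTHROW_LIST, ATTR_NOTHROW, ATTR_NULL, ATTR_NULL)

   expands above into

     built_in_attributes[(int) ATTR_NOTHROW_LIST]
       = tree_cons (built_in_attributes[(int) ATTR_NOTHROW],
                    built_in_attributes[(int) ATTR_NULL],
                    built_in_attributes[(int) ATTR_NULL]);

   i.e. a one-element attribute list whose purpose is the "nothrow"
   identifier, ready to be attached to builtin function decls.  */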
/* Handle a "const" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_const_attribute (tree *node, tree ARG_UNUSED (name),
                        tree ARG_UNUSED (args), int ARG_UNUSED (flags),
                        bool *no_add_attrs)
{
  if (TREE_CODE (*node) == FUNCTION_DECL)
    TREE_READONLY (*node) = 1;
  else
    *no_add_attrs = true;

  return NULL_TREE;
}
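
/* Illustration only (the function name is invented for the example): a
   declaration such as

     extern int sq (int) __attribute__ ((const));

   reaches this handler, which marks the FUNCTION_DECL TREE_READONLY so the
   optimizers may assume the function reads no global memory and has no side
   effects beyond its return value.  */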
/* Handle a "nothrow" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
                          tree ARG_UNUSED (args), int ARG_UNUSED (flags),
                          bool *no_add_attrs)
{
  if (TREE_CODE (*node) == FUNCTION_DECL)
    TREE_NOTHROW (*node) = 1;
  else
    *no_add_attrs = true;

  return NULL_TREE;
}
/* Handle a "pure" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
                       int ARG_UNUSED (flags), bool *no_add_attrs)
{
  if (TREE_CODE (*node) == FUNCTION_DECL)
    DECL_PURE_P (*node) = 1;
  /* ??? TODO: Support types.  */
  else
    {
      warning (OPT_Wattributes, "%qs attribute ignored",
               IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
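
/* Illustration only (the function name is invented for the example): "pure"
   is the weaker sibling of "const"; a pure function may read but not write
   global memory.  A declaration such as

     extern int str_len_like (const char *) __attribute__ ((pure));

   gets DECL_PURE_P set here, enabling e.g. elimination of repeated calls
   with the same arguments when no intervening stores are possible.  */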
/* Handle a "no vops" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
                         tree ARG_UNUSED (args), int ARG_UNUSED (flags),
                         bool *ARG_UNUSED (no_add_attrs))
{
  gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
  DECL_IS_NOVOPS (*node) = 1;
  return NULL_TREE;
}
/* Helper for nonnull attribute handling; fetch the operand number
   from the attribute argument list.  */

static bool
get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
{
  /* Verify the arg number is a constant.  */
  if (TREE_CODE (arg_num_expr) != INTEGER_CST
      || TREE_INT_CST_HIGH (arg_num_expr) != 0)
    return false;

  *valp = TREE_INT_CST_LOW (arg_num_expr);
  return true;
}
/* Handle the "nonnull" attribute.  */

static tree
handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
                          tree args, int ARG_UNUSED (flags),
                          bool *no_add_attrs)
{
  tree type = *node;
  unsigned HOST_WIDE_INT attr_arg_num;

  /* If no arguments are specified, all pointer arguments should be
     non-null.  Verify a full prototype is given so that the arguments
     will have the correct types when we actually check them later.  */
  if (!args)
    {
      if (!prototype_p (type))
        {
          error ("nonnull attribute without arguments on a non-prototype");
          *no_add_attrs = true;
        }
      return NULL_TREE;
    }

  /* Argument list specified.  Verify that each argument number references
     a pointer argument.  */
  for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
    {
      unsigned HOST_WIDE_INT arg_num = 0, ck_num;

      if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
        {
          error ("nonnull argument has invalid operand number (argument %lu)",
                 (unsigned long) attr_arg_num);
          *no_add_attrs = true;
          return NULL_TREE;
        }

      if (prototype_p (type))
        {
          function_args_iterator iter;
          tree argument;

          function_args_iter_init (&iter, type);
          for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
            {
              argument = function_args_iter_cond (&iter);
              if (!argument || ck_num == arg_num)
                break;
            }

          if (!argument
              || TREE_CODE (argument) == VOID_TYPE)
            {
              error ("nonnull argument with out-of-range operand number "
                     "(argument %lu, operand %lu)",
                     (unsigned long) attr_arg_num, (unsigned long) arg_num);
              *no_add_attrs = true;
              return NULL_TREE;
            }

          if (TREE_CODE (argument) != POINTER_TYPE)
            {
              error ("nonnull argument references non-pointer operand "
                     "(argument %lu, operand %lu)",
                     (unsigned long) attr_arg_num, (unsigned long) arg_num);
              *no_add_attrs = true;
              return NULL_TREE;
            }
        }
    }

  return NULL_TREE;
}
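
/* Illustration only (the function name is invented for the example): with an
   explicit operand list, as in

     extern void copy_buf (void *, const void *, unsigned long)
       __attribute__ ((nonnull (1, 2)));

   the loop above verifies that operands 1 and 2 of the prototype really are
   pointer types; with no operand list, every pointer parameter of a
   prototyped function is treated as never null.  */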
/* Handle a "sentinel" attribute.  */

static tree
handle_sentinel_attribute (tree *node, tree name, tree args,
                           int ARG_UNUSED (flags), bool *no_add_attrs)
{
  if (!prototype_p (*node))
    {
      warning (OPT_Wattributes,
               "%qs attribute requires prototypes with named arguments",
               IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }
  else
    {
      if (!stdarg_p (*node))
        {
          warning (OPT_Wattributes,
                   "%qs attribute only applies to variadic functions",
                   IDENTIFIER_POINTER (name));
          *no_add_attrs = true;
        }
    }

  if (args)
    {
      tree position = TREE_VALUE (args);

      if (TREE_CODE (position) != INTEGER_CST)
        {
          warning (0, "requested position is not an integer constant");
          *no_add_attrs = true;
        }
      else
        {
          if (tree_int_cst_lt (position, integer_zero_node))
            {
              warning (0, "requested position is less than zero");
              *no_add_attrs = true;
            }
        }
    }

  return NULL_TREE;
}
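
/* Illustration only (the function name is invented for the example): the
   attribute marks a variadic function whose calls must end with a null
   pointer, as in

     extern void gather (const char *, ...) __attribute__ ((sentinel));

   The optional integer operand validated above gives the sentinel's position
   counted backwards from the end of the argument list, 0 denoting the last
   argument.  */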
/* Handle a "noreturn" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
                           int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree type = TREE_TYPE (*node);

  /* See FIXME comment in c_common_attribute_table.  */
  if (TREE_CODE (*node) == FUNCTION_DECL)
    TREE_THIS_VOLATILE (*node) = 1;
  else if (TREE_CODE (type) == POINTER_TYPE
           && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
    TREE_TYPE (*node)
      = build_pointer_type
        (build_type_variant (TREE_TYPE (type),
                             TYPE_READONLY (TREE_TYPE (type)), 1));
  else
    {
      warning (OPT_Wattributes, "%qs attribute ignored",
               IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
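
/* Illustration only (the function name is invented for the example): a
   declaration such as

     extern void fatal_error_like (const char *) __attribute__ ((noreturn));

   reaches this handler.  On a FUNCTION_DECL the "does not return" property
   is recorded by reusing TREE_THIS_VOLATILE; on a pointer-to-function the
   pointed-to type is rebuilt with the volatile qualifier set instead.  */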
/* Handle a "leaf" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_leaf_attribute (tree *node, tree name,
                       tree ARG_UNUSED (args),
                       int ARG_UNUSED (flags), bool *no_add_attrs)
{
  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (OPT_Wattributes, "%qE attribute ignored", name);
      *no_add_attrs = true;
    }
  if (!TREE_PUBLIC (*node))
    {
      warning (OPT_Wattributes, "%qE attribute has no effect", name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
/* Handle a "malloc" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
                         int ARG_UNUSED (flags), bool *no_add_attrs)
{
  if (TREE_CODE (*node) == FUNCTION_DECL
      && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
    DECL_IS_MALLOC (*node) = 1;
  else
    {
      warning (OPT_Wattributes, "%qs attribute ignored",
               IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
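
/* Illustration only (the function name is invented for the example): an
   allocator declared as

     extern void *arena_alloc (unsigned long) __attribute__ ((malloc));

   must be a function returning a pointer, in which case DECL_IS_MALLOC is
   set and the alias machinery may assume the returned pointer does not
   alias any pre-existing object.  */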
/* Fake handler for attributes we don't properly support.  */

static tree
fake_attribute_handler (tree * ARG_UNUSED (node),
                        tree ARG_UNUSED (name),
                        tree ARG_UNUSED (args),
                        int ARG_UNUSED (flags),
                        bool * ARG_UNUSED (no_add_attrs))
{
  return NULL_TREE;
}
/* Handle a "type_generic" attribute.  */

static tree
handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
                               tree ARG_UNUSED (args), int ARG_UNUSED (flags),
                               bool * ARG_UNUSED (no_add_attrs))
{
  /* Ensure we have a function type.  */
  gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);

  /* Ensure we have a variadic function.  */
  gcc_assert (!prototype_p (*node) || stdarg_p (*node));

  return NULL_TREE;
}
/* Handle a "vector_size" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_vector_size_attribute (tree *node, tree name, tree args,
                              int ARG_UNUSED (flags),
                              bool *no_add_attrs)
{
  unsigned HOST_WIDE_INT vecsize, nunits;
  enum machine_mode orig_mode;
  tree type = *node, new_type, size;

  *no_add_attrs = true;

  size = TREE_VALUE (args);

  if (!host_integerp (size, 1))
    {
      warning (OPT_Wattributes, "%qs attribute ignored",
               IDENTIFIER_POINTER (name));
      return NULL_TREE;
    }

  /* Get the vector size (in bytes).  */
  vecsize = tree_low_cst (size, 1);

  /* We need to provide for vector pointers, vector arrays, and
     functions returning vectors.  For example:

       __attribute__((vector_size(16))) short *foo;

     In this case, the mode is SI, but the type being modified is
     HI, so we need to look further.  */

  while (POINTER_TYPE_P (type)
         || TREE_CODE (type) == FUNCTION_TYPE
         || TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);

  /* Get the mode of the type being modified.  */
  orig_mode = TYPE_MODE (type);

  if ((!INTEGRAL_TYPE_P (type)
       && !SCALAR_FLOAT_TYPE_P (type)
       && !FIXED_POINT_TYPE_P (type))
      || (!SCALAR_FLOAT_MODE_P (orig_mode)
          && GET_MODE_CLASS (orig_mode) != MODE_INT
          && !ALL_SCALAR_FIXED_POINT_MODE_P (orig_mode))
      || !host_integerp (TYPE_SIZE_UNIT (type), 1)
      || TREE_CODE (type) == BOOLEAN_TYPE)
    {
      error ("invalid vector type for attribute %qs",
             IDENTIFIER_POINTER (name));
      return NULL_TREE;
    }

  if (vecsize % tree_low_cst (TYPE_SIZE_UNIT (type), 1))
    {
      error ("vector size not an integral multiple of component size");
      return NULL_TREE;
    }

  if (vecsize == 0)
    {
      error ("zero vector size");
      return NULL_TREE;
    }

  /* Calculate how many units fit in the vector.  */
  nunits = vecsize / tree_low_cst (TYPE_SIZE_UNIT (type), 1);
  if (nunits & (nunits - 1))
    {
      error ("number of components of the vector not a power of two");
      return NULL_TREE;
    }

  new_type = build_vector_type (type, nunits);

  /* Build back pointers if needed.  */
  *node = reconstruct_complex_type (*node, new_type);

  return NULL_TREE;
}
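
/* Illustration only: for a declaration like

     typedef int v4si __attribute__ ((vector_size (16)));

   with a 4-byte int, the code above computes nunits = 16 / 4 = 4, builds a
   4-element integer vector type, and reconstructs any enclosing pointer,
   array or function type around the new vector type.  */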
/* Handle a "vector_type" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
                              int ARG_UNUSED (flags),
                              bool *no_add_attrs)
{
  /* Vector representative type and size.  */
  tree rep_type = *node;
  tree rep_size = TYPE_SIZE_UNIT (rep_type);
  tree rep_name;

  /* Vector size in bytes and number of units.  */
  unsigned HOST_WIDE_INT vec_bytes, vec_units;

  /* Vector element type and mode.  */
  tree elem_type;
  enum machine_mode elem_mode;

  *no_add_attrs = true;

  /* Get the representative array type, possibly nested within a
     padding record e.g. for alignment purposes.  */

  if (TYPE_IS_PADDING_P (rep_type))
    rep_type = TREE_TYPE (TYPE_FIELDS (rep_type));

  if (TREE_CODE (rep_type) != ARRAY_TYPE)
    {
      error ("attribute %qs applies to array types only",
             IDENTIFIER_POINTER (name));
      return NULL_TREE;
    }

  /* Silently punt on variable sizes.  We can't make vector types for them,
     need to ignore them on front-end generated subtypes of unconstrained
     bases, and this attribute is for binding implementors, not end-users, so
     we should never get there from legitimate explicit uses.  */

  if (!host_integerp (rep_size, 1))
    return NULL_TREE;

  /* Get the element type/mode and check this is something we know
     how to make vectors of.  */

  elem_type = TREE_TYPE (rep_type);
  elem_mode = TYPE_MODE (elem_type);

  if ((!INTEGRAL_TYPE_P (elem_type)
       && !SCALAR_FLOAT_TYPE_P (elem_type)
       && !FIXED_POINT_TYPE_P (elem_type))
      || (!SCALAR_FLOAT_MODE_P (elem_mode)
          && GET_MODE_CLASS (elem_mode) != MODE_INT
          && !ALL_SCALAR_FIXED_POINT_MODE_P (elem_mode))
      || !host_integerp (TYPE_SIZE_UNIT (elem_type), 1))
    {
      error ("invalid element type for attribute %qs",
             IDENTIFIER_POINTER (name));
      return NULL_TREE;
    }

  /* Sanity check the vector size and element type consistency.  */

  vec_bytes = tree_low_cst (rep_size, 1);

  if (vec_bytes % tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1))
    {
      error ("vector size not an integral multiple of component size");
      return NULL_TREE;
    }

  if (vec_bytes == 0)
    {
      error ("zero vector size");
      return NULL_TREE;
    }

  vec_units = vec_bytes / tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1);
  if (vec_units & (vec_units - 1))
    {
      error ("number of components of the vector not a power of two");
      return NULL_TREE;
    }

  /* Build the vector type and replace.  */

  *node = build_vector_type (elem_type, vec_units);
  rep_name = TYPE_NAME (rep_type);
  if (TREE_CODE (rep_name) == TYPE_DECL)
    rep_name = DECL_NAME (rep_name);
  TYPE_NAME (*node) = rep_name;
  TYPE_REPRESENTATIVE_ARRAY (*node) = rep_type;

  return NULL_TREE;
}
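
/* Illustration only: this attribute is aimed at binding implementors, for
   whom the Ada view of a vector is a constrained array.  Given a
   representative array type equivalent to

     typedef float rep_array[4];

   the handler above substitutes a 4-element float vector type, keeps the
   array's name, and records the array itself as the
   TYPE_REPRESENTATIVE_ARRAY of the new vector type.  */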
/* ----------------------------------------------------------------------- *
 *                            BUILTIN FUNCTIONS                             *
 * ----------------------------------------------------------------------- */

/* Worker for DEF_BUILTIN.  Possibly define a builtin function with one or two
   names.  Does not declare a non-__builtin_ function if flag_no_builtin, or
   if nonansi_p and flag_no_nonansi_builtin.  */

static void
def_builtin_1 (enum built_in_function fncode,
               const char *name,
               enum built_in_class fnclass,
               tree fntype, tree libtype,
               bool both_p, bool fallback_p,
               bool nonansi_p ATTRIBUTE_UNUSED,
               tree fnattrs, bool implicit_p)
{
  tree decl;
  const char *libname;

  /* Preserve an already installed decl.  It most likely was setup in advance
     (e.g. as part of the internal builtins) for specific reasons.  */
  if (builtin_decl_explicit (fncode) != NULL_TREE)
    return;

  gcc_assert ((!both_p && !fallback_p)
              || !strncmp (name, "__builtin_",
                           strlen ("__builtin_")));

  libname = name + strlen ("__builtin_");
  decl = add_builtin_function (name, fntype, fncode, fnclass,
                               (fallback_p ? libname : NULL),
                               fnattrs);
  if (both_p)
    /* ??? This is normally further controlled by command-line options
       like -fno-builtin, but we don't have them for Ada.  */
    add_builtin_function (libname, libtype, fncode, fnclass,
                          NULL, fnattrs);

  set_builtin_decl (fncode, decl, implicit_p);
}
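
/* Illustration only (the details come from builtins.def, not this file):
   for a library builtin such as memcpy, the entry passes the name
   "__builtin_memcpy" with both_p and fallback_p set, so both
   "__builtin_memcpy" and plain "memcpy" get declared, with "memcpy" also
   used as the fallback library name when no inline expansion applies.  */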

static int flag_isoc94 = 0;
static int flag_isoc99 = 0;
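
/* Editorial note: these dummies appear to be here only because a few entries
   in builtins.def refer to flag_isoc94/flag_isoc99; Ada has no corresponding
   language option, so leaving them at 0 simply keeps those C-specific
   conditions inert.  */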

/* Install what the common builtins.def offers.  */

static void
install_builtin_functions (void)
{
#define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
                    NONANSI_P, ATTRS, IMPLICIT, COND) \
  if (NAME && COND) \
    def_builtin_1 (ENUM, NAME, CLASS, \
                   builtin_types[(int) TYPE], \
                   builtin_types[(int) LIBTYPE], \
                   BOTH_P, FALLBACK_P, NONANSI_P, \
                   built_in_attributes[(int) ATTRS], IMPLICIT);
#include "builtins.def"
#undef DEF_BUILTIN
}

/* ----------------------------------------------------------------------- *
 *                            BUILTIN FUNCTIONS                             *
 * ----------------------------------------------------------------------- */

/* Install the builtin functions we might need.  */

void
gnat_install_builtins (void)
{
  install_builtin_elementary_types ();
  install_builtin_function_types ();
  install_builtin_attributes ();

  /* Install builtins used by generic middle-end pieces first.  Some of these
     know about internal specificities and control attributes accordingly, for
     instance __builtin_alloca vs no-throw and -fstack-check.  We will ignore
     the generic definition from builtins.def.  */
  build_common_builtin_nodes ();

  /* Now, install the target specific builtins, such as the AltiVec family on
     ppc, and the common set as exposed by builtins.def.  */
  targetm.init_builtins ();
  install_builtin_functions ();
}

#include "gt-ada-utils.h"
#include "gtype-ada.h"