/****************************************************************************
 *                                                                          *
 *                         GNAT COMPILER COMPONENTS                         *
 *                                                                          *
 *                          C Implementation File                           *
 *                                                                          *
 *          Copyright (C) 1992-2011, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software;  you can  redistribute it  and/or modify it under *
 * terms of the  GNU General Public License as published  by the Free Soft- *
 * ware  Foundation;  either version 3,  or (at your option) any later ver- *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY;  without even the  implied warranty of MERCHANTABILITY *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for more details.  You should have received a copy of the GNU General    *
 * Public License along with GCC; see the file COPYING3.  If not see        *
 * <http://www.gnu.org/licenses/>.                                          *
 *                                                                          *
 * GNAT was originally developed  by the GNAT team at  New York University. *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 *                                                                          *
 ****************************************************************************/
#include "coretypes.h"
#include "diagnostic-core.h"
#include "langhooks.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "tree-iterator.h"
#ifndef MAX_BITS_PER_WORD
#define MAX_BITS_PER_WORD  BITS_PER_WORD
#endif

/* If nonzero, pretend we are allocating at global level.  */
int force_global;
/* The default alignment of "double" floating-point types, i.e. floating
   point types whose size is equal to 64 bits, or 0 if this alignment is
   not specifically capped.  */
int double_float_alignment;

/* The default alignment of "double" or larger scalar types, i.e. scalar
   types whose size is greater or equal to 64 bits, or 0 if this alignment
   is not specifically capped.  */
int double_scalar_alignment;
/* Tree nodes for the various types and decls we create.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons.  */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];

/* Likewise, but with extra info for each of the possible raise reasons.  */
tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];
/* Forward declarations for handlers of attributes.  */
static tree handle_const_attribute (tree *, tree, tree, int, bool *);
static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);

/* Fake handler for attributes we don't properly support, typically because
   they'd require dragging a lot of the common-c front-end circuitry.  */
static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
/* Table of machine-independent internal attributes for Ada.  We support
   this minimal set of attributes to accommodate the needs of builtins.  */
const struct attribute_spec gnat_internal_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  { "const",        0, 0,  true,  false, false, handle_const_attribute,
    false },
  { "nothrow",      0, 0,  true,  false, false, handle_nothrow_attribute,
    false },
  { "pure",         0, 0,  true,  false, false, handle_pure_attribute,
    false },
  { "no vops",      0, 0,  true,  false, false, handle_novops_attribute,
    false },
  { "nonnull",      0, -1, false, true,  true,  handle_nonnull_attribute,
    false },
  { "sentinel",     0, 1,  false, true,  true,  handle_sentinel_attribute,
    false },
  { "noreturn",     0, 0,  true,  false, false, handle_noreturn_attribute,
    false },
  { "leaf",         0, 0,  true,  false, false, handle_leaf_attribute,
    false },
  { "malloc",       0, 0,  true,  false, false, handle_malloc_attribute,
    false },
  { "type generic", 0, 0,  false, true,  true,  handle_type_generic_attribute,
    false },
  { "vector_size",  1, 1,  false, true,  false, handle_vector_size_attribute,
    false },
  { "vector_type",  0, 0,  false, true,  false, handle_vector_type_attribute,
    false },
  { "may_alias",    0, 0,  false, true,  false, NULL, false },

  /* ??? format and format_arg are heavy and not supported, which actually
     prevents support for stdio builtins, which we however declare as part
     of the common builtins.def contents.  */
  { "format",       3, 3,  false, true,  true,  fake_attribute_handler, false },
  { "format_arg",   1, 1,  false, true,  true,  fake_attribute_handler, false },

  { NULL,           0, 0,  false, false, false, NULL, false }
};
/* Associates a GNAT tree node to a GCC tree node.  It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'.  See documentation
   of `save_gnu_tree' for more info.  */
static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;

#define GET_GNU_TREE(GNAT_ENTITY) \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]

#define SET_GNU_TREE(GNAT_ENTITY,VAL) \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_GNU_TREE(GNAT_ENTITY) \
  (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* Associates a GNAT entity to a GCC tree node used as a dummy, if any.  */
static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;

#define GET_DUMMY_NODE(GNAT_ENTITY) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id]

#define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
  (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
/* This variable keeps a table for types for each precision so that we only
   allocate each of them once.  Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special.  Perhaps we should NOT share these types; we'll see how it
   goes.  */
static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static GTY(()) tree float_types[NUM_MACHINE_MODES];
/* For each binding contour we allocate a binding_level structure to indicate
   the binding depth.  */

struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
  /* The binding level containing this one (the enclosing binding level).  */
  struct gnat_binding_level *chain;
  /* The BLOCK node for this level.  */
  tree block;
  /* If nonzero, the setjmp buffer that needs to be updated for any
     variable-sized definition within this context.  */
  tree jmpbuf_decl;
};
/* The binding level currently in effect.  */
static GTY(()) struct gnat_binding_level *current_binding_level;

/* A chain of gnat_binding_level structures awaiting reuse.  */
static GTY((deletable)) struct gnat_binding_level *free_binding_level;

/* An array of global declarations.  */
static GTY(()) VEC(tree,gc) *global_decls;

/* An array of builtin function declarations.  */
static GTY(()) VEC(tree,gc) *builtin_decls;

/* An array of global renaming pointers.  */
static GTY(()) VEC(tree,gc) *global_renaming_pointers;

/* A chain of unused BLOCK nodes.  */
static GTY((deletable)) tree free_block_chain;
static tree merge_sizes (tree, tree, tree, bool, bool);
static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *);
static tree float_type_for_precision (int, enum machine_mode);
static tree convert_to_fat_pointer (tree, tree);
static tree convert_to_thin_pointer (tree, tree);
static bool potential_alignment_gap (tree, tree, tree);
static void process_attributes (tree, struct attrib *);
/* Initialize the association of GNAT nodes to GCC trees.  */

void
init_gnat_to_gnu (void)
{
  associate_gnat_to_gnu = ggc_alloc_cleared_vec_tree (max_gnat_nodes);
}
/* GNAT_ENTITY is a GNAT tree node for an entity.  Associate GNU_DECL, a GCC
   tree node, with GNAT_ENTITY.  If GNU_DECL is not a ..._DECL node, abort.
   If NO_CHECK is true, the latter check is suppressed.

   If GNU_DECL is zero, reset a previous association.  */

void
save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
{
  /* Check that GNAT_ENTITY is not already defined and that it is being set
     to something which is a decl.  If that is not the case, this usually
     means GNAT_ENTITY is defined twice, but occasionally is due to some
     Gigi problem.  */
  gcc_assert (!(gnu_decl
		&& (PRESENT_GNU_TREE (gnat_entity)
		    || (!no_check && !DECL_P (gnu_decl)))));

  SET_GNU_TREE (gnat_entity, gnu_decl);
}

/* GNAT_ENTITY is a GNAT tree node for an entity.  Return the GCC tree node
   that was associated with it.  If there is no such tree node, abort.

   In some cases, such as delayed elaboration or expressions that need to
   be elaborated only once, GNAT_ENTITY is really not an entity.  */

tree
get_gnu_tree (Entity_Id gnat_entity)
{
  gcc_assert (PRESENT_GNU_TREE (gnat_entity));
  return GET_GNU_TREE (gnat_entity);
}

/* Return nonzero if a GCC tree has been associated with GNAT_ENTITY.  */

bool
present_gnu_tree (Entity_Id gnat_entity)
{
  return PRESENT_GNU_TREE (gnat_entity);
}
/* Initialize the association of GNAT nodes to GCC trees as dummies.  */

void
init_dummy_type (void)
{
  dummy_node_table = ggc_alloc_cleared_vec_tree (max_gnat_nodes);
}
/* Make a dummy type corresponding to GNAT_TYPE.  */

tree
make_dummy_type (Entity_Id gnat_type)
{
  Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
  tree gnu_type;

  /* If there is an equivalent type, get its underlying type.  */
  if (Present (gnat_underlying))
    gnat_underlying = Underlying_Type (gnat_underlying);

  /* If there was no equivalent type (can only happen when just annotating
     types) or underlying type, go back to the original type.  */
  if (No (gnat_underlying))
    gnat_underlying = gnat_type;

  /* If there is already a dummy type, use that one.  Else make one.  */
  if (PRESENT_DUMMY_NODE (gnat_underlying))
    return GET_DUMMY_NODE (gnat_underlying);

  /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
     an ENUMERAL_TYPE.  */
  gnu_type = make_node (Is_Record_Type (gnat_underlying)
			? tree_code_for_record_type (gnat_underlying)
			: ENUMERAL_TYPE);
  TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
  TYPE_DUMMY_P (gnu_type) = 1;
  TYPE_STUB_DECL (gnu_type)
    = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
  if (Is_By_Reference_Type (gnat_type))
    TREE_ADDRESSABLE (gnu_type) = 1;

  SET_DUMMY_NODE (gnat_underlying, gnu_type);

  return gnu_type;
}

/* Return the dummy type that was made for GNAT_TYPE, if any.  */

tree
get_dummy_type (Entity_Id gnat_type)
{
  return GET_DUMMY_NODE (gnat_type);
}
/* Build dummy fat and thin pointer types whose designated type is specified
   by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter.  */

void
build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
{
  tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
  tree gnu_fat_type, fields, gnu_object_type;

  gnu_template_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
  TYPE_DUMMY_P (gnu_template_type) = 1;
  gnu_ptr_template = build_pointer_type (gnu_template_type);

  gnu_array_type = make_node (ENUMERAL_TYPE);
  TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
  TYPE_DUMMY_P (gnu_array_type) = 1;
  gnu_ptr_array = build_pointer_type (gnu_array_type);

  gnu_fat_type = make_node (RECORD_TYPE);
  /* Build a stub DECL to trigger the special processing for fat pointer types
     in gnat_pushdecl.  */
  TYPE_NAME (gnu_fat_type)
    = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
			     gnu_fat_type);
  fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
			      gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
  DECL_CHAIN (fields)
    = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
			 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
  finish_fat_pointer_type (gnu_fat_type, fields);
  SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
  /* Suppress debug info until after the type is completed.  */
  TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;

  gnu_object_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
  TYPE_DUMMY_P (gnu_object_type) = 1;

  TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
  TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
}
/* Return nonzero if we are currently in the global binding level.  */

int
global_bindings_p (void)
{
  return ((force_global || !current_function_decl) ? -1 : 0);
}
/* Enter a new binding level.  */

void
gnat_pushlevel (void)
{
  struct gnat_binding_level *newlevel = NULL;

  /* Reuse a struct for this binding level, if there is one.  */
  if (free_binding_level)
    {
      newlevel = free_binding_level;
      free_binding_level = free_binding_level->chain;
    }
  else
    newlevel = ggc_alloc_gnat_binding_level ();

  /* Use a free BLOCK, if any; otherwise, allocate one.  */
  if (free_block_chain)
    {
      newlevel->block = free_block_chain;
      free_block_chain = BLOCK_CHAIN (free_block_chain);
      BLOCK_CHAIN (newlevel->block) = NULL_TREE;
    }
  else
    newlevel->block = make_node (BLOCK);

  /* Point the BLOCK we just made to its parent.  */
  if (current_binding_level)
    BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;

  BLOCK_VARS (newlevel->block) = NULL_TREE;
  BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
  TREE_USED (newlevel->block) = 1;

  /* Add this level to the front of the chain (stack) of active levels.  */
  newlevel->chain = current_binding_level;
  newlevel->jmpbuf_decl = NULL_TREE;
  current_binding_level = newlevel;
}
/* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
   and point FNDECL to this BLOCK.  */

void
set_current_block_context (tree fndecl)
{
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  set_block_for_group (current_binding_level->block);
}

/* Set the jmpbuf_decl for the current binding level to DECL.  */

void
set_block_jmpbuf_decl (tree decl)
{
  current_binding_level->jmpbuf_decl = decl;
}

/* Get the jmpbuf_decl, if any, for the current binding level.  */

tree
get_block_jmpbuf_decl (void)
{
  return current_binding_level->jmpbuf_decl;
}
/* Exit a binding level.  Set any BLOCK into the current code group.  */

void
gnat_poplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
  BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));

  /* If this is a function-level BLOCK, don't do anything.  Otherwise, if
     there are no variables, free the block and merge its subblocks into
     those of its parent block.  Otherwise, add it to the list of its
     parent.  */
  if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
    ;
  else if (BLOCK_VARS (block) == NULL_TREE)
    {
      BLOCK_SUBBLOCKS (level->chain->block)
	= block_chainon (BLOCK_SUBBLOCKS (block),
			 BLOCK_SUBBLOCKS (level->chain->block));
      BLOCK_CHAIN (block) = free_block_chain;
      free_block_chain = block;
    }
  else
    {
      BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
      BLOCK_SUBBLOCKS (level->chain->block) = block;
      TREE_USED (block) = 1;
      set_block_for_group (block);
    }

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}
/* Exit a binding level and discard the associated BLOCK.  */

void
gnat_zaplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  BLOCK_CHAIN (block) = free_block_chain;
  free_block_chain = block;

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}
/* Records a ..._DECL node DECL as belonging to the current lexical scope
   and uses GNAT_NODE for location information and propagating flags.  */

void
gnat_pushdecl (tree decl, Node_Id gnat_node)
{
  /* If this decl is public external or at toplevel, there is no context.  */
  if ((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || global_bindings_p ())
    DECL_CONTEXT (decl) = 0;
  else
    {
      DECL_CONTEXT (decl) = current_function_decl;

      /* Functions imported in another function are not really nested.
	 For really nested functions mark them initially as needing
	 a static chain for uses of that flag before unnesting;
	 lower_nested_functions will then recompute it.  */
      if (TREE_CODE (decl) == FUNCTION_DECL && !TREE_PUBLIC (decl))
	DECL_STATIC_CHAIN (decl) = 1;
    }

  TREE_NO_WARNING (decl) = (gnat_node == Empty || Warnings_Off (gnat_node));

  /* Set the location of DECL and emit a declaration for it.  */
  if (Present (gnat_node))
    Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
  add_decl_expr (decl, gnat_node);

  /* Put the declaration on the list.  The list of declarations is in reverse
     order.  The list will be reversed later.  Put global declarations in the
     globals list and local ones in the current block.  But skip TYPE_DECLs
     for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
     with the debugger and aren't needed anyway.  */
  if (!(TREE_CODE (decl) == TYPE_DECL
	&& TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
    {
      if (global_bindings_p ())
	{
	  VEC_safe_push (tree, gc, global_decls, decl);

	  if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
	    VEC_safe_push (tree, gc, builtin_decls, decl);
	}
      else if (!DECL_EXTERNAL (decl))
	{
	  DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
	  BLOCK_VARS (current_binding_level->block) = decl;
	}
    }

  /* For the declaration of a type, set its name if it either is not already
     set or if the previous type name was not derived from a source name.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same POINTER_TYPE node.  Code in the
     equivalent function of c-decl.c makes a copy of the type node here, but
     that may cause us trouble with incomplete types.  We make an exception
     for fat pointer types because the compiler automatically builds them
     for unconstrained array types and the debugger uses them to represent
     both these and pointers to these.  */
  if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
    {
      tree t = TREE_TYPE (decl);

      if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
	{
	  /* Array and pointer types aren't "tagged" types so we force the
	     type to be associated with its typedef in the DWARF back-end,
	     in order to make sure that the latter is always preserved.  */
	  if (!DECL_ARTIFICIAL (decl)
	      && (TREE_CODE (t) == ARRAY_TYPE
		  || TREE_CODE (t) == POINTER_TYPE))
	    {
	      tree tt = build_distinct_type_copy (t);
	      if (TREE_CODE (t) == POINTER_TYPE)
		TYPE_NEXT_PTR_TO (t) = tt;
	      TYPE_NAME (tt) = DECL_NAME (decl);
	      TYPE_STUB_DECL (tt) = TYPE_STUB_DECL (t);
	      DECL_ORIGINAL_TYPE (decl) = tt;
	    }
	}
      else if (TYPE_IS_FAT_POINTER_P (t))
	{
	  /* We need a variant for the placeholder machinery to work.  */
	  tree tt = build_variant_type_copy (t);
	  TYPE_NAME (tt) = decl;
	  TREE_USED (tt) = TREE_USED (t);
	  TREE_TYPE (decl) = tt;
	  if (DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
	    DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
	  else
	    DECL_ORIGINAL_TYPE (decl) = t;
	  DECL_ARTIFICIAL (decl) = 0;
	  t = NULL_TREE;
	}
      else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
	;
      else
	t = NULL_TREE;

      /* Propagate the name to all the anonymous variants.  This is needed
	 for the type qualifiers machinery to work properly.  */
      if (t)
	for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
	  if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
	    TYPE_NAME (t) = decl;
    }
}
/* Record TYPE as a builtin type for Ada.  NAME is the name of the type.
   ARTIFICIAL_P is true if it's a type that was generated by the compiler.  */

void
record_builtin_type (const char *name, tree type, bool artificial_p)
{
  tree type_decl = build_decl (input_location,
			       TYPE_DECL, get_identifier (name), type);
  DECL_ARTIFICIAL (type_decl) = artificial_p;
  gnat_pushdecl (type_decl, Empty);

  if (debug_hooks->type_decl)
    debug_hooks->type_decl (type_decl, false);
}
/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
   finish constructing the record type as a fat pointer type.  */

void
finish_fat_pointer_type (tree record_type, tree field_list)
{
  /* Make sure we can put it into a register.  */
  TYPE_ALIGN (record_type) = MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE);

  /* Show what it really is.  */
  TYPE_FAT_POINTER_P (record_type) = 1;

  /* Do not emit debug info for it since the types of its fields may still be
     incomplete at this point.  */
  finish_record_type (record_type, field_list, 0, false);

  /* Force type_contains_placeholder_p to return true on it.  Although the
     PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
     type but the representation of the unconstrained array.  */
  TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
}
/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
   finish constructing the record or union type.  If REP_LEVEL is zero, this
   record has no representation clause and so will be entirely laid out here.
   If REP_LEVEL is one, this record has a representation clause and has been
   laid out already; only set the sizes and alignment.  If REP_LEVEL is two,
   this record is derived from a parent record and thus inherits its layout;
   only make a pass on the fields to finalize them.  DEBUG_INFO_P is true if
   we need to write debug information about this type.  */

void
finish_record_type (tree record_type, tree field_list, int rep_level,
		    bool debug_info_p)
{
  enum tree_code code = TREE_CODE (record_type);
  tree name = TYPE_NAME (record_type);
  tree ada_size = bitsize_zero_node;
  tree size = bitsize_zero_node;
  bool had_size = TYPE_SIZE (record_type) != 0;
  bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
  bool had_align = TYPE_ALIGN (record_type) != 0;
  tree field;

  TYPE_FIELDS (record_type) = field_list;

  /* Always attach the TYPE_STUB_DECL for a record type.  It is required to
     generate debug info and have a parallel type.  */
  if (name && TREE_CODE (name) == TYPE_DECL)
    name = DECL_NAME (name);
  TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);

  /* Globally initialize the record first.  If this is a rep'ed record,
     that just means some initializations; otherwise, layout the record.  */
  if (rep_level > 0)
    {
      TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));

      if (!had_size_unit)
	TYPE_SIZE_UNIT (record_type) = size_zero_node;

      if (!had_size)
	TYPE_SIZE (record_type) = bitsize_zero_node;

      /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
	 out just like a UNION_TYPE, since the size will be fixed.  */
      else if (code == QUAL_UNION_TYPE)
	code = UNION_TYPE;
    }
  else
    {
      /* Ensure there isn't a size already set.  There can be in an error
	 case where there is a rep clause but all fields have errors and
	 no longer have a position.  */
      TYPE_SIZE (record_type) = 0;
      layout_type (record_type);
    }
  /* At this point, the position and size of each field is known.  It was
     either set before entry by a rep clause, or by laying out the type above.

     We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
     to compute the Ada size; the GCC size and alignment (for rep'ed records
     that are not padding types); and the mode (for rep'ed records).  We also
     clear the DECL_BIT_FIELD indication for the cases we know have not been
     handled yet, and adjust DECL_NONADDRESSABLE_P accordingly.  */

  if (code == QUAL_UNION_TYPE)
    field_list = nreverse (field_list);

  for (field = field_list; field; field = DECL_CHAIN (field))
    {
      tree type = TREE_TYPE (field);
      tree pos = bit_position (field);
      tree this_size = DECL_SIZE (field);
      tree this_ada_size;

      if ((TREE_CODE (type) == RECORD_TYPE
	   || TREE_CODE (type) == UNION_TYPE
	   || TREE_CODE (type) == QUAL_UNION_TYPE)
	  && !TYPE_FAT_POINTER_P (type)
	  && !TYPE_CONTAINS_TEMPLATE_P (type)
	  && TYPE_ADA_SIZE (type))
	this_ada_size = TYPE_ADA_SIZE (type);
      else
	this_ada_size = this_size;

      /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle.  */
      if (DECL_BIT_FIELD (field)
	  && operand_equal_p (this_size, TYPE_SIZE (type), 0))
	{
	  unsigned int align = TYPE_ALIGN (type);

	  /* In the general case, type alignment is required.  */
	  if (value_factor_p (pos, align))
	    {
	      /* The enclosing record type must be sufficiently aligned.
		 Otherwise, if no alignment was specified for it and it
		 has been laid out already, bump its alignment to the
		 desired one if this is compatible with its size.  */
	      if (TYPE_ALIGN (record_type) >= align)
		{
		  DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
		  DECL_BIT_FIELD (field) = 0;
		}
	      else if (!had_align
		       && rep_level == 0
		       && value_factor_p (TYPE_SIZE (record_type), align))
		{
		  TYPE_ALIGN (record_type) = align;
		  DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
		  DECL_BIT_FIELD (field) = 0;
		}
	    }

	  /* In the non-strict alignment case, only byte alignment is
	     required.  */
	  if (!STRICT_ALIGNMENT
	      && DECL_BIT_FIELD (field)
	      && value_factor_p (pos, BITS_PER_UNIT))
	    DECL_BIT_FIELD (field) = 0;
	}

      /* If we still have DECL_BIT_FIELD set at this point, we know that the
	 field is technically not addressable.  Except that it can actually
	 be addressed if it is BLKmode and happens to be properly aligned.  */
      if (DECL_BIT_FIELD (field)
	  && !(DECL_MODE (field) == BLKmode
	       && value_factor_p (pos, BITS_PER_UNIT)))
	DECL_NONADDRESSABLE_P (field) = 1;

      /* A type must be as aligned as its most aligned field that is not
	 a bit-field.  But this is already enforced by layout_type.  */
      if (rep_level > 0 && !DECL_BIT_FIELD (field))
	TYPE_ALIGN (record_type)
	  = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
      switch (code)
	{
	case UNION_TYPE:
	  ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
	  size = size_binop (MAX_EXPR, size, this_size);
	  break;

	case QUAL_UNION_TYPE:
	  ada_size
	    = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			   this_ada_size, ada_size);
	  size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			      this_size, size);
	  break;

	case RECORD_TYPE:
	  /* Since we know here that all fields are sorted in order of
	     increasing bit position, the size of the record is one
	     higher than the ending bit of the last field processed
	     unless we have a rep clause, since in that case we might
	     have a field outside a QUAL_UNION_TYPE that has a higher ending
	     position.  So use a MAX in that case.  Also, if this field is a
	     QUAL_UNION_TYPE, we need to take into account the previous size in
	     the case of empty variants.  */
	  ada_size
	    = merge_sizes (ada_size, pos, this_ada_size,
			   TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
	  size
	    = merge_sizes (size, pos, this_size,
			   TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  if (code == QUAL_UNION_TYPE)
    nreverse (field_list);
  if (rep_level < 2)
    {
      /* If this is a padding record, we never want to make the size smaller
	 than what was specified in it, if any.  */
      if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
	size = TYPE_SIZE (record_type);

      /* Now set any of the values we've just computed that apply.  */
      if (!TYPE_FAT_POINTER_P (record_type)
	  && !TYPE_CONTAINS_TEMPLATE_P (record_type))
	SET_TYPE_ADA_SIZE (record_type, ada_size);

      if (rep_level > 0)
	{
	  tree size_unit = had_size_unit
			   ? TYPE_SIZE_UNIT (record_type)
			   : convert (sizetype,
				      size_binop (CEIL_DIV_EXPR, size,
						  bitsize_unit_node));
	  unsigned int align = TYPE_ALIGN (record_type);

	  TYPE_SIZE (record_type) = variable_size (round_up (size, align));
	  TYPE_SIZE_UNIT (record_type)
	    = variable_size (round_up (size_unit, align / BITS_PER_UNIT));

	  compute_record_mode (record_type);
	}
    }

  if (debug_info_p)
    rest_of_record_type_compilation (record_type);
}
/* Wrap up compilation of RECORD_TYPE, i.e. output all the debug information
   associated with it.  It need not be invoked directly in most cases since
   finish_record_type takes care of doing so, but this can be necessary if
   a parallel type is to be attached to the record type.  */

void
rest_of_record_type_compilation (tree record_type)
{
  tree field_list = TYPE_FIELDS (record_type);
  tree field;
  enum tree_code code = TREE_CODE (record_type);
  bool var_size = false;

  for (field = field_list; field; field = DECL_CHAIN (field))
    {
      /* We need to make an XVE/XVU record if any field has variable size,
	 whether or not the record does.  For example, if we have a union,
	 it may be that all fields, rounded up to the alignment, have the
	 same size, in which case we'll use that size.  But the debug
	 output routines (except Dwarf2) won't be able to output the fields,
	 so we need to make the special record.  */
      if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
	  /* If a field has a non-constant qualifier, the record will have
	     variable size too.  */
	  || (code == QUAL_UNION_TYPE
	      && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
	{
	  var_size = true;
	  break;
	}
    }
  /* If this record is of variable size, rename it so that the
     debugger knows it is and make a new, parallel, record
     that tells the debugger how the record is laid out.  See
     exp_dbug.ads.  But don't do this for records that are padding
     since they confuse GDB.  */
  if (var_size && !TYPE_IS_PADDING_P (record_type))
    {
      tree new_record_type
	= make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
		     ? UNION_TYPE : TREE_CODE (record_type));
      tree orig_name = TYPE_NAME (record_type), new_name;
      tree last_pos = bitsize_zero_node;
      tree old_field, prev_old_field = NULL_TREE;

      if (TREE_CODE (orig_name) == TYPE_DECL)
	orig_name = DECL_NAME (orig_name);

      new_name
	= concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
		       ? "XVU" : "XVE");
      TYPE_NAME (new_record_type) = new_name;
      TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
      TYPE_STUB_DECL (new_record_type)
	= create_type_stub_decl (new_name, new_record_type);
      DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
	= DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
      TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
      TYPE_SIZE_UNIT (new_record_type)
	= size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);

      add_parallel_type (TYPE_STUB_DECL (record_type), new_record_type);
      /* Now scan all the fields, replacing each field with a new
	 field corresponding to the new encoding.  */
      for (old_field = TYPE_FIELDS (record_type); old_field;
	   old_field = DECL_CHAIN (old_field))
	{
	  tree field_type = TREE_TYPE (old_field);
	  tree field_name = DECL_NAME (old_field);
	  tree new_field;
	  tree curpos = bit_position (old_field);
	  bool var = false;
	  unsigned int align = 0;
	  tree pos;

	  /* See how the position was modified from the last position.

	     There are two basic cases we support: a value was added
	     to the last position or the last position was rounded to
	     a boundary and then something was added.  Check for the
	     first case first.  If not, see if there is any evidence
	     of rounding.  If so, round the last position and retry.

	     If this is a union, the position can be taken as zero.  */

	  /* Some computations depend on the shape of the position expression,
	     so strip conversions to make sure it's exposed.  */
	  curpos = remove_conversions (curpos, true);

	  if (TREE_CODE (new_record_type) == UNION_TYPE)
	    pos = bitsize_zero_node, align = 0;
	  else
	    pos = compute_related_constant (curpos, last_pos);

	  if (!pos && TREE_CODE (curpos) == MULT_EXPR
	      && host_integerp (TREE_OPERAND (curpos, 1), 1))
	    {
	      tree offset = TREE_OPERAND (curpos, 0);
	      align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);

	      /* An offset which is a bitwise AND with a negative power of 2
		 means an alignment corresponding to this power of 2.  Note
		 that, as sizetype is sign-extended but nonetheless unsigned,
		 we don't directly use tree_int_cst_sgn.  */
	      offset = remove_conversions (offset, true);
	      if (TREE_CODE (offset) == BIT_AND_EXPR
		  && host_integerp (TREE_OPERAND (offset, 1), 0)
		  && TREE_INT_CST_HIGH (TREE_OPERAND (offset, 1)) < 0)
		{
		  unsigned int pow
		    = - tree_low_cst (TREE_OPERAND (offset, 1), 0);
		  if (exact_log2 (pow) > 0)
		    align *= pow;
		}

	      pos = compute_related_constant (curpos,
					      round_up (last_pos, align));
	    }
	  else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
		   && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
		   && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
		   && host_integerp (TREE_OPERAND
				     (TREE_OPERAND (curpos, 0), 1),
				     1))
	    {
	      align
		= tree_low_cst
		  (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
	      pos = compute_related_constant (curpos,
					      round_up (last_pos, align));
	    }
	  else if (potential_alignment_gap (prev_old_field, old_field,
					    pos))
	    {
	      align = TYPE_ALIGN (field_type);
	      pos = compute_related_constant (curpos,
					      round_up (last_pos, align));
	    }

	  /* If we can't compute a position, set it to zero.

	     ??? We really should abort here, but it's too much work
	     to get this correct for all cases.  */
	  if (!pos)
	    pos = bitsize_zero_node;
	  /* See if this type is variable-sized and make a pointer type
	     and indicate the indirection if so.  Beware that the debug
	     back-end may adjust the position computed above according
	     to the alignment of the field type, i.e. the pointer type
	     in this case, if we don't preventively counter that.  */
	  if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
	    {
	      field_type = build_pointer_type (field_type);
	      if (align != 0 && TYPE_ALIGN (field_type) > align)
		{
		  field_type = copy_node (field_type);
		  TYPE_ALIGN (field_type) = align;
		}
	      var = true;
	    }

	  /* Make a new field name, if necessary.  */
	  if (var || align != 0)
	    {
	      char suffix[16];

	      if (align != 0)
		sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
			 align / BITS_PER_UNIT);
	      else
		strcpy (suffix, "XVL");

	      field_name = concat_name (field_name, suffix);
	    }

	  new_field
	    = create_field_decl (field_name, field_type, new_record_type,
				 DECL_SIZE (old_field), pos, 0, 0);
	  DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
	  TYPE_FIELDS (new_record_type) = new_field;

	  /* If old_field is a QUAL_UNION_TYPE, take its size as being
	     zero.  The only time it's not the last field of the record
	     is when there are other components at fixed positions after
	     it (meaning there was a rep clause for every field) and we
	     want to be able to encode them.  */
	  last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
				 (TREE_CODE (TREE_TYPE (old_field))
				  == QUAL_UNION_TYPE)
				 ? bitsize_zero_node
				 : DECL_SIZE (old_field));
	  prev_old_field = old_field;
	}

      TYPE_FIELDS (new_record_type)
	= nreverse (TYPE_FIELDS (new_record_type));

      rest_of_type_decl_compilation (TYPE_STUB_DECL (new_record_type));
    }

  rest_of_type_decl_compilation (TYPE_STUB_DECL (record_type));
}
/* Append PARALLEL_TYPE on the chain of parallel types for decl.  */

void
add_parallel_type (tree decl, tree parallel_type)
{
  tree d = decl;

  while (DECL_PARALLEL_TYPE (d))
    d = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (d));

  SET_DECL_PARALLEL_TYPE (d, parallel_type);
}
/* Utility function of above to merge LAST_SIZE, the previous size of a record
   with FIRST_BIT and SIZE that describe a field.  SPECIAL is true if this
   represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
   replace a value of zero with the old size.  If HAS_REP is true, we take the
   MAX of the end position of this field with LAST_SIZE.  In all other cases,
   we use FIRST_BIT plus SIZE.  Return an expression for the size.  */

static tree
merge_sizes (tree last_size, tree first_bit, tree size, bool special,
	     bool has_rep)
{
  tree type = TREE_TYPE (last_size);
  tree new_size;

  if (!special || TREE_CODE (size) != COND_EXPR)
    {
      new_size = size_binop (PLUS_EXPR, first_bit, size);
      if (has_rep)
	new_size = size_binop (MAX_EXPR, last_size, new_size);
    }

  else
    new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
			    integer_zerop (TREE_OPERAND (size, 1))
			    ? last_size : merge_sizes (last_size, first_bit,
						       TREE_OPERAND (size, 1),
						       1, has_rep),
			    integer_zerop (TREE_OPERAND (size, 2))
			    ? last_size : merge_sizes (last_size, first_bit,
						       TREE_OPERAND (size, 2),
						       1, has_rep));

  /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
     when fed through substitute_in_expr) into thinking that a constant
     size is not constant.  */
  while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
    new_size = TREE_OPERAND (new_size, 0);

  return new_size;
}
/* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
   related by the addition of a constant.  Return that constant if so.  */

static tree
compute_related_constant (tree op0, tree op1)
{
  tree op0_var, op1_var;
  tree op0_con = split_plus (op0, &op0_var);
  tree op1_con = split_plus (op1, &op1_var);
  tree result = size_binop (MINUS_EXPR, op0_con, op1_con);

  if (operand_equal_p (op0_var, op1_var, 0))
    return result;
  else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
    return result;
  else
    return 0;
}
/* Utility function of above to split a tree OP which may be a sum, into a
   constant part, which is returned, and a variable part, which is stored
   in *PVAR.  *PVAR may be bitsize_zero_node.  All operations must be of
   bitsizetype.  */

static tree
split_plus (tree in, tree *pvar)
{
  /* Strip NOPS in order to ease the tree traversal and maximize the
     potential for constant or plus/minus discovery.  We need to be careful
     to always return and set *pvar to bitsizetype trees, but it's worth
     the effort.  */
  STRIP_NOPS (in);

  *pvar = convert (bitsizetype, in);

  if (TREE_CODE (in) == INTEGER_CST)
    {
      *pvar = bitsize_zero_node;
      return convert (bitsizetype, in);
    }
  else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
    {
      tree lhs_var, rhs_var;
      tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
      tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);

      if (lhs_var == TREE_OPERAND (in, 0)
	  && rhs_var == TREE_OPERAND (in, 1))
	return bitsize_zero_node;

      *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
      return size_binop (TREE_CODE (in), lhs_con, rhs_con);
    }
  else
    return bitsize_zero_node;
}
/* Return a FUNCTION_TYPE node.  RETURN_TYPE is the type returned by the
   subprogram.  If it is VOID_TYPE, then we are dealing with a procedure,
   otherwise we are dealing with a function.  PARAM_DECL_LIST is a list of
   PARM_DECL nodes that are the subprogram parameters.  CICO_LIST is the
   copy-in/copy-out list to be stored into the TYPE_CICO_LIST field.
   RETURN_UNCONSTRAINED_P is true if the function returns an unconstrained
   object.  RETURN_BY_DIRECT_REF_P is true if the function returns by direct
   reference.  RETURN_BY_INVISI_REF_P is true if the function returns by
   invisible reference.  */

tree
create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
		     bool return_unconstrained_p, bool return_by_direct_ref_p,
		     bool return_by_invisi_ref_p)
{
  /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
     the subprogram formal parameters.  This list is generated by traversing
     the input list of PARM_DECL nodes.  */
  tree param_type_list = NULL_TREE;
  tree t, type;

  for (t = param_decl_list; t; t = DECL_CHAIN (t))
    param_type_list = tree_cons (NULL_TREE, TREE_TYPE (t), param_type_list);

  /* The list of the function parameter types has to be terminated by the void
     type to signal to the back-end that we are not dealing with a variable
     parameter subprogram, but that it has a fixed number of parameters.  */
  param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);

  /* The list of argument types has been created in reverse so reverse it.  */
  param_type_list = nreverse (param_type_list);

  type = build_function_type (return_type, param_type_list);

  /* TYPE may have been shared since GCC hashes types.  If it has a different
     CICO_LIST, make a copy.  Likewise for the various flags.  */
  if (!fntype_same_flags_p (type, cico_list, return_unconstrained_p,
			    return_by_direct_ref_p, return_by_invisi_ref_p))
    {
      type = copy_type (type);
      TYPE_CI_CO_LIST (type) = cico_list;
      TYPE_RETURN_UNCONSTRAINED_P (type) = return_unconstrained_p;
      TYPE_RETURN_BY_DIRECT_REF_P (type) = return_by_direct_ref_p;
      TREE_ADDRESSABLE (type) = return_by_invisi_ref_p;
    }

  return type;
}
/* Return a copy of TYPE but safe to modify in any way.  */

tree
copy_type (tree type)
{
  tree new_type = copy_node (type);

  /* Unshare the language-specific data.  */
  if (TYPE_LANG_SPECIFIC (type))
    {
      TYPE_LANG_SPECIFIC (new_type) = NULL;
      SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
    }

  /* And the contents of the language-specific slot if needed.  */
  if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
      && TYPE_RM_VALUES (type))
    {
      TYPE_RM_VALUES (new_type) = NULL_TREE;
      SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
      SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
      SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
    }

  /* copy_node clears this field instead of copying it, because it is
     aliased with TREE_CHAIN.  */
  TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);

  TYPE_POINTER_TO (new_type) = 0;
  TYPE_REFERENCE_TO (new_type) = 0;
  TYPE_MAIN_VARIANT (new_type) = new_type;
  TYPE_NEXT_VARIANT (new_type) = 0;

  return new_type;
}
/* Return a subtype of sizetype with range MIN to MAX and whose
   TYPE_INDEX_TYPE is INDEX.  GNAT_NODE is used for the position
   of the associated TYPE_DECL.  */

tree
create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
{
  /* First build a type for the desired range.  */
  tree type = build_nonshared_range_type (sizetype, min, max);

  /* Then set the index type.  */
  SET_TYPE_INDEX_TYPE (type, index);
  create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);

  return type;
}
/* Return a subtype of TYPE with range MIN to MAX.  If TYPE is NULL,
   sizetype is used.  */

tree
create_range_type (tree type, tree min, tree max)
{
  tree range_type;

  if (type == NULL_TREE)
    type = sizetype;

  /* First build a type with the base range.  */
  range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
					   TYPE_MAX_VALUE (type));

  /* Then set the actual range.  */
  SET_TYPE_RM_MIN_VALUE (range_type, convert (type, min));
  SET_TYPE_RM_MAX_VALUE (range_type, convert (type, max));

  return range_type;
}
/* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
   TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
   its data type.  */

tree
create_type_stub_decl (tree type_name, tree type)
{
  /* Using a named TYPE_DECL ensures that a type name marker is emitted in
     STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
     emitted in DWARF.  */
  tree type_decl = build_decl (input_location,
			       TYPE_DECL, type_name, type);
  DECL_ARTIFICIAL (type_decl) = 1;

  return type_decl;
}
/* Return a TYPE_DECL node.  TYPE_NAME gives the name of the type and TYPE
   is a ..._TYPE node giving its data type.  ARTIFICIAL_P is true if this
   is a declaration that was generated by the compiler.  DEBUG_INFO_P is
   true if we need to write debug information about this type.  GNAT_NODE
   is used for the position of the decl.  */

tree
create_type_decl (tree type_name, tree type, struct attrib *attr_list,
		  bool artificial_p, bool debug_info_p, Node_Id gnat_node)
{
  enum tree_code code = TREE_CODE (type);
  bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
  tree type_decl;

  /* Only the builtin TYPE_STUB_DECL should be used for dummy types.  */
  gcc_assert (!TYPE_IS_DUMMY_P (type));

  /* If the type hasn't been named yet, we're naming it; preserve an existing
     TYPE_STUB_DECL that has been attached to it for some purpose.  */
  if (!named && TYPE_STUB_DECL (type))
    {
      type_decl = TYPE_STUB_DECL (type);
      DECL_NAME (type_decl) = type_name;
    }
  else
    type_decl = build_decl (input_location,
			    TYPE_DECL, type_name, type);

  DECL_ARTIFICIAL (type_decl) = artificial_p;

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (type_decl, gnat_node);

  process_attributes (type_decl, attr_list);

  /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
     This causes the name to be also viewed as a "tag" by the debug
     back-end, with the advantage that no DW_TAG_typedef is emitted
     for artificial "tagged" types in DWARF.  */
  if (!named)
    TYPE_STUB_DECL (type) = type_decl;

  /* Pass the type declaration to the debug back-end unless this is an
     UNCONSTRAINED_ARRAY_TYPE that the back-end does not support, or a
     type for which debugging information was not requested, or else an
     ENUMERAL_TYPE or RECORD_TYPE (except for fat pointers) which are
     handled separately.  And do not pass dummy types either.  */
  if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
    DECL_IGNORED_P (type_decl) = 1;
  else if (code != ENUMERAL_TYPE
	   && (code != RECORD_TYPE || TYPE_FAT_POINTER_P (type))
	   && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
		&& TYPE_IS_DUMMY_P (TREE_TYPE (type)))
	   && !(code == RECORD_TYPE
		&& TYPE_IS_DUMMY_P
		   (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))))))
    rest_of_type_decl_compilation (type_decl);

  return type_decl;
}
/* Return a VAR_DECL or CONST_DECL node.

   VAR_NAME gives the name of the variable.  ASM_NAME is its assembler name
   (if provided).  TYPE is its data type (a GCC ..._TYPE node).  VAR_INIT is
   the GCC tree for an optional initial expression; NULL_TREE if none.

   CONST_FLAG is true if this variable is constant, in which case we might
   return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.

   PUBLIC_FLAG is true if this is for a reference to a public entity or for a
   definition to be made visible outside of the current compilation unit, for
   instance variable definitions in a package specification.

   EXTERN_FLAG is true when processing an external variable declaration (as
   opposed to a definition: no storage is to be allocated for the variable).

   STATIC_FLAG is only relevant when not at top level.  In that case
   it indicates whether to always allocate storage to the variable.

   GNAT_NODE is used for the position of the decl.  */

tree
create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
		   bool const_flag, bool public_flag, bool extern_flag,
		   bool static_flag, bool const_decl_allowed_p,
		   struct attrib *attr_list, Node_Id gnat_node)
{
  bool init_const
    = (var_init != 0
       && gnat_types_compatible_p (type, TREE_TYPE (var_init))
       && (global_bindings_p () || static_flag
	   ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
	   : TREE_CONSTANT (var_init)));

  /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
     case the initializer may be used in-lieu of the DECL node (as done in
     Identifier_to_gnu).  This is useful to prevent the need of elaboration
     code when an identifier for which such a decl is made is in turn used as
     an initializer.  We used to rely on CONST vs VAR_DECL for this purpose,
     but extra constraints apply to this choice (see below) and are not
     relevant to the distinction we wish to make.  */
  bool constant_p = const_flag && init_const;

  /* The actual DECL node.  CONST_DECL was initially intended for enumerals
     and may be used for scalars in general but not for aggregates.  */
  tree var_decl
    = build_decl (input_location,
		  (constant_p && const_decl_allowed_p
		   && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
		  var_name, type);
  /* If this is external, throw away any initializations (they will be done
     elsewhere) unless this is a constant for which we would like to remain
     able to get the initializer.  If we are defining a global here, leave a
     constant initialization and save any variable elaborations for the
     elaboration routine.  If we are just annotating types, throw away the
     initialization if it isn't a constant.  */
  if ((extern_flag && !constant_p)
      || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
    var_init = NULL_TREE;

  /* At the global level, an initializer requiring code to be generated
     produces elaboration statements.  Check that such statements are allowed,
     that is, not violating a No_Elaboration_Code restriction.  */
  if (global_bindings_p () && var_init != 0 && !init_const)
    Check_Elaboration_Code_Allowed (gnat_node);

  DECL_INITIAL (var_decl) = var_init;
  TREE_READONLY (var_decl) = const_flag;
  DECL_EXTERNAL (var_decl) = extern_flag;
  TREE_PUBLIC (var_decl) = public_flag || extern_flag;
  TREE_CONSTANT (var_decl) = constant_p;
  TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
    = TYPE_VOLATILE (type);

  /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
     try to fiddle with DECL_COMMON.  However, on platforms that don't
     support global BSS sections, uninitialized global variables would
     go in DATA instead, thus increasing the size of the executable.  */
  if (!flag_no_common
      && TREE_CODE (var_decl) == VAR_DECL
      && TREE_PUBLIC (var_decl)
      && !have_global_bss_p ())
    DECL_COMMON (var_decl) = 1;

  /* At the global binding level, we need to allocate static storage for the
     variable if it isn't external.  Otherwise, we allocate automatic storage
     unless requested not to.  */
  TREE_STATIC (var_decl)
    = !extern_flag && (static_flag || global_bindings_p ());

  /* For an external constant whose initializer is not absolute, do not emit
     debug info.  In DWARF this would mean a global relocation in a read-only
     section which runs afoul of the PE-COFF run-time relocation mechanism.  */
  if (extern_flag
      && constant_p
      && var_init
      && initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
	 != null_pointer_node)
    DECL_IGNORED_P (var_decl) = 1;

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (var_decl, gnat_node);

  if (TREE_SIDE_EFFECTS (var_decl))
    TREE_ADDRESSABLE (var_decl) = 1;

  if (TREE_CODE (var_decl) == VAR_DECL)
    {
      if (asm_name)
	SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
      process_attributes (var_decl, attr_list);
      if (global_bindings_p ())
	rest_of_decl_compilation (var_decl, true, 0);
    }
  else
    expand_decl (var_decl);

  return var_decl;
}
/* Return true if TYPE, an aggregate type, contains (or is) an array.  */

static bool
aggregate_type_contains_array_p (tree type)
{
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree field;
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (AGGREGATE_TYPE_P (TREE_TYPE (field))
	      && aggregate_type_contains_array_p (TREE_TYPE (field)))
	    return true;
	return false;
      }

    case ARRAY_TYPE:
      return true;

    default:
      gcc_unreachable ();
    }
}
/* Return a FIELD_DECL node.  FIELD_NAME is the field's name, FIELD_TYPE is
   its type and RECORD_TYPE is the type of the enclosing record.  If SIZE is
   nonzero, it is the specified size of the field.  If POS is nonzero, it is
   the bit position.  PACKED is 1 if the enclosing record is packed, -1 if it
   has Component_Alignment of Storage_Unit.  If ADDRESSABLE is nonzero, it
   means we are allowed to take the address of the field; if it is negative,
   we should not make a bitfield, which is used by make_aligning_type.  */

tree
create_field_decl (tree field_name, tree field_type, tree record_type,
		   tree size, tree pos, int packed, int addressable)
{
  tree field_decl = build_decl (input_location,
				FIELD_DECL, field_name, field_type);

  DECL_CONTEXT (field_decl) = record_type;
  TREE_READONLY (field_decl) = TYPE_READONLY (field_type);

  /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
     byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
     Likewise for an aggregate without specified position that contains an
     array, because in this case slices of variable length of this array
     must be handled by GCC and variable-sized objects need to be aligned
     to at least a byte boundary.  */
  if (packed && (TYPE_MODE (field_type) == BLKmode
		 || (!pos
		     && AGGREGATE_TYPE_P (field_type)
		     && aggregate_type_contains_array_p (field_type))))
    DECL_ALIGN (field_decl) = BITS_PER_UNIT;
  /* If a size is specified, use it.  Otherwise, if the record type is packed
     compute a size to use, which may differ from the object's natural size.
     We always set a size in this case to trigger the checks for bitfield
     creation below, which is typically required when no position has been
     specified.  */
  if (size)
    size = convert (bitsizetype, size);
  else if (packed == 1)
    {
      size = rm_size (field_type);
      if (TYPE_MODE (field_type) == BLKmode)
	size = round_up (size, BITS_PER_UNIT);
    }

  /* If we may, according to ADDRESSABLE, make a bitfield if a size is
     specified for two reasons: first if the size differs from the natural
     size.  Second, if the alignment is insufficient.  There are a number of
     ways the latter can be true.

     We never make a bitfield if the type of the field has a nonconstant size,
     because no such entity requiring bitfield operations should reach here.

     We do *preventively* make a bitfield when there might be the need for it
     but we don't have all the necessary information to decide, as is the case
     of a field with no specified position in a packed record.

     We also don't look at STRICT_ALIGNMENT here, and rely on later processing
     in layout_decl or finish_record_type to clear the bit_field indication if
     it is in fact not needed.  */
  if (addressable >= 0
      && size
      && TREE_CODE (size) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
      && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
	  || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
	  || packed
	  || (TYPE_ALIGN (record_type) != 0
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
    {
      DECL_BIT_FIELD (field_decl) = 1;
      DECL_SIZE (field_decl) = size;
      if (!packed && !pos)
	{
	  if (TYPE_ALIGN (record_type) != 0
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))
	    DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
	  else
	    DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
	}
    }

  DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
1600 /* Bump the alignment if need be, either for bitfield/packing purposes or
1601 to satisfy the type requirements if no such consideration applies. When
1602 we get the alignment from the type, indicate if this is from an explicit
1603 user request, which prevents stor-layout from lowering it later on. */
1605 unsigned int bit_align
1606 = (DECL_BIT_FIELD (field_decl
) ? 1
1607 : packed
&& TYPE_MODE (field_type
) != BLKmode
? BITS_PER_UNIT
: 0);
1609 if (bit_align
> DECL_ALIGN (field_decl
))
1610 DECL_ALIGN (field_decl
) = bit_align
;
1611 else if (!bit_align
&& TYPE_ALIGN (field_type
) > DECL_ALIGN (field_decl
))
1613 DECL_ALIGN (field_decl
) = TYPE_ALIGN (field_type
);
1614 DECL_USER_ALIGN (field_decl
) = TYPE_USER_ALIGN (field_type
);
  if (pos)
    {
      /* We need to pass in the alignment the DECL is known to have.
         This is the lowest-order bit set in POS, but no more than
         the alignment of the record, if one is specified.  Note
         that an alignment of 0 is taken as infinite.  */
      unsigned int known_align;

      if (host_integerp (pos, 1))
        known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
      else
        known_align = BITS_PER_UNIT;

      if (TYPE_ALIGN (record_type)
          && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
        known_align = TYPE_ALIGN (record_type);

      layout_decl (field_decl, known_align);
      SET_DECL_OFFSET_ALIGN (field_decl,
                             host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
                                                    : BITS_PER_UNIT);
      pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
                    &DECL_FIELD_BIT_OFFSET (field_decl),
                    DECL_OFFSET_ALIGN (field_decl), pos);
    }

  /* In addition to what our caller says, claim the field is addressable if we
     know that its type is not suitable.

     The field may also be "technically" nonaddressable, meaning that even if
     we attempt to take the field's address we will actually get the address
     of a copy.  This is the case for true bitfields, but the DECL_BIT_FIELD
     value we have at this point is not accurate enough, so we don't account
     for this here and let finish_record_type decide.  */
  if (!addressable && !type_for_nonaliased_component_p (field_type))
    addressable = 1;

  DECL_NONADDRESSABLE_P (field_decl) = !addressable;

  return field_decl;
}
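
/* Illustrative sketch (hypothetical names, not part of this unit): how a
   known alignment can be derived from a constant bit position, as the POS
   handling above does.  The lowest-order set bit of the position is the
   largest power of two dividing it, capped by the record alignment; an
   alignment of 0 stands for "infinite".  Guarded out of compilation.  */
#if 0
#include <stdio.h>

static unsigned int
known_align_from_pos (unsigned long pos, unsigned int record_align)
{
  /* The lowest-order set bit of POS is the largest power of two that
     divides it; a position of 0 is treated as infinitely aligned.  */
  unsigned int align = pos ? (unsigned int) (pos & -pos) : 0;

  /* Cap the result by the record alignment, if one is specified.  */
  if (record_align && (align == 0 || align > record_align))
    align = record_align;

  return align;
}

int
main (void)
{
  /* A field at bit 48 in a record aligned to 32 bits: 48 = 16 * 3, so the
     position alone only guarantees 16-bit alignment.  */
  printf ("%u\n", known_align_from_pos (48, 32));   /* prints 16 */
  printf ("%u\n", known_align_from_pos (64, 32));   /* prints 32 (capped) */
  return 0;
}
#endif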
/* Return a PARM_DECL node.  PARAM_NAME is the name of the parameter and
   PARAM_TYPE is its type.  READONLY is true if the parameter is readonly
   (either an In parameter or an address of a pass-by-ref parameter).  */

tree
create_param_decl (tree param_name, tree param_type, bool readonly)
{
  tree param_decl = build_decl (input_location,
                                PARM_DECL, param_name, param_type);

  /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
     can lead to various ABI violations.  */
  if (targetm.calls.promote_prototypes (NULL_TREE)
      && INTEGRAL_TYPE_P (param_type)
      && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
    {
      /* We have to be careful about biased types here.  Make a subtype
         of integer_type_node with the proper biasing.  */
      if (TREE_CODE (param_type) == INTEGER_TYPE
          && TYPE_BIASED_REPRESENTATION_P (param_type))
        {
          tree subtype
            = make_unsigned_type (TYPE_PRECISION (integer_type_node));
          TREE_TYPE (subtype) = integer_type_node;
          TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
          SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (param_type));
          SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (param_type));
          param_type = subtype;
        }
      else
        param_type = integer_type_node;
    }

  DECL_ARG_TYPE (param_decl) = param_type;
  TREE_READONLY (param_decl) = readonly;

  return param_decl;
}
/* Given a DECL and ATTR_LIST, process the listed attributes.  */

void
process_attributes (tree decl, struct attrib *attr_list)
{
  for (; attr_list; attr_list = attr_list->next)
    switch (attr_list->type)
      {
      case ATTR_MACHINE_ATTRIBUTE:
        input_location = DECL_SOURCE_LOCATION (decl);
        decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
                                           NULL_TREE),
                         ATTR_FLAG_TYPE_IN_PLACE);
        break;

      case ATTR_LINK_ALIAS:
        if (! DECL_EXTERNAL (decl))
          {
            TREE_STATIC (decl) = 1;
            assemble_alias (decl, attr_list->name);
          }
        break;

      case ATTR_WEAK_EXTERNAL:
        if (SUPPORTS_WEAK)
          declare_weak (decl);
        else
          post_error ("?weak declarations not supported on this target",
                      attr_list->error_point);
        break;

      case ATTR_LINK_SECTION:
        if (targetm.have_named_sections)
          {
            DECL_SECTION_NAME (decl)
              = build_string (IDENTIFIER_LENGTH (attr_list->name),
                              IDENTIFIER_POINTER (attr_list->name));
            DECL_COMMON (decl) = 0;
          }
        else
          post_error ("?section attributes are not supported for this target",
                      attr_list->error_point);
        break;

      case ATTR_LINK_CONSTRUCTOR:
        DECL_STATIC_CONSTRUCTOR (decl) = 1;
        TREE_USED (decl) = 1;
        break;

      case ATTR_LINK_DESTRUCTOR:
        DECL_STATIC_DESTRUCTOR (decl) = 1;
        TREE_USED (decl) = 1;
        break;

      case ATTR_THREAD_LOCAL_STORAGE:
        DECL_TLS_MODEL (decl) = decl_default_tls_model (decl);
        DECL_COMMON (decl) = 0;
        break;
      }
}
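
/* Illustrative sketch (hypothetical names and kinds, not part of this unit):
   the shape of the attribute walk above, i.e. iterating a singly-linked
   attribute list and dispatching on its kind to set flags on a declaration.
   Guarded out of compilation.  */
#if 0
#include <stdio.h>

enum attr_kind { ATTR_ALIAS, ATTR_SECTION, ATTR_CONSTRUCTOR };

struct attr
{
  enum attr_kind kind;
  const char *name;
  struct attr *next;
};

struct mock_decl
{
  const char *section;
  int is_constructor;
};

static void
apply_attributes (struct mock_decl *decl, struct attr *list)
{
  /* Walk the list front to back, one switch per attribute.  */
  for (; list; list = list->next)
    switch (list->kind)
      {
      case ATTR_SECTION:
        decl->section = list->name;
        break;
      case ATTR_CONSTRUCTOR:
        decl->is_constructor = 1;
        break;
      default:
        break;
      }
}

int
main (void)
{
  struct attr a2 = { ATTR_CONSTRUCTOR, 0, 0 };
  struct attr a1 = { ATTR_SECTION, ".init_array", &a2 };
  struct mock_decl d = { 0, 0 };

  apply_attributes (&d, &a1);
  printf ("%s %d\n", d.section, d.is_constructor);  /* .init_array 1 */
  return 0;
}
#endif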
/* Record DECL as a global renaming pointer.  */

void
record_global_renaming_pointer (tree decl)
{
  gcc_assert (DECL_RENAMED_OBJECT (decl));
  VEC_safe_push (tree, gc, global_renaming_pointers, decl);
}

/* Invalidate the global renaming pointers.  */

void
invalidate_global_renaming_pointers (void)
{
  unsigned int i;
  tree iter;

  FOR_EACH_VEC_ELT (tree, global_renaming_pointers, i, iter)
    SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);

  VEC_free (tree, gc, global_renaming_pointers);
}
/* Return true if VALUE is known to be a multiple of FACTOR, which must be
   a power of 2.  */

bool
value_factor_p (tree value, HOST_WIDE_INT factor)
{
  if (host_integerp (value, 1))
    return tree_low_cst (value, 1) % factor == 0;

  if (TREE_CODE (value) == MULT_EXPR)
    return (value_factor_p (TREE_OPERAND (value, 0), factor)
            || value_factor_p (TREE_OPERAND (value, 1), factor));

  return false;
}
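
/* Illustrative sketch (hypothetical names, not part of this unit): the idea
   behind the divisibility test above, in plain C.  A constant is a multiple
   of FACTOR iff the remainder is zero, and a product is known to be a
   multiple if either operand already is.  Guarded out of compilation.  */
#if 0
#include <stdio.h>

static int
const_factor_p (unsigned long value, unsigned long factor)
{
  return value % factor == 0;
}

static int
product_factor_p (unsigned long op0, unsigned long op1, unsigned long factor)
{
  /* Mirrors the MULT_EXPR case: op0 * op1 is a multiple of FACTOR if
     either operand already is.  */
  return const_factor_p (op0, factor) || const_factor_p (op1, factor);
}

int
main (void)
{
  printf ("%d\n", const_factor_p (24, 8));        /* 1 */
  printf ("%d\n", product_factor_p (3, 16, 8));   /* 1: 3 * 16 = 48 */
  printf ("%d\n", product_factor_p (3, 5, 8));    /* 0: not provable */
  return 0;
}
#endif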
/* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
   unless we can prove these 2 fields are laid out in such a way that no gap
   exists between the end of PREV_FIELD and the beginning of CURR_FIELD.
   OFFSET is the distance in bits between the end of PREV_FIELD and the
   starting position of CURR_FIELD.  It is ignored if null.  */

static bool
potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
{
  /* If this is the first field of the record, there cannot be any gap.  */
  if (!prev_field)
    return false;

  /* If the previous field is a union type, then return false: the only
     time when such a field is not the last field of the record is when
     there are other components at fixed positions after it (meaning there
     was a rep clause for every field), in which case we don't want the
     alignment constraint to override them.  */
  if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
    return false;

  /* If the distance between the end of prev_field and the beginning of
     curr_field is constant, then there is a gap if the value of this
     constant is not null.  */
  if (offset && host_integerp (offset, 1))
    return !integer_zerop (offset);

  /* If the size and position of the previous field are constant,
     then check the sum of this size and position.  There will be a gap
     iff it is not a multiple of the current field alignment.  */
  if (host_integerp (DECL_SIZE (prev_field), 1)
      && host_integerp (bit_position (prev_field), 1))
    return ((tree_low_cst (bit_position (prev_field), 1)
             + tree_low_cst (DECL_SIZE (prev_field), 1))
            % DECL_ALIGN (curr_field) != 0);

  /* If both the position and size of the previous field are multiples
     of the current field alignment, there cannot be any gap.  */
  if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
      && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
    return false;

  /* Fallback, return that there may be a potential gap.  */
  return true;
}
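
/* Illustrative sketch (hypothetical names, not part of this unit): the
   constant case of the gap test above.  With a constant size and position
   for the previous field, the next field can be placed without padding iff
   (position + size) is a multiple of its alignment.  Guarded out of
   compilation.  */
#if 0
#include <stdio.h>

static int
gap_after_p (unsigned long prev_bitpos, unsigned long prev_bitsize,
             unsigned long curr_align)
{
  return (prev_bitpos + prev_bitsize) % curr_align != 0;
}

int
main (void)
{
  /* Previous field: 8 bits at bit 0; next field aligned to 16 bits.  */
  printf ("%d\n", gap_after_p (0, 8, 16));   /* 1: bits 8..15 are padding */
  /* Previous field: 16 bits at bit 0; same alignment.  */
  printf ("%d\n", gap_after_p (0, 16, 16));  /* 0: no gap needed */
  return 0;
}
#endif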
/* Returns a LABEL_DECL node for LABEL_NAME.  */

tree
create_label_decl (tree label_name)
{
  tree label_decl = build_decl (input_location,
                                LABEL_DECL, label_name, void_type_node);

  DECL_CONTEXT (label_decl) = current_function_decl;
  DECL_MODE (label_decl) = VOIDmode;
  DECL_SOURCE_LOCATION (label_decl) = input_location;

  return label_decl;
}
/* Returns a FUNCTION_DECL node.  SUBPROG_NAME is the name of the subprogram,
   ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
   node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
   PARM_DECL nodes chained through the TREE_CHAIN field).

   INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
   appropriate fields in the FUNCTION_DECL.  GNAT_NODE gives the location.  */

tree
create_subprog_decl (tree subprog_name, tree asm_name,
                     tree subprog_type, tree param_decl_list, bool inline_flag,
                     bool public_flag, bool extern_flag,
                     struct attrib *attr_list, Node_Id gnat_node)
{
  tree subprog_decl = build_decl (input_location, FUNCTION_DECL, subprog_name,
                                  subprog_type);
  tree result_decl = build_decl (input_location, RESULT_DECL, NULL_TREE,
                                 TREE_TYPE (subprog_type));

  /* If this is a non-inline function nested inside an inlined external
     function, we cannot honor both requests without cloning the nested
     function in the current unit since it is private to the other unit.
     We could inline the nested function as well but it's probably better
     to err on the side of too little inlining.  */
  if (!inline_flag
      && current_function_decl
      && DECL_DECLARED_INLINE_P (current_function_decl)
      && DECL_EXTERNAL (current_function_decl))
    DECL_DECLARED_INLINE_P (current_function_decl) = 0;

  DECL_EXTERNAL (subprog_decl) = extern_flag;
  TREE_PUBLIC (subprog_decl) = public_flag;
  TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
  TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
  TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
  DECL_DECLARED_INLINE_P (subprog_decl) = inline_flag;
  DECL_ARGUMENTS (subprog_decl) = param_decl_list;

  DECL_ARTIFICIAL (result_decl) = 1;
  DECL_IGNORED_P (result_decl) = 1;
  DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (subprog_type);
  DECL_RESULT (subprog_decl) = result_decl;

  if (asm_name)
    {
      SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);

      /* The expand_main_function circuitry expects "main_identifier_node" to
         designate the DECL_NAME of the 'main' entry point, in turn expected
         to be declared as the "main" function literally by default.  Ada
         program entry points are typically declared with a different name
         within the binder generated file, exported as 'main' to satisfy the
         system expectations.  Force main_identifier_node in this case.  */
      if (asm_name == main_identifier_node)
        DECL_NAME (subprog_decl) = main_identifier_node;
    }

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (subprog_decl, gnat_node);

  process_attributes (subprog_decl, attr_list);

  /* Output the assembler code and/or RTL for the declaration.  */
  rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);

  return subprog_decl;
}
/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
   body.  This routine needs to be invoked before processing the declarations
   appearing in the subprogram.  */

void
begin_subprog_body (tree subprog_decl)
{
  tree param_decl;

  announce_function (subprog_decl);

  /* This function is being defined.  */
  TREE_STATIC (subprog_decl) = 1;

  current_function_decl = subprog_decl;

  /* Enter a new binding level and show that all the parameters belong to
     this function.  */
  gnat_pushlevel ();

  for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
       param_decl = DECL_CHAIN (param_decl))
    DECL_CONTEXT (param_decl) = subprog_decl;

  make_decl_rtl (subprog_decl);

  /* We handle pending sizes via the elaboration of types, so we don't need to
     save them.  This causes them to be marked as part of the outer function
     and then discarded.  */
  get_pending_sizes ();
}
/* Finish the definition of the current subprogram BODY and finalize it.  */

void
end_subprog_body (tree body)
{
  tree fndecl = current_function_decl;

  /* Attach the BLOCK for this level to the function and pop the level.  */
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  gnat_poplevel ();

  /* We handle pending sizes via the elaboration of types, so we don't
     need to save them.  */
  get_pending_sizes ();

  /* Mark the RESULT_DECL as being in this subprogram.  */
  DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;

  /* The body should be a BIND_EXPR whose BLOCK is the top-level one.  */
  if (TREE_CODE (body) == BIND_EXPR)
    {
      BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
      DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
    }

  DECL_SAVED_TREE (fndecl) = body;

  current_function_decl = DECL_CONTEXT (fndecl);

  /* We cannot track the location of errors past this point.  */
  error_gnat_node = Empty;

  /* If we're only annotating types, don't actually compile this function.  */
  if (type_annotate_only)
    return;

  /* Dump functions before gimplification.  */
  dump_function (TDI_original, fndecl);

  /* ??? This special handling of nested functions is probably obsolete.  */
  if (!DECL_CONTEXT (fndecl))
    cgraph_finalize_function (fndecl, false);
  else
    /* Register this function with cgraph just far enough to get it
       added to our parent's nested function list.  */
    (void) cgraph_get_create_node (fndecl);
}

tree
gnat_builtin_function (tree decl)
{
  gnat_pushdecl (decl, Empty);
  return decl;
}
/* Return an integer type with the number of bits of precision given by
   PRECISION.  UNSIGNEDP is nonzero if the type is unsigned; otherwise
   it is a signed type.  */

tree
gnat_type_for_size (unsigned precision, int unsignedp)
{
  tree t;
  char type_name[20];

  if (precision <= 2 * MAX_BITS_PER_WORD
      && signed_and_unsigned_types[precision][unsignedp])
    return signed_and_unsigned_types[precision][unsignedp];

  if (unsignedp)
    t = make_unsigned_type (precision);
  else
    t = make_signed_type (precision);

  if (precision <= 2 * MAX_BITS_PER_WORD)
    signed_and_unsigned_types[precision][unsignedp] = t;

  if (!TYPE_NAME (t))
    {
      sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
      TYPE_NAME (t) = get_identifier (type_name);
    }

  return t;
}
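
/* Illustrative sketch (hypothetical cache and payload types, not part of
   this unit): the memoization pattern used above, i.e. a table indexed by
   [precision][signedness] so that repeated requests hand back the same
   object.  Guarded out of compilation.  */
#if 0
#include <stdio.h>
#include <stdlib.h>

#define MAX_PREC 128

struct int_type { unsigned precision; int unsignedp; };

static struct int_type *cache[MAX_PREC + 1][2];

static struct int_type *
type_for_size (unsigned precision, int unsignedp)
{
  struct int_type *t;

  /* Hit: the exact same object is returned for the same request.  */
  if (precision <= MAX_PREC && cache[precision][unsignedp])
    return cache[precision][unsignedp];

  /* Miss: build a fresh object and remember it when it fits the table.  */
  t = malloc (sizeof *t);
  t->precision = precision;
  t->unsignedp = unsignedp;

  if (precision <= MAX_PREC)
    cache[precision][unsignedp] = t;
  return t;
}

int
main (void)
{
  struct int_type *a = type_for_size (32, 1);
  struct int_type *b = type_for_size (32, 1);
  printf ("%d\n", a == b);  /* 1: second request hits the cache */
  return 0;
}
#endif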
/* Likewise for floating-point types.  */

static tree
float_type_for_precision (int precision, enum machine_mode mode)
{
  tree t;
  char type_name[20];

  if (float_types[(int) mode])
    return float_types[(int) mode];

  float_types[(int) mode] = t = make_node (REAL_TYPE);
  TYPE_PRECISION (t) = precision;
  layout_type (t);

  gcc_assert (TYPE_MODE (t) == mode);

  if (!TYPE_NAME (t))
    {
      sprintf (type_name, "FLOAT_%d", precision);
      TYPE_NAME (t) = get_identifier (type_name);
    }

  return t;
}
/* Return a data type that has machine mode MODE.  UNSIGNEDP selects
   an unsigned type; otherwise a signed type is returned.  */

tree
gnat_type_for_mode (enum machine_mode mode, int unsignedp)
{
  if (mode == BLKmode)
    return NULL_TREE;

  if (mode == VOIDmode)
    return void_type_node;

  if (COMPLEX_MODE_P (mode))
    return NULL_TREE;

  if (SCALAR_FLOAT_MODE_P (mode))
    return float_type_for_precision (GET_MODE_PRECISION (mode), mode);

  if (SCALAR_INT_MODE_P (mode))
    return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);

  if (VECTOR_MODE_P (mode))
    {
      enum machine_mode inner_mode = GET_MODE_INNER (mode);
      tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
      if (inner_type)
        return build_vector_type_for_mode (inner_type, mode);
    }

  return NULL_TREE;
}
/* Return the unsigned version of a TYPE_NODE, a scalar type.  */

tree
gnat_unsigned_type (tree type_node)
{
  tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);

  if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
    {
      type = copy_node (type);
      TREE_TYPE (type) = type_node;
    }
  else if (TREE_TYPE (type_node)
           && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
           && TYPE_MODULAR_P (TREE_TYPE (type_node)))
    {
      type = copy_node (type);
      TREE_TYPE (type) = TREE_TYPE (type_node);
    }

  return type;
}

/* Return the signed version of a TYPE_NODE, a scalar type.  */

tree
gnat_signed_type (tree type_node)
{
  tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);

  if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
    {
      type = copy_node (type);
      TREE_TYPE (type) = type_node;
    }
  else if (TREE_TYPE (type_node)
           && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
           && TYPE_MODULAR_P (TREE_TYPE (type_node)))
    {
      type = copy_node (type);
      TREE_TYPE (type) = TREE_TYPE (type_node);
    }

  return type;
}
/* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
   transparently converted to each other.  */

int
gnat_types_compatible_p (tree t1, tree t2)
{
  enum tree_code code;

  /* This is the default criterion.  */
  if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
    return 1;

  /* We only check structural equivalence here.  */
  if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
    return 0;

  /* Vector types are also compatible if they have the same number of subparts
     and the same form of (scalar) element type.  */
  if (code == VECTOR_TYPE
      && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
      && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
      && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
    return 1;

  /* Array types are also compatible if they are constrained and have the same
     domain(s) and the same component type.  */
  if (code == ARRAY_TYPE
      && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
          || (TYPE_DOMAIN (t1)
              && TYPE_DOMAIN (t2)
              && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
                                     TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
              && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
                                     TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
      && (TREE_TYPE (t1) == TREE_TYPE (t2)
          || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
              && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))))
    return 1;

  /* Padding record types are also compatible if they pad the same
     type and have the same constant size.  */
  if (code == RECORD_TYPE
      && TYPE_PADDING_P (t1) && TYPE_PADDING_P (t2)
      && TREE_TYPE (TYPE_FIELDS (t1)) == TREE_TYPE (TYPE_FIELDS (t2))
      && tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2)))
    return 1;

  return 0;
}
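
/* Illustrative sketch (hypothetical descriptor type, not part of this unit):
   the array branch of the compatibility test above, reduced to plain data.
   Two constrained arrays are treated as compatible when their bounds match
   and their element kinds match.  Guarded out of compilation.  */
#if 0
#include <stdio.h>

struct array_desc
{
  long low, high;      /* index bounds */
  int elem_kind;       /* stand-in for the component type */
};

static int
arrays_compatible_p (const struct array_desc *a, const struct array_desc *b)
{
  return a->low == b->low
         && a->high == b->high
         && a->elem_kind == b->elem_kind;
}

int
main (void)
{
  struct array_desc s1 = { 1, 10, 42 };
  struct array_desc s2 = { 1, 10, 42 };
  struct array_desc s3 = { 0, 9, 42 };

  printf ("%d %d\n", arrays_compatible_p (&s1, &s2),   /* 1 */
          arrays_compatible_p (&s1, &s3));             /* 0 */
  return 0;
}
#endif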
/* Return true if T, a FUNCTION_TYPE, has the specified list of flags.  */

bool
fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
                     bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
{
  return TYPE_CI_CO_LIST (t) == cico_list
         && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
         && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
         && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
}
/* EXP is an expression for the size of an object.  If this size contains
   discriminant references, replace them with the maximum (if MAX_P) or
   minimum (if !MAX_P) possible value of the discriminant.  */

tree
max_size (tree exp, bool max_p)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
    case tcc_constant:
      return exp;

    case tcc_vl_exp:
      if (code == CALL_EXPR)
        {
          tree t, *argarray;
          int n, i;

          t = maybe_inline_call_in_expr (exp);
          if (t)
            return max_size (t, max_p);

          n = call_expr_nargs (exp);
          gcc_assert (n > 0);
          argarray = XALLOCAVEC (tree, n);
          for (i = 0; i < n; i++)
            argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
          return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
        }
      break;

    case tcc_reference:
      /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
         modify.  Otherwise, we treat it like a variable.  */
      if (!CONTAINS_PLACEHOLDER_P (exp))
        return exp;

      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      return
        max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);

    case tcc_comparison:
      return max_p ? size_one_node : size_zero_node;

    case tcc_unary:
    case tcc_binary:
    case tcc_expression:
      switch (TREE_CODE_LENGTH (code))
        {
        case 1:
          if (code == NON_LVALUE_EXPR)
            return max_size (TREE_OPERAND (exp, 0), max_p);
          else
            return
              fold_build1 (code, type,
                           max_size (TREE_OPERAND (exp, 0),
                                     code == NEGATE_EXPR ? !max_p : max_p));

        case 2:
          if (code == COMPOUND_EXPR)
            return max_size (TREE_OPERAND (exp, 1), max_p);

          {
            tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
            tree rhs = max_size (TREE_OPERAND (exp, 1),
                                 code == MINUS_EXPR ? !max_p : max_p);

            /* Special-case wanting the maximum value of a MIN_EXPR.
               In that case, if one side overflows, return the other.
               sizetype is signed, but we know sizes are non-negative.
               Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
               overflowing and the RHS a variable.  */
            if (max_p
                && code == MIN_EXPR
                && TREE_CODE (rhs) == INTEGER_CST
                && TREE_OVERFLOW (rhs))
              return lhs;
            else if (max_p
                     && code == MIN_EXPR
                     && TREE_CODE (lhs) == INTEGER_CST
                     && TREE_OVERFLOW (lhs))
              return rhs;
            else if ((code == MINUS_EXPR || code == PLUS_EXPR)
                     && TREE_CODE (lhs) == INTEGER_CST
                     && TREE_OVERFLOW (lhs)
                     && !TREE_CONSTANT (rhs))
              return lhs;
            else
              return fold_build2 (code, type, lhs, rhs);
          }

        case 3:
          if (code == SAVE_EXPR)
            return exp;
          else if (code == COND_EXPR)
            return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
                                max_size (TREE_OPERAND (exp, 1), max_p),
                                max_size (TREE_OPERAND (exp, 2), max_p));
        }

      /* Other tree classes cannot happen.  */
    default:
      break;
    }

  gcc_unreachable ();
}
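
/* Illustrative sketch (hypothetical names, not part of this unit): why the
   subtraction case above flips MAX_P for the right-hand side.  An upper
   bound of a - b is obtained from an upper bound of a and a *lower* bound
   of b, and symmetrically for the lower bound.  Guarded out of compilation. */
#if 0
#include <stdio.h>

struct range { long lo, hi; };

static long
bound_of_difference (struct range a, struct range b, int max_p)
{
  /* For the maximum, take max(a) - min(b); for the minimum, the converse.  */
  return max_p ? a.hi - b.lo : a.lo - b.hi;
}

int
main (void)
{
  struct range a = { 4, 10 };   /* a discriminant-dependent size */
  struct range b = { 1, 3 };    /* another one */

  printf ("%ld\n", bound_of_difference (a, b, 1));  /* 9 = 10 - 1 */
  printf ("%ld\n", bound_of_difference (a, b, 0));  /* 1 = 4 - 3  */
  return 0;
}
#endif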
/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
   EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
   Return a constructor for the template.  */

tree
build_template (tree template_type, tree array_type, tree expr)
{
  VEC(constructor_elt,gc) *template_elts = NULL;
  tree bound_list = NULL_TREE;
  tree field;

  while (TREE_CODE (array_type) == RECORD_TYPE
         && (TYPE_PADDING_P (array_type)
             || TYPE_JUSTIFIED_MODULAR_P (array_type)))
    array_type = TREE_TYPE (TYPE_FIELDS (array_type));

  if (TREE_CODE (array_type) == ARRAY_TYPE
      || (TREE_CODE (array_type) == INTEGER_TYPE
          && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
    bound_list = TYPE_ACTUAL_BOUNDS (array_type);

  /* First make the list for a CONSTRUCTOR for the template.  Go down the
     field list of the template instead of the type chain because this
     array might be an Ada array of arrays and we can't tell where the
     nested arrays stop being the underlying object.  */

  for (field = TYPE_FIELDS (template_type); field;
       (bound_list
        ? (bound_list = TREE_CHAIN (bound_list))
        : (array_type = TREE_TYPE (array_type))),
       field = DECL_CHAIN (DECL_CHAIN (field)))
    {
      tree bounds, min, max;

      /* If we have a bound list, get the bounds from there.  Likewise
         for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
         DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
         This will give us a maximum range.  */
      if (bound_list)
        bounds = TREE_VALUE (bound_list);
      else if (TREE_CODE (array_type) == ARRAY_TYPE)
        bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
      else if (expr && TREE_CODE (expr) == PARM_DECL
               && DECL_BY_COMPONENT_PTR_P (expr))
        bounds = TREE_TYPE (field);
      else
        gcc_unreachable ();

      min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
      max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));

      /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
         substitute it from OBJECT.  */
      min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
      max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);

      CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
      CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
    }

  return gnat_build_constructor (template_type, template_elts);
}
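
/* Illustrative sketch (hypothetical structures, not part of this unit): the
   shape of the template built above, i.e. one {low, high} pair per array
   dimension, copied from the bounds of the corresponding index type while
   walking the template fields two by two.  Guarded out of compilation.  */
#if 0
#include <stdio.h>

#define NDIM 2

struct bound_pair { long low, high; };
struct template_2d { struct bound_pair dim[NDIM]; };

static struct template_2d
make_template (const struct bound_pair bounds[NDIM])
{
  struct template_2d t;
  int i;

  /* One pair per dimension: a minimum field followed by a maximum field.  */
  for (i = 0; i < NDIM; i++)
    t.dim[i] = bounds[i];

  return t;
}

int
main (void)
{
  struct bound_pair bounds[NDIM] = { { 1, 3 }, { 1, 5 } };
  struct template_2d t = make_template (bounds);

  printf ("%ld..%ld, %ld..%ld\n",
          t.dim[0].low, t.dim[0].high, t.dim[1].low, t.dim[1].high);
  return 0;
}
#endif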
/* Helper routine to make a descriptor field.  FIELD_LIST is the list of decls
   being built; the new decl is chained on to the front of the list.  */

static tree
make_descriptor_field (const char *name, tree type, tree rec_type,
                       tree initial, tree field_list)
{
  tree field
    = create_field_decl (get_identifier (name), type, rec_type, NULL_TREE,
                         NULL_TREE, 0, 0);

  DECL_INITIAL (field) = initial;
  DECL_CHAIN (field) = field_list;
  return field;
}
2403 /* Build a 32-bit VMS descriptor from a Mechanism_Type, which must specify a
2404 descriptor type, and the GCC type of an object. Each FIELD_DECL in the
2405 type contains in its DECL_INITIAL the expression to use when a constructor
2406 is made for the type. GNAT_ENTITY is an entity used to print out an error
2407 message if the mechanism cannot be applied to an object of that type and
2408 also for the name. */
2411 build_vms_descriptor32 (tree type
, Mechanism_Type mech
, Entity_Id gnat_entity
)
2413 tree record_type
= make_node (RECORD_TYPE
);
2414 tree pointer32_type
, pointer64_type
;
2415 tree field_list
= NULL_TREE
;
2416 int klass
, ndim
, i
, dtype
= 0;
2417 tree inner_type
, tem
;
2420 /* If TYPE is an unconstrained array, use the underlying array type. */
2421 if (TREE_CODE (type
) == UNCONSTRAINED_ARRAY_TYPE
)
2422 type
= TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type
))));
2424 /* If this is an array, compute the number of dimensions in the array,
2425 get the index types, and point to the inner type. */
2426 if (TREE_CODE (type
) != ARRAY_TYPE
)
2429 for (ndim
= 1, inner_type
= type
;
2430 TREE_CODE (TREE_TYPE (inner_type
)) == ARRAY_TYPE
2431 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type
));
2432 ndim
++, inner_type
= TREE_TYPE (inner_type
))
2435 idx_arr
= XALLOCAVEC (tree
, ndim
);
2437 if (mech
!= By_Descriptor_NCA
&& mech
!= By_Short_Descriptor_NCA
2438 && TREE_CODE (type
) == ARRAY_TYPE
&& TYPE_CONVENTION_FORTRAN_P (type
))
2439 for (i
= ndim
- 1, inner_type
= type
;
2441 i
--, inner_type
= TREE_TYPE (inner_type
))
2442 idx_arr
[i
] = TYPE_DOMAIN (inner_type
);
2444 for (i
= 0, inner_type
= type
;
2446 i
++, inner_type
= TREE_TYPE (inner_type
))
2447 idx_arr
[i
] = TYPE_DOMAIN (inner_type
);
2449 /* Now get the DTYPE value. */
2450 switch (TREE_CODE (type
))
2455 if (TYPE_VAX_FLOATING_POINT_P (type
))
2456 switch (tree_low_cst (TYPE_DIGITS_VALUE (type
), 1))
2469 switch (GET_MODE_BITSIZE (TYPE_MODE (type
)))
2472 dtype
= TYPE_UNSIGNED (type
) ? 2 : 6;
2475 dtype
= TYPE_UNSIGNED (type
) ? 3 : 7;
2478 dtype
= TYPE_UNSIGNED (type
) ? 4 : 8;
2481 dtype
= TYPE_UNSIGNED (type
) ? 5 : 9;
2484 dtype
= TYPE_UNSIGNED (type
) ? 25 : 26;
2490 dtype
= GET_MODE_BITSIZE (TYPE_MODE (type
)) == 32 ? 52 : 53;
2494 if (TREE_CODE (TREE_TYPE (type
)) == INTEGER_TYPE
2495 && TYPE_VAX_FLOATING_POINT_P (type
))
2496 switch (tree_low_cst (TYPE_DIGITS_VALUE (type
), 1))
2508 dtype
= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type
))) == 32 ? 54: 55;
2519 /* Get the CLASS value. */
2522 case By_Descriptor_A
:
2523 case By_Short_Descriptor_A
:
2526 case By_Descriptor_NCA
:
2527 case By_Short_Descriptor_NCA
:
2530 case By_Descriptor_SB
:
2531 case By_Short_Descriptor_SB
:
2535 case By_Short_Descriptor
:
2536 case By_Descriptor_S
:
2537 case By_Short_Descriptor_S
:
2543 /* Make the type for a descriptor for VMS. The first four fields are the
2544 same for all types. */
2546 = make_descriptor_field ("LENGTH", gnat_type_for_size (16, 1), record_type
,
2547 size_in_bytes ((mech
== By_Descriptor_A
2548 || mech
== By_Short_Descriptor_A
)
2549 ? inner_type
: type
),
2552 = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1), record_type
,
2553 size_int (dtype
), field_list
);
2555 = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1), record_type
,
2556 size_int (klass
), field_list
);
2558 pointer32_type
= build_pointer_type_for_mode (type
, SImode
, false);
2559 pointer64_type
= build_pointer_type_for_mode (type
, DImode
, false);
2561 /* Ensure that only 32-bit pointers are passed in 32-bit descriptors. Note
2562 that we cannot build a template call to the CE routine as it would get a
2563 wrong source location; instead we use a second placeholder for it. */
2564 tem
= build_unary_op (ADDR_EXPR
, pointer64_type
,
2565 build0 (PLACEHOLDER_EXPR
, type
));
2566 tem
= build3 (COND_EXPR
, pointer32_type
,
2567 build_binary_op (GE_EXPR
, boolean_type_node
, tem
,
2568 build_int_cstu (pointer64_type
, 0x80000000)),
2569 build0 (PLACEHOLDER_EXPR
, void_type_node
),
2570 convert (pointer32_type
, tem
));
2573 = make_descriptor_field ("POINTER", pointer32_type
, record_type
, tem
,
2579 case By_Short_Descriptor
:
2580 case By_Descriptor_S
:
2581 case By_Short_Descriptor_S
:
2584 case By_Descriptor_SB
:
2585 case By_Short_Descriptor_SB
:
2587 = make_descriptor_field ("SB_L1", gnat_type_for_size (32, 1),
2589 (TREE_CODE (type
) == ARRAY_TYPE
2590 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type
))
2594 = make_descriptor_field ("SB_U1", gnat_type_for_size (32, 1),
2596 (TREE_CODE (type
) == ARRAY_TYPE
2597 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type
))
2602 case By_Descriptor_A
:
2603 case By_Short_Descriptor_A
:
2604 case By_Descriptor_NCA
:
2605 case By_Short_Descriptor_NCA
:
2607 = make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
2608 record_type
, size_zero_node
, field_list
);
2611 = make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
2612 record_type
, size_zero_node
, field_list
);
2615 = make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
2617 size_int ((mech
== By_Descriptor_NCA
2618 || mech
== By_Short_Descriptor_NCA
)
2620 /* Set FL_COLUMN, FL_COEFF, and
2622 : (TREE_CODE (type
) == ARRAY_TYPE
2623 && TYPE_CONVENTION_FORTRAN_P
2629 = make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
2630 record_type
, size_int (ndim
), field_list
);
2633 = make_descriptor_field ("ARSIZE", gnat_type_for_size (32, 1),
2634 record_type
, size_in_bytes (type
),
2637 /* Now build a pointer to the 0,0,0... element. */
2638 tem
= build0 (PLACEHOLDER_EXPR
, type
);
2639 for (i
= 0, inner_type
= type
; i
< ndim
;
2640 i
++, inner_type
= TREE_TYPE (inner_type
))
2641 tem
= build4 (ARRAY_REF
, TREE_TYPE (inner_type
), tem
,
2642 convert (TYPE_DOMAIN (inner_type
), size_zero_node
),
2643 NULL_TREE
, NULL_TREE
);
2646 = make_descriptor_field ("A0", pointer32_type
, record_type
,
2647 build1 (ADDR_EXPR
, pointer32_type
, tem
),
2650 /* Next come the addressing coefficients. */
2651 tem
= size_one_node
;
2652 for (i
= 0; i
< ndim
; i
++)
2656 = size_binop (MULT_EXPR
, tem
,
2657 size_binop (PLUS_EXPR
,
2658 size_binop (MINUS_EXPR
,
2659 TYPE_MAX_VALUE (idx_arr
[i
]),
2660 TYPE_MIN_VALUE (idx_arr
[i
])),
2663 fname
[0] = ((mech
== By_Descriptor_NCA
||
2664 mech
== By_Short_Descriptor_NCA
) ? 'S' : 'M');
2665 fname
[1] = '0' + i
, fname
[2] = 0;
2667 = make_descriptor_field (fname
, gnat_type_for_size (32, 1),
2668 record_type
, idx_length
, field_list
);
2670 if (mech
== By_Descriptor_NCA
|| mech
== By_Short_Descriptor_NCA
)
2674 /* Finally here are the bounds. */
2675 for (i
= 0; i
< ndim
; i
++)
2679 fname
[0] = 'L', fname
[1] = '0' + i
, fname
[2] = 0;
2681 = make_descriptor_field (fname
, gnat_type_for_size (32, 1),
2682 record_type
, TYPE_MIN_VALUE (idx_arr
[i
]),
2687 = make_descriptor_field (fname
, gnat_type_for_size (32, 1),
2688 record_type
, TYPE_MAX_VALUE (idx_arr
[i
]),
2694 post_error ("unsupported descriptor type for &", gnat_entity
);
2697 TYPE_NAME (record_type
) = create_concat_name (gnat_entity
, "DESC");
2698 finish_record_type (record_type
, nreverse (field_list
), 0, false);
2702 /* Build a 64-bit VMS descriptor from a Mechanism_Type, which must specify a
2703 descriptor type, and the GCC type of an object. Each FIELD_DECL in the
2704 type contains in its DECL_INITIAL the expression to use when a constructor
2705 is made for the type. GNAT_ENTITY is an entity used to print out an error
2706 message if the mechanism cannot be applied to an object of that type and
2707 also for the name. */
2710 build_vms_descriptor (tree type
, Mechanism_Type mech
, Entity_Id gnat_entity
)
2712 tree record_type
= make_node (RECORD_TYPE
);
2713 tree pointer64_type
;
2714 tree field_list
= NULL_TREE
;
2715 int klass
, ndim
, i
, dtype
= 0;
2716 tree inner_type
, tem
;
2719 /* If TYPE is an unconstrained array, use the underlying array type. */
2720 if (TREE_CODE (type
) == UNCONSTRAINED_ARRAY_TYPE
)
2721 type
= TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type
))));
2723 /* If this is an array, compute the number of dimensions in the array,
2724 get the index types, and point to the inner type. */
2725 if (TREE_CODE (type
) != ARRAY_TYPE
)
2728 for (ndim
= 1, inner_type
= type
;
2729 TREE_CODE (TREE_TYPE (inner_type
)) == ARRAY_TYPE
2730 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type
));
2731 ndim
++, inner_type
= TREE_TYPE (inner_type
))
2734 idx_arr
= XALLOCAVEC (tree
, ndim
);
2736 if (mech
!= By_Descriptor_NCA
2737 && TREE_CODE (type
) == ARRAY_TYPE
&& TYPE_CONVENTION_FORTRAN_P (type
))
2738 for (i
= ndim
- 1, inner_type
= type
;
2740 i
--, inner_type
= TREE_TYPE (inner_type
))
2741 idx_arr
[i
] = TYPE_DOMAIN (inner_type
);
2743 for (i
= 0, inner_type
= type
;
2745 i
++, inner_type
= TREE_TYPE (inner_type
))
2746 idx_arr
[i
] = TYPE_DOMAIN (inner_type
);
2748 /* Now get the DTYPE value. */
2749 switch (TREE_CODE (type
))
2754 if (TYPE_VAX_FLOATING_POINT_P (type
))
2755 switch (tree_low_cst (TYPE_DIGITS_VALUE (type
), 1))
2768 switch (GET_MODE_BITSIZE (TYPE_MODE (type
)))
2771 dtype
= TYPE_UNSIGNED (type
) ? 2 : 6;
2774 dtype
= TYPE_UNSIGNED (type
) ? 3 : 7;
2777 dtype
= TYPE_UNSIGNED (type
) ? 4 : 8;
2780 dtype
= TYPE_UNSIGNED (type
) ? 5 : 9;
2783 dtype
= TYPE_UNSIGNED (type
) ? 25 : 26;
2789 dtype
= GET_MODE_BITSIZE (TYPE_MODE (type
)) == 32 ? 52 : 53;
2793 if (TREE_CODE (TREE_TYPE (type
)) == INTEGER_TYPE
2794 && TYPE_VAX_FLOATING_POINT_P (type
))
2795 switch (tree_low_cst (TYPE_DIGITS_VALUE (type
), 1))
2807 dtype
= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type
))) == 32 ? 54: 55;
2818 /* Get the CLASS value. */
2821 case By_Descriptor_A
:
2824 case By_Descriptor_NCA
:
2827 case By_Descriptor_SB
:
2831 case By_Descriptor_S
:
2837 /* Make the type for a 64-bit descriptor for VMS. The first six fields
2838 are the same for all types. */
2840 = make_descriptor_field ("MBO", gnat_type_for_size (16, 1),
2841 record_type
, size_int (1), field_list
);
2843 = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1),
2844 record_type
, size_int (dtype
), field_list
);
2846 = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1),
2847 record_type
, size_int (klass
), field_list
);
2849 = make_descriptor_field ("MBMO", gnat_type_for_size (32, 1),
2850 record_type
, ssize_int (-1), field_list
);
2852 = make_descriptor_field ("LENGTH", gnat_type_for_size (64, 1),
2854 size_in_bytes (mech
== By_Descriptor_A
2855 ? inner_type
: type
),
2858 pointer64_type
= build_pointer_type_for_mode (type
, DImode
, false);
2861 = make_descriptor_field ("POINTER", pointer64_type
, record_type
,
2862 build_unary_op (ADDR_EXPR
, pointer64_type
,
2863 build0 (PLACEHOLDER_EXPR
, type
)),
2869 case By_Descriptor_S
:
2872 case By_Descriptor_SB
:
2874 = make_descriptor_field ("SB_L1", gnat_type_for_size (64, 1),
2876 (TREE_CODE (type
) == ARRAY_TYPE
2877 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type
))
2881 = make_descriptor_field ("SB_U1", gnat_type_for_size (64, 1),
2883 (TREE_CODE (type
) == ARRAY_TYPE
2884 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type
))
2889 case By_Descriptor_A
:
2890 case By_Descriptor_NCA
:
2892 = make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
2893 record_type
, size_zero_node
, field_list
);
2896 = make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
2897 record_type
, size_zero_node
, field_list
);
2899 dtype
= (mech
== By_Descriptor_NCA
2901 /* Set FL_COLUMN, FL_COEFF, and
2903 : (TREE_CODE (type
) == ARRAY_TYPE
2904 && TYPE_CONVENTION_FORTRAN_P (type
)
2907 = make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
2908 record_type
, size_int (dtype
),
2912 = make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
2913 record_type
, size_int (ndim
), field_list
);
2916 = make_descriptor_field ("MBZ", gnat_type_for_size (32, 1),
2917 record_type
, size_int (0), field_list
);
2919 = make_descriptor_field ("ARSIZE", gnat_type_for_size (64, 1),
2920 record_type
, size_in_bytes (type
),
2923 /* Now build a pointer to the 0,0,0... element. */
2924 tem
= build0 (PLACEHOLDER_EXPR
, type
);
2925 for (i
= 0, inner_type
= type
; i
< ndim
;
2926 i
++, inner_type
= TREE_TYPE (inner_type
))
2927 tem
= build4 (ARRAY_REF
, TREE_TYPE (inner_type
), tem
,
2928 convert (TYPE_DOMAIN (inner_type
), size_zero_node
),
2929 NULL_TREE
, NULL_TREE
);
2932 = make_descriptor_field ("A0", pointer64_type
, record_type
,
2933 build1 (ADDR_EXPR
, pointer64_type
, tem
),
2936 /* Next come the addressing coefficients. */
2937 tem
= size_one_node
;
2938 for (i
= 0; i
< ndim
; i
++)
2942 = size_binop (MULT_EXPR
, tem
,
2943 size_binop (PLUS_EXPR
,
2944 size_binop (MINUS_EXPR
,
2945 TYPE_MAX_VALUE (idx_arr
[i
]),
2946 TYPE_MIN_VALUE (idx_arr
[i
])),
2949 fname
[0] = (mech
== By_Descriptor_NCA
? 'S' : 'M');
2950 fname
[1] = '0' + i
, fname
[2] = 0;
2952 = make_descriptor_field (fname
, gnat_type_for_size (64, 1),
2953 record_type
, idx_length
, field_list
);
2955 if (mech
== By_Descriptor_NCA
)
2959 /* Finally here are the bounds. */
2960 for (i
= 0; i
< ndim
; i
++)
2964 fname
[0] = 'L', fname
[1] = '0' + i
, fname
[2] = 0;
2966 = make_descriptor_field (fname
, gnat_type_for_size (64, 1),
2968 TYPE_MIN_VALUE (idx_arr
[i
]), field_list
);
2972 = make_descriptor_field (fname
, gnat_type_for_size (64, 1),
2974 TYPE_MAX_VALUE (idx_arr
[i
]), field_list
);
2979 post_error ("unsupported descriptor type for &", gnat_entity
);
2982 TYPE_NAME (record_type
) = create_concat_name (gnat_entity
, "DESC64");
2983 finish_record_type (record_type
, nreverse (field_list
), 0, false);
2987 /* Fill in a VMS descriptor of GNU_TYPE for GNU_EXPR and return the result.
2988 GNAT_ACTUAL is the actual parameter for which the descriptor is built. */
2991 fill_vms_descriptor (tree gnu_type
, tree gnu_expr
, Node_Id gnat_actual
)
2993 VEC(constructor_elt
,gc
) *v
= NULL
;
2996 gnu_expr
= maybe_unconstrained_array (gnu_expr
);
2997 gnu_expr
= gnat_protect_expr (gnu_expr
);
2998 gnat_mark_addressable (gnu_expr
);
3000 /* We may need to substitute both GNU_EXPR and a CALL_EXPR to the raise CE
3001 routine in case we have a 32-bit descriptor. */
3002 gnu_expr
= build2 (COMPOUND_EXPR
, void_type_node
,
3003 build_call_raise (CE_Range_Check_Failed
, gnat_actual
,
3004 N_Raise_Constraint_Error
),
3007 for (field
= TYPE_FIELDS (gnu_type
); field
; field
= DECL_CHAIN (field
))
3010 = convert (TREE_TYPE (field
),
3011 SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_INITIAL (field
),
3013 CONSTRUCTOR_APPEND_ELT (v
, field
, value
);
3016 return gnat_build_constructor (gnu_type
, v
);
3019 /* Convert GNU_EXPR, a pointer to a 64bit VMS descriptor, to GNU_TYPE, a
3020 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3021 which the VMS descriptor is passed. */
3024 convert_vms_descriptor64 (tree gnu_type
, tree gnu_expr
, Entity_Id gnat_subprog
)
3026 tree desc_type
= TREE_TYPE (TREE_TYPE (gnu_expr
));
3027 tree desc
= build1 (INDIRECT_REF
, desc_type
, gnu_expr
);
3028 /* The CLASS field is the 3rd field in the descriptor. */
3029 tree klass
= DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type
)));
3030 /* The POINTER field is the 6th field in the descriptor. */
3031 tree pointer
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (klass
)));
3033 /* Retrieve the value of the POINTER field. */
3035 = build3 (COMPONENT_REF
, TREE_TYPE (pointer
), desc
, pointer
, NULL_TREE
);
3037 if (POINTER_TYPE_P (gnu_type
))
3038 return convert (gnu_type
, gnu_expr64
);
3040 else if (TYPE_IS_FAT_POINTER_P (gnu_type
))
3042 tree p_array_type
= TREE_TYPE (TYPE_FIELDS (gnu_type
));
3043 tree p_bounds_type
= TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type
)));
3044 tree template_type
= TREE_TYPE (p_bounds_type
);
3045 tree min_field
= TYPE_FIELDS (template_type
);
3046 tree max_field
= TREE_CHAIN (TYPE_FIELDS (template_type
));
3047 tree template_tree
, template_addr
, aflags
, dimct
, t
, u
;
3048 /* See the head comment of build_vms_descriptor. */
3049 int iklass
= TREE_INT_CST_LOW (DECL_INITIAL (klass
));
3050 tree lfield
, ufield
;
3051 VEC(constructor_elt
,gc
) *v
;
3053 /* Convert POINTER to the pointer-to-array type. */
3054 gnu_expr64
= convert (p_array_type
, gnu_expr64
);
3058 case 1: /* Class S */
3059 case 15: /* Class SB */
3060 /* Build {1, LENGTH} template; LENGTH64 is the 5th field. */
3061 v
= VEC_alloc (constructor_elt
, gc
, 2);
3062 t
= DECL_CHAIN (DECL_CHAIN (klass
));
3063 t
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3064 CONSTRUCTOR_APPEND_ELT (v
, min_field
,
3065 convert (TREE_TYPE (min_field
),
3067 CONSTRUCTOR_APPEND_ELT (v
, max_field
,
3068 convert (TREE_TYPE (max_field
), t
));
3069 template_tree
= gnat_build_constructor (template_type
, v
);
3070 template_addr
= build_unary_op (ADDR_EXPR
, NULL_TREE
, template_tree
);
3072 /* For class S, we are done. */
3076 /* Test that we really have a SB descriptor, like DEC Ada. */
3077 t
= build3 (COMPONENT_REF
, TREE_TYPE (klass
), desc
, klass
, NULL
);
3078 u
= convert (TREE_TYPE (klass
), DECL_INITIAL (klass
));
3079 u
= build_binary_op (EQ_EXPR
, boolean_type_node
, t
, u
);
3080 /* If so, there is already a template in the descriptor and
3081 it is located right after the POINTER field. The fields are
3082 64bits so they must be repacked. */
3083 t
= TREE_CHAIN (pointer
);
3084 lfield
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3085 lfield
= convert (TREE_TYPE (TYPE_FIELDS (template_type
)), lfield
);
3088 ufield
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3090 (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type
))), ufield
);
3092 /* Build the template in the form of a constructor. */
3093 v
= VEC_alloc (constructor_elt
, gc
, 2);
3094 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (template_type
), lfield
);
3095 CONSTRUCTOR_APPEND_ELT (v
, TREE_CHAIN (TYPE_FIELDS (template_type
)),
3097 template_tree
= gnat_build_constructor (template_type
, v
);
3099 /* Otherwise use the {1, LENGTH} template we build above. */
3100 template_addr
= build3 (COND_EXPR
, p_bounds_type
, u
,
3101 build_unary_op (ADDR_EXPR
, p_bounds_type
,
3106 case 4: /* Class A */
3107 /* The AFLAGS field is the 3rd field after the pointer in the
3109 t
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer
)));
3110 aflags
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3111 /* The DIMCT field is the next field in the descriptor after
3114 dimct
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3115 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3116 or FL_COEFF or FL_BOUNDS not set. */
3117 u
= build_int_cst (TREE_TYPE (aflags
), 192);
3118 u
= build_binary_op (TRUTH_OR_EXPR
, boolean_type_node
,
3119 build_binary_op (NE_EXPR
, boolean_type_node
,
3121 convert (TREE_TYPE (dimct
),
3123 build_binary_op (NE_EXPR
, boolean_type_node
,
3124 build2 (BIT_AND_EXPR
,
3128 /* There is already a template in the descriptor and it is located
3129 in block 3. The fields are 64bits so they must be repacked. */
3130 t
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN
3132 lfield
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3133 lfield
= convert (TREE_TYPE (TYPE_FIELDS (template_type
)), lfield
);
3136 ufield
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3138 (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type
))), ufield
);
3140 /* Build the template in the form of a constructor. */
3141 v
= VEC_alloc (constructor_elt
, gc
, 2);
3142 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (template_type
), lfield
);
3143 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (template_type
)),
3145 template_tree
= gnat_build_constructor (template_type
, v
);
3146 template_tree
= build3 (COND_EXPR
, template_type
, u
,
3147 build_call_raise (CE_Length_Check_Failed
, Empty
,
3148 N_Raise_Constraint_Error
),
3151 = build_unary_op (ADDR_EXPR
, p_bounds_type
, template_tree
);
3154 case 10: /* Class NCA */
3156 post_error ("unsupported descriptor type for &", gnat_subprog
);
3157 template_addr
= integer_zero_node
;
3161 /* Build the fat pointer in the form of a constructor. */
3162 v
= VEC_alloc (constructor_elt
, gc
, 2);
3163 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (gnu_type
), gnu_expr64
);
3164 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (gnu_type
)),
3166 return gnat_build_constructor (gnu_type
, v
);
3173 /* Convert GNU_EXPR, a pointer to a 32bit VMS descriptor, to GNU_TYPE, a
3174 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3175 which the VMS descriptor is passed. */
3178 convert_vms_descriptor32 (tree gnu_type
, tree gnu_expr
, Entity_Id gnat_subprog
)
3180 tree desc_type
= TREE_TYPE (TREE_TYPE (gnu_expr
));
3181 tree desc
= build1 (INDIRECT_REF
, desc_type
, gnu_expr
);
3182 /* The CLASS field is the 3rd field in the descriptor. */
3183 tree klass
= DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type
)));
3184 /* The POINTER field is the 4th field in the descriptor. */
3185 tree pointer
= DECL_CHAIN (klass
);
3187 /* Retrieve the value of the POINTER field. */
3189 = build3 (COMPONENT_REF
, TREE_TYPE (pointer
), desc
, pointer
, NULL_TREE
);
3191 if (POINTER_TYPE_P (gnu_type
))
3192 return convert (gnu_type
, gnu_expr32
);
3194 else if (TYPE_IS_FAT_POINTER_P (gnu_type
))
3196 tree p_array_type
= TREE_TYPE (TYPE_FIELDS (gnu_type
));
3197 tree p_bounds_type
= TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type
)));
3198 tree template_type
= TREE_TYPE (p_bounds_type
);
3199 tree min_field
= TYPE_FIELDS (template_type
);
3200 tree max_field
= TREE_CHAIN (TYPE_FIELDS (template_type
));
3201 tree template_tree
, template_addr
, aflags
, dimct
, t
, u
;
3202 /* See the head comment of build_vms_descriptor. */
3203 int iklass
= TREE_INT_CST_LOW (DECL_INITIAL (klass
));
3204 VEC(constructor_elt
,gc
) *v
;
3206 /* Convert POINTER to the pointer-to-array type. */
3207 gnu_expr32
= convert (p_array_type
, gnu_expr32
);
3211 case 1: /* Class S */
3212 case 15: /* Class SB */
3213 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
3214 v
= VEC_alloc (constructor_elt
, gc
, 2);
3215 t
= TYPE_FIELDS (desc_type
);
3216 t
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3217 CONSTRUCTOR_APPEND_ELT (v
, min_field
,
3218 convert (TREE_TYPE (min_field
),
3220 CONSTRUCTOR_APPEND_ELT (v
, max_field
,
3221 convert (TREE_TYPE (max_field
), t
));
3222 template_tree
= gnat_build_constructor (template_type
, v
);
3223 template_addr
= build_unary_op (ADDR_EXPR
, NULL_TREE
, template_tree
);
3225 /* For class S, we are done. */
3229 /* Test that we really have a SB descriptor, like DEC Ada. */
3230 t
= build3 (COMPONENT_REF
, TREE_TYPE (klass
), desc
, klass
, NULL
);
3231 u
= convert (TREE_TYPE (klass
), DECL_INITIAL (klass
));
3232 u
= build_binary_op (EQ_EXPR
, boolean_type_node
, t
, u
);
3233 /* If so, there is already a template in the descriptor and
3234 it is located right after the POINTER field. */
3235 t
= TREE_CHAIN (pointer
);
3237 = build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3238 /* Otherwise use the {1, LENGTH} template we build above. */
3239 template_addr
= build3 (COND_EXPR
, p_bounds_type
, u
,
3240 build_unary_op (ADDR_EXPR
, p_bounds_type
,
3245 case 4: /* Class A */
3246 /* The AFLAGS field is the 7th field in the descriptor. */
3247 t
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer
)));
3248 aflags
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3249 /* The DIMCT field is the 8th field in the descriptor. */
3251 dimct
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3252 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3253 or FL_COEFF or FL_BOUNDS not set. */
3254 u
= build_int_cst (TREE_TYPE (aflags
), 192);
3255 u
= build_binary_op (TRUTH_OR_EXPR
, boolean_type_node
,
3256 build_binary_op (NE_EXPR
, boolean_type_node
,
3258 convert (TREE_TYPE (dimct
),
3260 build_binary_op (NE_EXPR
, boolean_type_node
,
3261 build2 (BIT_AND_EXPR
,
3265 /* There is already a template in the descriptor and it is
3266 located at the start of block 3 (12th field). */
3267 t
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (t
))));
3269 = build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3270 template_tree
= build3 (COND_EXPR
, TREE_TYPE (t
), u
,
3271 build_call_raise (CE_Length_Check_Failed
, Empty
,
3272 N_Raise_Constraint_Error
),
3275 = build_unary_op (ADDR_EXPR
, p_bounds_type
, template_tree
);
3278 case 10: /* Class NCA */
3280 post_error ("unsupported descriptor type for &", gnat_subprog
);
3281 template_addr
= integer_zero_node
;
3285 /* Build the fat pointer in the form of a constructor. */
3286 v
= VEC_alloc (constructor_elt
, gc
, 2);
3287 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (gnu_type
), gnu_expr32
);
3288 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (gnu_type
)),
3291 return gnat_build_constructor (gnu_type
, v
);
3298 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
3299 pointer or fat pointer type. GNU_EXPR_ALT_TYPE is the alternate (32-bit)
3300 pointer type of GNU_EXPR. BY_REF is true if the result is to be used by
3301 reference. GNAT_SUBPROG is the subprogram to which the VMS descriptor is
3305 convert_vms_descriptor (tree gnu_type
, tree gnu_expr
, tree gnu_expr_alt_type
,
3306 bool by_ref
, Entity_Id gnat_subprog
)
3308 tree desc_type
= TREE_TYPE (TREE_TYPE (gnu_expr
));
3309 tree desc
= build1 (INDIRECT_REF
, desc_type
, gnu_expr
);
3310 tree mbo
= TYPE_FIELDS (desc_type
);
3311 const char *mbostr
= IDENTIFIER_POINTER (DECL_NAME (mbo
));
3312 tree mbmo
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (mbo
)));
3313 tree real_type
, is64bit
, gnu_expr32
, gnu_expr64
;
3316 real_type
= TREE_TYPE (gnu_type
);
3318 real_type
= gnu_type
;
3320 /* If the field name is not MBO, it must be 32-bit and no alternate.
3321 Otherwise primary must be 64-bit and alternate 32-bit. */
3322 if (strcmp (mbostr
, "MBO") != 0)
3324 tree ret
= convert_vms_descriptor32 (real_type
, gnu_expr
, gnat_subprog
);
3326 ret
= build_unary_op (ADDR_EXPR
, gnu_type
, ret
);
3330 /* Build the test for 64-bit descriptor. */
3331 mbo
= build3 (COMPONENT_REF
, TREE_TYPE (mbo
), desc
, mbo
, NULL_TREE
);
3332 mbmo
= build3 (COMPONENT_REF
, TREE_TYPE (mbmo
), desc
, mbmo
, NULL_TREE
);
3334 = build_binary_op (TRUTH_ANDIF_EXPR
, boolean_type_node
,
3335 build_binary_op (EQ_EXPR
, boolean_type_node
,
3336 convert (integer_type_node
, mbo
),
3338 build_binary_op (EQ_EXPR
, boolean_type_node
,
3339 convert (integer_type_node
, mbmo
),
3340 integer_minus_one_node
));
3342 /* Build the 2 possible end results. */
3343 gnu_expr64
= convert_vms_descriptor64 (real_type
, gnu_expr
, gnat_subprog
);
3345 gnu_expr64
= build_unary_op (ADDR_EXPR
, gnu_type
, gnu_expr64
);
3346 gnu_expr
= fold_convert (gnu_expr_alt_type
, gnu_expr
);
3347 gnu_expr32
= convert_vms_descriptor32 (real_type
, gnu_expr
, gnat_subprog
);
3349 gnu_expr32
= build_unary_op (ADDR_EXPR
, gnu_type
, gnu_expr32
);
3351 return build3 (COND_EXPR
, gnu_type
, is64bit
, gnu_expr64
, gnu_expr32
);
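
/* Illustrative sketch (hypothetical layout, not part of this unit): the
   run-time discrimination performed above.  A 64-bit VMS descriptor is
   recognized by its MBO word being 1 and its MBMO word being -1; anything
   else is handled as a 32-bit descriptor.  Guarded out of compilation.  */
#if 0
#include <stdio.h>

struct desc_header
{
  unsigned short mbo;   /* must-be-one in the 64-bit layout */
  unsigned char dtype;
  unsigned char klass;
  int mbmo;             /* must-be-minus-one in the 64-bit layout */
};

static int
is_64bit_descriptor (const struct desc_header *d)
{
  return d->mbo == 1 && d->mbmo == -1;
}

int
main (void)
{
  struct desc_header d64 = { 1, 0, 0, -1 };
  struct desc_header d32 = { 8, 0, 0, 12345 };

  printf ("%d %d\n", is_64bit_descriptor (&d64),   /* 1 */
          is_64bit_descriptor (&d32));             /* 0 */
  return 0;
}
#endif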
/* Build a stub for the subprogram specified by the GCC tree GNU_SUBPROG
   and the GNAT node GNAT_SUBPROG.  */

void
build_function_stub (tree gnu_subprog, Entity_Id gnat_subprog)
{
  tree gnu_subprog_type, gnu_subprog_addr, gnu_subprog_call;
  tree gnu_subprog_param, gnu_stub_param, gnu_param;
  tree gnu_stub_decl = DECL_FUNCTION_STUB (gnu_subprog);
  VEC(tree,gc) *gnu_param_vec = NULL;

  gnu_subprog_type = TREE_TYPE (gnu_subprog);

  /* Initialize the information structure for the function.  */
  allocate_struct_function (gnu_stub_decl, false);

  begin_subprog_body (gnu_stub_decl);

  start_stmt_group ();
  gnat_pushlevel ();

  /* Loop over the parameters of the stub and translate any of them
     passed by descriptor into a by reference one.  */
  for (gnu_stub_param = DECL_ARGUMENTS (gnu_stub_decl),
       gnu_subprog_param = DECL_ARGUMENTS (gnu_subprog);
       gnu_stub_param;
       gnu_stub_param = TREE_CHAIN (gnu_stub_param),
       gnu_subprog_param = TREE_CHAIN (gnu_subprog_param))
    {
      if (DECL_BY_DESCRIPTOR_P (gnu_stub_param))
        {
          gcc_assert (DECL_BY_REF_P (gnu_subprog_param));
          gnu_param
            = convert_vms_descriptor (TREE_TYPE (gnu_subprog_param),
                                      gnu_stub_param,
                                      DECL_PARM_ALT_TYPE (gnu_stub_param),
                                      DECL_BY_DOUBLE_REF_P (gnu_subprog_param),
                                      gnat_subprog);
        }
      else
        gnu_param = gnu_stub_param;

      VEC_safe_push (tree, gc, gnu_param_vec, gnu_param);
    }

  /* Invoke the internal subprogram.  */
  gnu_subprog_addr = build1 (ADDR_EXPR, build_pointer_type (gnu_subprog_type),
                             gnu_subprog);
  gnu_subprog_call = build_call_vec (TREE_TYPE (gnu_subprog_type),
                                     gnu_subprog_addr, gnu_param_vec);

  /* Propagate the return value, if any.  */
  if (VOID_TYPE_P (TREE_TYPE (gnu_subprog_type)))
    add_stmt (gnu_subprog_call);
  else
    add_stmt (build_return_expr (DECL_RESULT (gnu_stub_decl),
                                 gnu_subprog_call));

  gnat_poplevel ();
  end_subprog_body (end_stmt_group ());
}
/* Build a type to be used to represent an aliased object whose nominal type
   is an unconstrained array.  This consists of a RECORD_TYPE containing a
   field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
   If ARRAY_TYPE is that of an unconstrained array, this is used to represent
   an arbitrary unconstrained object.  Use NAME as the name of the record.
   DEBUG_INFO_P is true if we need to write debug information for the type.  */

tree
build_unc_object_type (tree template_type, tree object_type, tree name,
                       bool debug_info_p)
{
  tree type = make_node (RECORD_TYPE);
  tree template_field
    = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
                         NULL_TREE, NULL_TREE, 0, 1);
  tree array_field
    = create_field_decl (get_identifier ("ARRAY"), object_type, type,
                         NULL_TREE, NULL_TREE, 0, 1);

  TYPE_NAME (type) = name;
  TYPE_CONTAINS_TEMPLATE_P (type) = 1;
  DECL_CHAIN (template_field) = array_field;
  finish_record_type (type, template_field, 0, true);

  /* Declare it now since it will never be declared otherwise.  This is
     necessary to ensure that its subtrees are properly marked.  */
  create_type_decl (name, type, NULL, true, debug_info_p, Empty);

  return type;
}
/* Same, taking a thin or fat pointer type instead of a template type.  */

tree
build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
                                tree name, bool debug_info_p)
{
  tree template_type;

  gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));

  template_type
    = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
       ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
       : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));

  return
    build_unc_object_type (template_type, object_type, name, debug_info_p);
}
/* Shift the component offsets within an unconstrained object TYPE to make it
   suitable for use as a designated type for thin pointers.  */

void
shift_unc_components_for_thin_pointers (tree type)
{
  /* Thin pointer values designate the ARRAY data of an unconstrained object,
     allocated past the BOUNDS template.  The designated type is adjusted to
     have ARRAY at position zero and the template at a negative offset, so
     that COMPONENT_REFs on (*thin_ptr) designate the proper location.  */

  tree bounds_field = TYPE_FIELDS (type);
  tree array_field  = DECL_CHAIN (TYPE_FIELDS (type));

  DECL_FIELD_OFFSET (bounds_field)
    = size_binop (MINUS_EXPR, size_zero_node, byte_position (array_field));

  DECL_FIELD_OFFSET (array_field) = size_zero_node;
  DECL_FIELD_BIT_OFFSET (array_field) = bitsize_zero_node;
}
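
/* Illustrative sketch (hypothetical layout, not part of this unit): the
   addressing scheme the shift above sets up.  A thin pointer points at the
   ARRAY data, and the BOUNDS template lives immediately before it at a
   negative offset, so it can be recovered by going backwards from the
   pointer.  Guarded out of compilation.  */
#if 0
#include <stdio.h>
#include <stddef.h>

struct bounds { long low, high; };

struct unc_object
{
  struct bounds bounds;   /* template, allocated first */
  int data[8];            /* array data, what the thin pointer designates */
};

int
main (void)
{
  struct unc_object obj = { { 1, 8 }, { 0 } };
  int *thin_ptr = obj.data;

  /* Recover the template from the thin pointer by going backwards.  */
  struct bounds *b = (struct bounds *) ((char *) thin_ptr
                                        - offsetof (struct unc_object, data));

  printf ("%ld..%ld\n", b->low, b->high);  /* 1..8 */
  return 0;
}
#endif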
3488 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
3489 In the normal case this is just two adjustments, but we have more to
3490 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
3493 update_pointer_to (tree old_type
, tree new_type
)
3495 tree ptr
= TYPE_POINTER_TO (old_type
);
3496 tree ref
= TYPE_REFERENCE_TO (old_type
);
3499 /* If this is the main variant, process all the other variants first. */
3500 if (TYPE_MAIN_VARIANT (old_type
) == old_type
)
3501 for (t
= TYPE_NEXT_VARIANT (old_type
); t
; t
= TYPE_NEXT_VARIANT (t
))
3502 update_pointer_to (t
, new_type
);
3504 /* If no pointers and no references, we are done. */
3508 /* Merge the old type qualifiers in the new type.
3510 Each old variant has qualifiers for specific reasons, and the new
3511 designated type as well. Each set of qualifiers represents useful
3512 information grabbed at some point, and merging the two simply unifies
3513 these inputs into the final type description.
3515 Consider for instance a volatile type frozen after an access to constant
3516 type designating it; after the designated type's freeze, we get here with
3517 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
3518 when the access type was processed. We will make a volatile and readonly
3519 designated type, because that's what it really is.
3521 We might also get here for a non-dummy OLD_TYPE variant with different
3522 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
3523 to private record type elaboration (see the comments around the call to
3524 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
3525 the qualifiers in those cases too, to avoid accidentally discarding the
3526 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
3528 = build_qualified_type (new_type
,
3529 TYPE_QUALS (old_type
) | TYPE_QUALS (new_type
));
  /* If old type and new type are identical, there is nothing to do.  */
  if (old_type == new_type)
    return;

  /* Otherwise, first handle the simple case.  */
  if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
    {
      tree new_ptr, new_ref;

      /* If pointer or reference already points to new type, nothing to do.
         This can happen as update_pointer_to can be invoked multiple times
         on the same couple of types because of the type variants.  */
      if ((ptr && TREE_TYPE (ptr) == new_type)
          || (ref && TREE_TYPE (ref) == new_type))
        return;

      /* Chain PTR and its variants at the end.  */
      new_ptr = TYPE_POINTER_TO (new_type);
      if (new_ptr)
        {
          while (TYPE_NEXT_PTR_TO (new_ptr))
            new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
          TYPE_NEXT_PTR_TO (new_ptr) = ptr;
        }
      else
        TYPE_POINTER_TO (new_type) = ptr;
      /* Now adjust them.  */
      for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
        for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
          TREE_TYPE (t) = new_type;

      /* If we have adjusted named types, finalize them.  This is necessary
         since we had forced a DWARF typedef for them in gnat_pushdecl.  */
      for (ptr = TYPE_POINTER_TO (old_type); ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
        if (TYPE_NAME (ptr) && TREE_CODE (TYPE_NAME (ptr)) == TYPE_DECL)
          rest_of_type_decl_compilation (TYPE_NAME (ptr));

      /* Chain REF and its variants at the end.  */
      new_ref = TYPE_REFERENCE_TO (new_type);
      if (new_ref)
        {
          while (TYPE_NEXT_REF_TO (new_ref))
            new_ref = TYPE_NEXT_REF_TO (new_ref);
          TYPE_NEXT_REF_TO (new_ref) = ref;
        }
      else
        TYPE_REFERENCE_TO (new_type) = ref;

      /* Now adjust them.  */
      for (; ref; ref = TYPE_NEXT_REF_TO (ref))
        for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
          TREE_TYPE (t) = new_type;

      TYPE_POINTER_TO (old_type) = NULL_TREE;
      TYPE_REFERENCE_TO (old_type) = NULL_TREE;
    }
  /* Now deal with the unconstrained array case.  In this case the pointer
     is actually a record where both fields are pointers to dummy nodes.
     Turn them into pointers to the correct types using update_pointer_to.
     Likewise for the pointer to the object record (thin pointer).  */
  else
    {
      tree new_ptr = TYPE_POINTER_TO (new_type);

      gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));

      /* If PTR already points to NEW_TYPE, nothing to do.  This can happen
         since update_pointer_to can be invoked multiple times on the same
         couple of types because of the type variants.  */
      if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
        return;

      update_pointer_to
        (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
         TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));

      update_pointer_to
        (TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
         TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));

      update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
                         TYPE_OBJECT_RECORD_TYPE (new_type));

      TYPE_POINTER_TO (old_type) = NULL_TREE;
    }
}
/* Convert EXPR, a pointer to a constrained array, into a pointer to an
   unconstrained one.  This involves making or finding a template.  */

static tree
convert_to_fat_pointer (tree type, tree expr)
{
  tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
  tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
  tree etype = TREE_TYPE (expr);
  tree template_tree;
  VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 2);

  /* If EXPR is null, make a fat pointer that contains null pointers to the
     template and array.  */
  if (integer_zerop (expr))
    {
      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
                              convert (p_array_type, expr));
      CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
                              convert (build_pointer_type (template_type),
                                       expr));
      return gnat_build_constructor (type, v);
    }
  /* If EXPR is a thin pointer, make template and data from the record.  */
  else if (TYPE_IS_THIN_POINTER_P (etype))
    {
      tree fields = TYPE_FIELDS (TREE_TYPE (etype));

      expr = gnat_protect_expr (expr);
      if (TREE_CODE (expr) == ADDR_EXPR)
        expr = TREE_OPERAND (expr, 0);
      else
        expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);

      template_tree = build_component_ref (expr, NULL_TREE, fields, false);
      expr = build_unary_op (ADDR_EXPR, NULL_TREE,
                             build_component_ref (expr, NULL_TREE,
                                                  DECL_CHAIN (fields), false));
    }

  /* Otherwise, build the constructor for the template.  */
  else
    template_tree = build_template (template_type, TREE_TYPE (etype), expr);
  /* The final result is a constructor for the fat pointer.

     If EXPR is an argument of a foreign convention subprogram, the type it
     points to is directly the component type.  In this case, the expression
     type may not match the corresponding FIELD_DECL type at this point, so we
     call "convert" here to fix that up if necessary.  This type consistency is
     required, for instance because it ensures that possible later folding of
     COMPONENT_REFs against this constructor always yields something of the
     same type as the initial reference.

     Note that the call to "build_template" above is still fine because it
     will only refer to the provided TEMPLATE_TYPE in this case.  */
  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
                          convert (p_array_type, expr));
  CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
                          build_unary_op (ADDR_EXPR, NULL_TREE,
                                          template_tree));
  return gnat_build_constructor (type, v);
}
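/* Illustrative sketch (not part of the original source; names are
   hypothetical): the constructor built above corresponds, at run time, to a
   two-field record pairing a pointer to the array data with a pointer to the
   bounds template, which is what a "fat pointer" is.  */
#if 0
struct example_bounds { int first, last; };

struct example_fat_pointer
{
  int *p_array;                       /* the data */
  struct example_bounds *p_bounds;    /* the template */
};

static struct example_fat_pointer
example_make_fat_pointer (int *data, struct example_bounds *bounds)
{
  struct example_fat_pointer fp = { data, bounds };
  return fp;
}
#endif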
/* Convert to a thin pointer type, TYPE.  The only thing we know how to convert
   is something that is a fat pointer, so convert to it first if EXPR is not
   already a fat pointer.  */

static tree
convert_to_thin_pointer (tree type, tree expr)
{
  if (!TYPE_IS_FAT_POINTER_P (TREE_TYPE (expr)))
    expr
      = convert_to_fat_pointer
        (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);

  /* We get the pointer to the data and use a NOP_EXPR to make it the
     proper GCC type.  */
  expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
                              false);
  expr = build1 (NOP_EXPR, type, expr);

  return expr;
}
/* Create an expression whose value is that of EXPR,
   converted to type TYPE.  The TREE_TYPE of the value
   is always TYPE.  This function implements all reasonable
   conversions; callers should filter out those that are
   not permitted by the language being compiled.  */

tree
convert (tree type, tree expr)
{
  tree etype = TREE_TYPE (expr);
  enum tree_code ecode = TREE_CODE (etype);
  enum tree_code code = TREE_CODE (type);

  /* If the expression is already of the right type, we are done.  */
  if (etype == type)
    return expr;
  /* If both input and output have padding and are of variable size, do this
     as an unchecked conversion.  Likewise if one is a mere variant of the
     other, so we avoid a pointless unpad/repad sequence.  */
  else if (code == RECORD_TYPE && ecode == RECORD_TYPE
           && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
           && (!TREE_CONSTANT (TYPE_SIZE (type))
               || !TREE_CONSTANT (TYPE_SIZE (etype))
               || gnat_types_compatible_p (type, etype)
               || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
                  == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
    ;
3735 /* If the output type has padding, convert to the inner type and make a
3736 constructor to build the record, unless a variable size is involved. */
3737 else if (code
== RECORD_TYPE
&& TYPE_PADDING_P (type
))
3739 VEC(constructor_elt
,gc
) *v
;
3741 /* If we previously converted from another type and our type is
3742 of variable size, remove the conversion to avoid the need for
3743 variable-sized temporaries. Likewise for a conversion between
3744 original and packable version. */
3745 if (TREE_CODE (expr
) == VIEW_CONVERT_EXPR
3746 && (!TREE_CONSTANT (TYPE_SIZE (type
))
3747 || (ecode
== RECORD_TYPE
3748 && TYPE_NAME (etype
)
3749 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr
, 0))))))
3750 expr
= TREE_OPERAND (expr
, 0);
3752 /* If we are just removing the padding from expr, convert the original
3753 object if we have variable size in order to avoid the need for some
3754 variable-sized temporaries. Likewise if the padding is a variant
3755 of the other, so we avoid a pointless unpad/repad sequence. */
3756 if (TREE_CODE (expr
) == COMPONENT_REF
3757 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr
, 0)))
3758 && (!TREE_CONSTANT (TYPE_SIZE (type
))
3759 || gnat_types_compatible_p (type
,
3760 TREE_TYPE (TREE_OPERAND (expr
, 0)))
3761 || (ecode
== RECORD_TYPE
3762 && TYPE_NAME (etype
)
3763 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type
))))))
3764 return convert (type
, TREE_OPERAND (expr
, 0));
3766 /* If the inner type is of self-referential size and the expression type
3767 is a record, do this as an unchecked conversion. But first pad the
3768 expression if possible to have the same size on both sides. */
3769 if (ecode
== RECORD_TYPE
3770 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type
))))
3772 if (TREE_CODE (TYPE_SIZE (etype
)) == INTEGER_CST
)
3773 expr
= convert (maybe_pad_type (etype
, TYPE_SIZE (type
), 0, Empty
,
3774 false, false, false, true),
3776 return unchecked_convert (type
, expr
, false);
3779 /* If we are converting between array types with variable size, do the
3780 final conversion as an unchecked conversion, again to avoid the need
3781 for some variable-sized temporaries. If valid, this conversion is
3782 very likely purely technical and without real effects. */
3783 if (ecode
== ARRAY_TYPE
3784 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type
))) == ARRAY_TYPE
3785 && !TREE_CONSTANT (TYPE_SIZE (etype
))
3786 && !TREE_CONSTANT (TYPE_SIZE (type
)))
3787 return unchecked_convert (type
,
3788 convert (TREE_TYPE (TYPE_FIELDS (type
)),
3792 v
= VEC_alloc (constructor_elt
, gc
, 1);
3793 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (type
),
3794 convert (TREE_TYPE (TYPE_FIELDS (type
)), expr
));
3795 return gnat_build_constructor (type
, v
);
  /* If the input type has padding, remove it and convert to the output type.
     The conditions ordering is arranged to ensure that the output type is not
     a padding type here, as it is not clear whether the conversion would
     always be correct if this was to happen.  */
  else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
    {
      tree unpadded;

      /* If we have just converted to this padded type, just get the
         inner expression.  */
      if (TREE_CODE (expr) == CONSTRUCTOR
          && !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (expr))
          && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->index
             == TYPE_FIELDS (etype))
        unpadded
          = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->value;

      /* Otherwise, build an explicit component reference.  */
      else
        unpadded
          = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);

      return convert (type, unpadded);
    }
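/* Illustrative sketch (not part of the original source; names are
   hypothetical): a padding type is simply a record wrapping the real object
   in a larger container, so removing the padding amounts to reading back the
   single field, which is what the COMPONENT_REF built above does.  */
#if 0
struct example_padded_int
{
  int f;                          /* the actual object */
  char pad[4];                    /* padding imposed by a size clause */
};

static int
example_unpad (struct example_padded_int p)
{
  return p.f;                     /* "convert" out of the padded type */
}
#endif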
  /* If the input is a biased type, adjust first.  */
  if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
    return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
                                       fold_convert (TREE_TYPE (etype),
                                                     expr),
                                       TYPE_MIN_VALUE (etype)));
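/* Illustrative sketch (not part of the original source; the values are made
   up): a biased integer type stores VALUE - TYPE'First, so converting out of
   it adds the bias back, which is what the PLUS_EXPR built above expresses.  */
#if 0
static int
example_decode_biased (unsigned stored)        /* e.g. 2 bits: 0 .. 3 */
{
  const int bias = 100;                        /* plays the TYPE_MIN_VALUE role */
  return (int) stored + bias;                  /* yields 100 .. 103 */
}
#endif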
  /* If the input is a justified modular type, we need to extract the actual
     object before converting it to any other type with the exceptions of an
     unconstrained array or of a mere type variant.  It is useful to avoid the
     extraction and conversion in the type variant case because it could end
     up replacing a VAR_DECL expr by a constructor and we might be about to
     take the address of the result.  */
  if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
      && code != UNCONSTRAINED_ARRAY_TYPE
      && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
    return convert (type, build_component_ref (expr, NULL_TREE,
                                               TYPE_FIELDS (etype), false));
3842 /* If converting to a type that contains a template, convert to the data
3843 type and then build the template. */
3844 if (code
== RECORD_TYPE
&& TYPE_CONTAINS_TEMPLATE_P (type
))
3846 tree obj_type
= TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type
)));
3847 VEC(constructor_elt
,gc
) *v
= VEC_alloc (constructor_elt
, gc
, 2);
3849 /* If the source already has a template, get a reference to the
3850 associated array only, as we are going to rebuild a template
3851 for the target type anyway. */
3852 expr
= maybe_unconstrained_array (expr
);
3854 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (type
),
3855 build_template (TREE_TYPE (TYPE_FIELDS (type
)),
3856 obj_type
, NULL_TREE
));
3857 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (type
)),
3858 convert (obj_type
, expr
));
3859 return gnat_build_constructor (type
, v
);
  /* There are some special cases of expressions that we process
     specially.  */
  switch (TREE_CODE (expr))
    {
    case ERROR_MARK:
      return expr;

    case NULL_EXPR:
      /* Just set its type here.  For TRANSFORM_EXPR, we will do the actual
         conversion in gnat_expand_expr.  NULL_EXPR does not represent an
         actual value, so no conversion is needed.  */
      expr = copy_node (expr);
      TREE_TYPE (expr) = type;
      return expr;
3878 /* If we are converting a STRING_CST to another constrained array type,
3879 just make a new one in the proper type. */
3880 if (code
== ecode
&& AGGREGATE_TYPE_P (etype
)
3881 && !(TREE_CODE (TYPE_SIZE (etype
)) == INTEGER_CST
3882 && TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
))
3884 expr
= copy_node (expr
);
3885 TREE_TYPE (expr
) = type
;
3891 /* If we are converting a VECTOR_CST to a mere variant type, just make
3892 a new one in the proper type. */
3893 if (code
== ecode
&& gnat_types_compatible_p (type
, etype
))
3895 expr
= copy_node (expr
);
3896 TREE_TYPE (expr
) = type
;
3901 /* If we are converting a CONSTRUCTOR to a mere variant type, just make
3902 a new one in the proper type. */
3903 if (code
== ecode
&& gnat_types_compatible_p (type
, etype
))
3905 expr
= copy_node (expr
);
3906 TREE_TYPE (expr
) = type
;
3910 /* Likewise for a conversion between original and packable version, or
3911 conversion between types of the same size and with the same list of
3912 fields, but we have to work harder to preserve type consistency. */
3914 && code
== RECORD_TYPE
3915 && (TYPE_NAME (type
) == TYPE_NAME (etype
)
3916 || tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (etype
))))
3919 VEC(constructor_elt
,gc
) *e
= CONSTRUCTOR_ELTS (expr
);
3920 unsigned HOST_WIDE_INT len
= VEC_length (constructor_elt
, e
);
3921 VEC(constructor_elt
,gc
) *v
= VEC_alloc (constructor_elt
, gc
, len
);
3922 tree efield
= TYPE_FIELDS (etype
), field
= TYPE_FIELDS (type
);
3923 unsigned HOST_WIDE_INT idx
;
3926 /* Whether we need to clear TREE_CONSTANT et al. on the output
3927 constructor when we convert in place. */
3928 bool clear_constant
= false;
3930 FOR_EACH_CONSTRUCTOR_ELT(e
, idx
, index
, value
)
3932 constructor_elt
*elt
;
3933 /* We expect only simple constructors. */
3934 if (!SAME_FIELD_P (index
, efield
))
3936 /* The field must be the same. */
3937 if (!SAME_FIELD_P (efield
, field
))
3939 elt
= VEC_quick_push (constructor_elt
, v
, NULL
);
3941 elt
->value
= convert (TREE_TYPE (field
), value
);
3943 /* If packing has made this field a bitfield and the input
3944 value couldn't be emitted statically any more, we need to
3945 clear TREE_CONSTANT on our output. */
3947 && TREE_CONSTANT (expr
)
3948 && !CONSTRUCTOR_BITFIELD_P (efield
)
3949 && CONSTRUCTOR_BITFIELD_P (field
)
3950 && !initializer_constant_valid_for_bitfield_p (value
))
3951 clear_constant
= true;
3953 efield
= DECL_CHAIN (efield
);
3954 field
= DECL_CHAIN (field
);
3957 /* If we have been able to match and convert all the input fields
3958 to their output type, convert in place now. We'll fallback to a
3959 view conversion downstream otherwise. */
3962 expr
= copy_node (expr
);
3963 TREE_TYPE (expr
) = type
;
3964 CONSTRUCTOR_ELTS (expr
) = v
;
3966 TREE_CONSTANT (expr
) = TREE_STATIC (expr
) = 0;
3971 /* Likewise for a conversion between array type and vector type with a
3972 compatible representative array. */
3973 else if (code
== VECTOR_TYPE
3974 && ecode
== ARRAY_TYPE
3975 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type
),
3978 VEC(constructor_elt
,gc
) *e
= CONSTRUCTOR_ELTS (expr
);
3979 unsigned HOST_WIDE_INT len
= VEC_length (constructor_elt
, e
);
3980 VEC(constructor_elt
,gc
) *v
;
3981 unsigned HOST_WIDE_INT ix
;
3984 /* Build a VECTOR_CST from a *constant* array constructor. */
3985 if (TREE_CONSTANT (expr
))
3987 bool constant_p
= true;
3989 /* Iterate through elements and check if all constructor
3990 elements are *_CSTs. */
3991 FOR_EACH_CONSTRUCTOR_VALUE (e
, ix
, value
)
3992 if (!CONSTANT_CLASS_P (value
))
3999 return build_vector_from_ctor (type
,
4000 CONSTRUCTOR_ELTS (expr
));
4003 /* Otherwise, build a regular vector constructor. */
4004 v
= VEC_alloc (constructor_elt
, gc
, len
);
4005 FOR_EACH_CONSTRUCTOR_VALUE (e
, ix
, value
)
4007 constructor_elt
*elt
= VEC_quick_push (constructor_elt
, v
, NULL
);
4008 elt
->index
= NULL_TREE
;
4011 expr
= copy_node (expr
);
4012 TREE_TYPE (expr
) = type
;
4013 CONSTRUCTOR_ELTS (expr
) = v
;
4018 case UNCONSTRAINED_ARRAY_REF
:
4019 /* Convert this to the type of the inner array by getting the address of
4020 the array from the template. */
4021 expr
= TREE_OPERAND (expr
, 0);
4022 expr
= build_unary_op (INDIRECT_REF
, NULL_TREE
,
4023 build_component_ref (expr
, NULL_TREE
,
4027 etype
= TREE_TYPE (expr
);
4028 ecode
= TREE_CODE (etype
);
4031 case VIEW_CONVERT_EXPR
:
4033 /* GCC 4.x is very sensitive to type consistency overall, and view
4034 conversions thus are very frequent. Even though just "convert"ing
4035 the inner operand to the output type is fine in most cases, it
4036 might expose unexpected input/output type mismatches in special
4037 circumstances so we avoid such recursive calls when we can. */
4038 tree op0
= TREE_OPERAND (expr
, 0);
4040 /* If we are converting back to the original type, we can just
4041 lift the input conversion. This is a common occurrence with
4042 switches back-and-forth amongst type variants. */
4043 if (type
== TREE_TYPE (op0
))
4046 /* Otherwise, if we're converting between two aggregate or vector
4047 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4048 target type in place or to just convert the inner expression. */
4049 if ((AGGREGATE_TYPE_P (type
) && AGGREGATE_TYPE_P (etype
))
4050 || (VECTOR_TYPE_P (type
) && VECTOR_TYPE_P (etype
)))
4052 /* If we are converting between mere variants, we can just
4053 substitute the VIEW_CONVERT_EXPR in place. */
4054 if (gnat_types_compatible_p (type
, etype
))
4055 return build1 (VIEW_CONVERT_EXPR
, type
, op0
);
4057 /* Otherwise, we may just bypass the input view conversion unless
4058 one of the types is a fat pointer, which is handled by
4059 specialized code below which relies on exact type matching. */
4060 else if (!TYPE_IS_FAT_POINTER_P (type
)
4061 && !TYPE_IS_FAT_POINTER_P (etype
))
4062 return convert (type
, op0
);
  /* Check for converting to a pointer to an unconstrained array.  */
  if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
    return convert_to_fat_pointer (type, expr);

  /* If we are converting between two aggregate or vector types that are mere
     variants, just make a VIEW_CONVERT_EXPR.  Likewise when we are converting
     to a vector type from its representative array type.  */
  else if ((code == ecode
            && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
            && gnat_types_compatible_p (type, etype))
           || (code == VECTOR_TYPE
               && ecode == ARRAY_TYPE
               && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
                                           etype)))
    return build1 (VIEW_CONVERT_EXPR, type, expr);
4087 /* If we are converting between tagged types, try to upcast properly. */
4088 else if (ecode
== RECORD_TYPE
&& code
== RECORD_TYPE
4089 && TYPE_ALIGN_OK (etype
) && TYPE_ALIGN_OK (type
))
4091 tree child_etype
= etype
;
4093 tree field
= TYPE_FIELDS (child_etype
);
4094 if (DECL_NAME (field
) == parent_name_id
&& TREE_TYPE (field
) == type
)
4095 return build_component_ref (expr
, NULL_TREE
, field
, false);
4096 child_etype
= TREE_TYPE (field
);
4097 } while (TREE_CODE (child_etype
) == RECORD_TYPE
);
4100 /* If we are converting from a smaller form of record type back to it, just
4101 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4102 size on both sides. */
4103 else if (ecode
== RECORD_TYPE
&& code
== RECORD_TYPE
4104 && smaller_form_type_p (etype
, type
))
4106 expr
= convert (maybe_pad_type (etype
, TYPE_SIZE (type
), 0, Empty
,
4107 false, false, false, true),
4109 return build1 (VIEW_CONVERT_EXPR
, type
, expr
);
4112 /* In all other cases of related types, make a NOP_EXPR. */
4113 else if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (etype
))
4114 return fold_convert (type
, expr
);
4119 return fold_build1 (CONVERT_EXPR
, type
, expr
);
4122 if (TYPE_HAS_ACTUAL_BOUNDS_P (type
)
4123 && (ecode
== ARRAY_TYPE
|| ecode
== UNCONSTRAINED_ARRAY_TYPE
4124 || (ecode
== RECORD_TYPE
&& TYPE_CONTAINS_TEMPLATE_P (etype
))))
4125 return unchecked_convert (type
, expr
, false);
4126 else if (TYPE_BIASED_REPRESENTATION_P (type
))
4127 return fold_convert (type
,
4128 fold_build2 (MINUS_EXPR
, TREE_TYPE (type
),
4129 convert (TREE_TYPE (type
), expr
),
4130 TYPE_MIN_VALUE (type
)));
4132 /* ... fall through ... */
      /* If we are converting an additive expression to an integer type
         with lower precision, be wary of the optimization that can be
         applied by convert_to_integer.  There are 2 problematic cases:
           - if the first operand was originally of a biased type,
             because we could be recursively called to convert it
             to an intermediate type and thus rematerialize the
             additive operator endlessly,
           - if the expression contains a placeholder, because an
             intermediate conversion that changes the sign could
             be inserted and thus introduce an artificial overflow
             at compile time when the placeholder is substituted.  */
      if (code == INTEGER_TYPE
          && ecode == INTEGER_TYPE
          && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
          && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
        {
          tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);

          if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
               && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
              || CONTAINS_PLACEHOLDER_P (expr))
            return build1 (NOP_EXPR, type, expr);
        }

      return fold (convert_to_integer (type, expr));
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* If converting between two pointers to records denoting
         both a template and type, adjust if needed to account
         for any differing offsets, since one might be negative.  */
      if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
        {
          tree bit_diff
            = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
                           bit_position (TYPE_FIELDS (TREE_TYPE (type))));
          tree byte_diff
            = size_binop (CEIL_DIV_EXPR, bit_diff, sbitsize_unit_node);

          expr = build1 (NOP_EXPR, type, expr);
          TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
          if (integer_zerop (byte_diff))
            return expr;

          return build_binary_op (POINTER_PLUS_EXPR, type, expr,
                                  fold (convert (sizetype, byte_diff)));
        }
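      /* Illustrative sketch (not part of the original source; names are
         hypothetical): two thin pointer types may designate the object at
         different offsets relative to the template, so converting between
         them can require a byte adjustment like BYTE_DIFF above.  */
#if 0
struct example_bounds { int first, last; };
struct example_obj    { struct example_bounds bounds; int data[1]; };

static int *
example_rebase_thin_pointer (struct example_obj *p_at_bounds)
{
  /* Move from a pointer designating the bounds to one designating the
     array data, i.e. add the offset between the two fields.  */
  return p_at_bounds->data;
}
#endif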
4183 /* If converting to a thin pointer, handle specially. */
4184 if (TYPE_IS_THIN_POINTER_P (type
)
4185 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type
)))
4186 return convert_to_thin_pointer (type
, expr
);
4188 /* If converting fat pointer to normal pointer, get the pointer to the
4189 array and then convert it. */
4190 else if (TYPE_IS_FAT_POINTER_P (etype
))
4192 = build_component_ref (expr
, NULL_TREE
, TYPE_FIELDS (etype
), false);
4194 return fold (convert_to_pointer (type
, expr
));
4197 return fold (convert_to_real (type
, expr
));
4200 if (TYPE_JUSTIFIED_MODULAR_P (type
) && !AGGREGATE_TYPE_P (etype
))
4202 VEC(constructor_elt
,gc
) *v
= VEC_alloc (constructor_elt
, gc
, 1);
4204 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (type
),
4205 convert (TREE_TYPE (TYPE_FIELDS (type
)),
4207 return gnat_build_constructor (type
, v
);
4210 /* ... fall through ... */
4213 /* In these cases, assume the front-end has validated the conversion.
4214 If the conversion is valid, it will be a bit-wise conversion, so
4215 it can be viewed as an unchecked conversion. */
4216 return unchecked_convert (type
, expr
, false);
    case UNION_TYPE:
      /* This is either a conversion between a tagged type and some
         subtype, which we have to mark as a UNION_TYPE because of
         overlapping fields, or a conversion of an Unchecked_Union.  */
      return unchecked_convert (type, expr, false);
4224 case UNCONSTRAINED_ARRAY_TYPE
:
4225 /* If the input is a VECTOR_TYPE, convert to the representative
4226 array type first. */
4227 if (ecode
== VECTOR_TYPE
)
4229 expr
= convert (TYPE_REPRESENTATIVE_ARRAY (etype
), expr
);
4230 etype
= TREE_TYPE (expr
);
4231 ecode
= TREE_CODE (etype
);
4234 /* If EXPR is a constrained array, take its address, convert it to a
4235 fat pointer, and then dereference it. Likewise if EXPR is a
4236 record containing both a template and a constrained array.
4237 Note that a record representing a justified modular type
4238 always represents a packed constrained array. */
4239 if (ecode
== ARRAY_TYPE
4240 || (ecode
== INTEGER_TYPE
&& TYPE_HAS_ACTUAL_BOUNDS_P (etype
))
4241 || (ecode
== RECORD_TYPE
&& TYPE_CONTAINS_TEMPLATE_P (etype
))
4242 || (ecode
== RECORD_TYPE
&& TYPE_JUSTIFIED_MODULAR_P (etype
)))
4245 (INDIRECT_REF
, NULL_TREE
,
4246 convert_to_fat_pointer (TREE_TYPE (type
),
4247 build_unary_op (ADDR_EXPR
,
4250 /* Do something very similar for converting one unconstrained
4251 array to another. */
4252 else if (ecode
== UNCONSTRAINED_ARRAY_TYPE
)
4254 build_unary_op (INDIRECT_REF
, NULL_TREE
,
4255 convert (TREE_TYPE (type
),
4256 build_unary_op (ADDR_EXPR
,
4262 return fold (convert_to_complex (type
, expr
));
4269 /* Remove all conversions that are done in EXP. This includes converting
4270 from a padded type or to a justified modular type. If TRUE_ADDRESS
4271 is true, always return the address of the containing object even if
4272 the address is not bit-aligned. */
4275 remove_conversions (tree exp
, bool true_address
)
4277 switch (TREE_CODE (exp
))
4281 && TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
4282 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp
)))
4284 remove_conversions (VEC_index (constructor_elt
,
4285 CONSTRUCTOR_ELTS (exp
), 0)->value
,
4290 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp
, 0))))
4291 return remove_conversions (TREE_OPERAND (exp
, 0), true_address
);
4294 case VIEW_CONVERT_EXPR
: case NON_LVALUE_EXPR
:
4296 return remove_conversions (TREE_OPERAND (exp
, 0), true_address
);
4305 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
4306 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
4307 likewise return an expression pointing to the underlying array. */
4310 maybe_unconstrained_array (tree exp
)
4312 enum tree_code code
= TREE_CODE (exp
);
4315 switch (TREE_CODE (TREE_TYPE (exp
)))
4317 case UNCONSTRAINED_ARRAY_TYPE
:
4318 if (code
== UNCONSTRAINED_ARRAY_REF
)
4320 new_exp
= TREE_OPERAND (exp
, 0);
4322 = build_unary_op (INDIRECT_REF
, NULL_TREE
,
4323 build_component_ref (new_exp
, NULL_TREE
,
4325 (TREE_TYPE (new_exp
)),
4327 TREE_READONLY (new_exp
) = TREE_READONLY (exp
);
4331 else if (code
== NULL_EXPR
)
4332 return build1 (NULL_EXPR
,
4333 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
4334 (TREE_TYPE (TREE_TYPE (exp
))))),
4335 TREE_OPERAND (exp
, 0));
4338 /* If this is a padded type, convert to the unpadded type and see if
4339 it contains a template. */
4340 if (TYPE_PADDING_P (TREE_TYPE (exp
)))
4342 new_exp
= convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp
))), exp
);
4343 if (TREE_CODE (TREE_TYPE (new_exp
)) == RECORD_TYPE
4344 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new_exp
)))
4346 build_component_ref (new_exp
, NULL_TREE
,
4348 (TYPE_FIELDS (TREE_TYPE (new_exp
))),
4351 else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp
)))
4353 build_component_ref (exp
, NULL_TREE
,
4354 DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (exp
))),
4365 /* If EXP's type is a VECTOR_TYPE, return EXP converted to the associated
4366 TYPE_REPRESENTATIVE_ARRAY. */
4369 maybe_vector_array (tree exp
)
4371 tree etype
= TREE_TYPE (exp
);
4373 if (VECTOR_TYPE_P (etype
))
4374 exp
= convert (TYPE_REPRESENTATIVE_ARRAY (etype
), exp
);
4379 /* Return true if EXPR is an expression that can be folded as an operand
4380 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
4383 can_fold_for_view_convert_p (tree expr
)
4387 /* The folder will fold NOP_EXPRs between integral types with the same
4388 precision (in the middle-end's sense). We cannot allow it if the
4389 types don't have the same precision in the Ada sense as well. */
4390 if (TREE_CODE (expr
) != NOP_EXPR
)
4393 t1
= TREE_TYPE (expr
);
4394 t2
= TREE_TYPE (TREE_OPERAND (expr
, 0));
4396 /* Defer to the folder for non-integral conversions. */
4397 if (!(INTEGRAL_TYPE_P (t1
) && INTEGRAL_TYPE_P (t2
)))
4400 /* Only fold conversions that preserve both precisions. */
4401 if (TYPE_PRECISION (t1
) == TYPE_PRECISION (t2
)
4402 && operand_equal_p (rm_size (t1
), rm_size (t2
), 0))
/* Return an expression that does an unchecked conversion of EXPR to TYPE.
   If NOTRUNC_P is true, truncation operations should be suppressed.

   Special care is required with (source or target) integral types whose
   precision is not equal to their size, to make sure we fetch or assign
   the value bits whose location might depend on the endianness, e.g.

     Rmsize : constant := 8;
     subtype Int is Integer range 0 .. 2 ** Rmsize - 1;

     type Bit_Array is array (1 .. Rmsize) of Boolean;
     pragma Pack (Bit_Array);

     function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);

     Value : Int := 2#1000_0001#;
     Vbits : Bit_Array := To_Bit_Array (Value);

   we expect the 8 bits at Vbits'Address to always contain Value, while
   their original location depends on the endianness, at Value'Address
   on a little-endian architecture but not on a big-endian one.  */
tree
unchecked_convert (tree type, tree expr, bool notrunc_p)
{
  tree etype = TREE_TYPE (expr);
  enum tree_code ecode = TREE_CODE (etype);
  enum tree_code code = TREE_CODE (type);
  int c;

  /* If the expression is already of the right type, we are done.  */
  if (etype == type)
    return expr;
  /* If both types are integral just do a normal conversion.
     Likewise for a conversion to an unconstrained array.  */
  if ((((INTEGRAL_TYPE_P (type)
         && !(code == INTEGER_TYPE && TYPE_VAX_FLOATING_POINT_P (type)))
        || (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
        || (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
       && ((INTEGRAL_TYPE_P (etype)
            && !(ecode == INTEGER_TYPE && TYPE_VAX_FLOATING_POINT_P (etype)))
           || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
           || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
      || code == UNCONSTRAINED_ARRAY_TYPE)
    {
      if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
        {
          tree ntype = copy_type (etype);
          TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
          TYPE_MAIN_VARIANT (ntype) = ntype;
          expr = build1 (NOP_EXPR, ntype, expr);
        }

      if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
        {
          tree rtype = copy_type (type);
          TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
          TYPE_MAIN_VARIANT (rtype) = rtype;
          expr = convert (rtype, expr);
          expr = build1 (NOP_EXPR, type, expr);
        }
      else
        expr = convert (type, expr);
    }
  /* If we are converting to an integral type whose precision is not equal
     to its size, first unchecked convert to a record that contains an
     object of the output type.  Then extract the field.  */
  else if (INTEGRAL_TYPE_P (type)
           && TYPE_RM_SIZE (type)
           && 0 != compare_tree_int (TYPE_RM_SIZE (type),
                                     GET_MODE_BITSIZE (TYPE_MODE (type))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      tree field = create_field_decl (get_identifier ("OBJ"), type, rec_type,
                                      NULL_TREE, NULL_TREE, 1, 0);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      expr = unchecked_convert (rec_type, expr, notrunc_p);
      expr = build_component_ref (expr, NULL_TREE, field, false);
    }
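  /* Illustrative sketch (not part of the original source; names are
     hypothetical): wrapping the value in a record and reading the field
     back lets the compiler, not the programmer, locate the value bits, much
     as a C bit-field access does regardless of endianness.  */
#if 0
struct example_rec { unsigned int obj : 5; };  /* 5 "value" bits in a word */

static unsigned int
example_fetch_value_bits (struct example_rec r)
{
  return r.obj;                  /* fetches exactly the 5 value bits */
}
#endif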
  /* Similarly if we are converting from an integral type whose precision
     is not equal to its size.  */
  else if (INTEGRAL_TYPE_P (etype)
           && TYPE_RM_SIZE (etype)
           && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
                                     GET_MODE_BITSIZE (TYPE_MODE (etype))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      tree field = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
                                      NULL_TREE, NULL_TREE, 1, 0);
      VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 1);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      CONSTRUCTOR_APPEND_ELT (v, field, expr);
      expr = gnat_build_constructor (rec_type, v);
      expr = unchecked_convert (type, expr, notrunc_p);
    }
4513 /* If we are converting from a scalar type to a type with a different size,
4514 we need to pad to have the same size on both sides.
4516 ??? We cannot do it unconditionally because unchecked conversions are
4517 used liberally by the front-end to implement polymorphism, e.g. in:
4519 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
4520 return p___size__4 (p__object!(S191s.all));
4522 so we skip all expressions that are references. */
4523 else if (!REFERENCE_CLASS_P (expr
)
4524 && !AGGREGATE_TYPE_P (etype
)
4525 && TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
4526 && (c
= tree_int_cst_compare (TYPE_SIZE (etype
), TYPE_SIZE (type
))))
4530 expr
= convert (maybe_pad_type (etype
, TYPE_SIZE (type
), 0, Empty
,
4531 false, false, false, true),
4533 expr
= unchecked_convert (type
, expr
, notrunc_p
);
4537 tree rec_type
= maybe_pad_type (type
, TYPE_SIZE (etype
), 0, Empty
,
4538 false, false, false, true);
4539 expr
= unchecked_convert (rec_type
, expr
, notrunc_p
);
4540 expr
= build_component_ref (expr
, NULL_TREE
, TYPE_FIELDS (rec_type
),
4545 /* We have a special case when we are converting between two unconstrained
4546 array types. In that case, take the address, convert the fat pointer
4547 types, and dereference. */
4548 else if (ecode
== code
&& code
== UNCONSTRAINED_ARRAY_TYPE
)
4549 expr
= build_unary_op (INDIRECT_REF
, NULL_TREE
,
4550 build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (type
),
4551 build_unary_op (ADDR_EXPR
, NULL_TREE
,
  /* Another special case is when we are converting to a vector type from its
     representative array type; this is a regular conversion.  */
  else if (code == VECTOR_TYPE
           && ecode == ARRAY_TYPE
           && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
                                       etype))
    expr = convert (type, expr);
4564 expr
= maybe_unconstrained_array (expr
);
4565 etype
= TREE_TYPE (expr
);
4566 ecode
= TREE_CODE (etype
);
4567 if (can_fold_for_view_convert_p (expr
))
4568 expr
= fold_build1 (VIEW_CONVERT_EXPR
, type
, expr
);
4570 expr
= build1 (VIEW_CONVERT_EXPR
, type
, expr
);
  /* If the result is an integral type whose precision is not equal to its
     size, sign- or zero-extend the result.  We need not do this if the input
     is an integral type of the same precision and signedness or if the output
     is a biased type or if both the input and output are unsigned.  */
  if (!notrunc_p
      && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
      && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
      && 0 != compare_tree_int (TYPE_RM_SIZE (type),
                                GET_MODE_BITSIZE (TYPE_MODE (type)))
      && !(INTEGRAL_TYPE_P (etype)
           && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
           && operand_equal_p (TYPE_RM_SIZE (type),
                               (TYPE_RM_SIZE (etype) != 0
                                ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
                               0))
      && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
    {
      tree base_type
        = gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
      tree shift_expr
        = convert (base_type,
                   size_binop (MINUS_EXPR,
                               bitsize_int
                               (GET_MODE_BITSIZE (TYPE_MODE (type))),
                               TYPE_RM_SIZE (type)));
      expr
        = convert (type,
                   build_binary_op (RSHIFT_EXPR, base_type,
                                    build_binary_op (LSHIFT_EXPR, base_type,
                                                     convert (base_type, expr),
                                                     shift_expr),
                                    shift_expr));
    }
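  /* Illustrative sketch (not part of the original source): the LSHIFT/RSHIFT
     pair above is the classic way of sign-extending a value held in fewer
     bits than its container, shown here for an 8-bit value in a 32-bit int
     (assuming an arithmetic right shift on signed types).  */
#if 0
static int
example_sign_extend_8 (int value)
{
  const int shift = 32 - 8;      /* container bits minus RM size */
  return (value << shift) >> shift;
}
#endif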
  /* An unchecked conversion should never raise Constraint_Error.  The code
     below assumes that GCC's conversion routines overflow the same way that
     the underlying hardware does.  This is probably true.  In the rare case
     when it is false, we can rely on the fact that such conversions are
     erroneous anyway.  */
  if (TREE_CODE (expr) == INTEGER_CST)
    TREE_OVERFLOW (expr) = 0;

  /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
     show no longer constant.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
      && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
                           OEP_ONLY_CONST))
    TREE_CONSTANT (expr) = 0;

  return expr;
}
4625 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
4626 the latter being a record type as predicated by Is_Record_Type. */
4629 tree_code_for_record_type (Entity_Id gnat_type
)
4631 Node_Id component_list
4632 = Component_List (Type_Definition
4634 (Implementation_Base_Type (gnat_type
))));
4637 /* Make this a UNION_TYPE unless it's either not an Unchecked_Union or
4638 we have a non-discriminant field outside a variant. In either case,
4639 it's a RECORD_TYPE. */
4641 if (!Is_Unchecked_Union (gnat_type
))
4644 for (component
= First_Non_Pragma (Component_Items (component_list
));
4645 Present (component
);
4646 component
= Next_Non_Pragma (component
))
4647 if (Ekind (Defining_Entity (component
)) == E_Component
)
/* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
   size is equal to 64 bits, or an array of such a type.  Set ALIGN_CLAUSE
   according to the presence of an alignment clause on the type or, if it
   is an array, on the component type.  */

bool
is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
{
  gnat_type = Underlying_Type (gnat_type);

  *align_clause = Present (Alignment_Clause (gnat_type));

  if (Is_Array_Type (gnat_type))
    {
      gnat_type = Underlying_Type (Component_Type (gnat_type));
      if (Present (Alignment_Clause (gnat_type)))
        *align_clause = true;
    }

  if (!Is_Floating_Point_Type (gnat_type))
    return false;

  if (UI_To_Int (Esize (gnat_type)) != 64)
    return false;

  return true;
}
4681 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
4682 size is greater or equal to 64 bits, or an array of such a type. Set
4683 ALIGN_CLAUSE according to the presence of an alignment clause on the
4684 type or, if it is an array, on the component type. */
4687 is_double_scalar_or_array (Entity_Id gnat_type
, bool *align_clause
)
4689 gnat_type
= Underlying_Type (gnat_type
);
4691 *align_clause
= Present (Alignment_Clause (gnat_type
));
4693 if (Is_Array_Type (gnat_type
))
4695 gnat_type
= Underlying_Type (Component_Type (gnat_type
));
4696 if (Present (Alignment_Clause (gnat_type
)))
4697 *align_clause
= true;
4700 if (!Is_Scalar_Type (gnat_type
))
4703 if (UI_To_Int (Esize (gnat_type
)) < 64)
/* Return true if GNU_TYPE is suitable as the type of a non-aliased
   component of an aggregate type.  */

bool
type_for_nonaliased_component_p (tree gnu_type)
{
  /* If the type is passed by reference, we may have pointers to the
     component so it cannot be made non-aliased.  */
  if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
    return false;

  /* We used to say that any component of aggregate type is aliased
     because the front-end may take 'Reference of it.  The front-end
     has been enhanced in the meantime so as to use a renaming instead
     in most cases, but the back-end can probably take the address of
     such a component too so we go for the conservative stance.

     For instance, we might need the address of any array type, even
     if normally passed by copy, to construct a fat pointer if the
     component is used as an actual for an unconstrained formal.

     Likewise for record types: even if a specific record subtype is
     passed by copy, the parent type might be passed by ref (e.g. if
     it's of variable size) and we might take the address of a child
     component to pass to a parent formal.  We have no way to check
     for such conditions here.  */
  if (AGGREGATE_TYPE_P (gnu_type))
    return false;

  return true;
}
4741 /* Return true if TYPE is a smaller form of ORIG_TYPE. */
4744 smaller_form_type_p (tree type
, tree orig_type
)
4748 /* We're not interested in variants here. */
4749 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig_type
))
4752 /* Like a variant, a packable version keeps the original TYPE_NAME. */
4753 if (TYPE_NAME (type
) != TYPE_NAME (orig_type
))
4756 size
= TYPE_SIZE (type
);
4757 osize
= TYPE_SIZE (orig_type
);
4759 if (!(TREE_CODE (size
) == INTEGER_CST
&& TREE_CODE (osize
) == INTEGER_CST
))
4762 return tree_int_cst_lt (size
, osize
) != 0;
4765 /* Perform final processing on global variables. */
4767 static GTY (()) tree dummy_global
;
4770 gnat_write_global_declarations (void)
4772 /* If we have declared types as used at the global level, insert them in
4773 the global hash table. We use a dummy variable for this purpose. */
4774 if (!VEC_empty (tree
, types_used_by_cur_var_decl
))
4777 = build_decl (BUILTINS_LOCATION
, VAR_DECL
, NULL_TREE
, void_type_node
);
4778 TREE_STATIC (dummy_global
) = 1;
4779 TREE_ASM_WRITTEN (dummy_global
) = 1;
4780 varpool_mark_needed_node (varpool_node (dummy_global
));
4782 while (!VEC_empty (tree
, types_used_by_cur_var_decl
))
4784 tree t
= VEC_pop (tree
, types_used_by_cur_var_decl
);
4785 types_used_by_var_decl_insert (t
, dummy_global
);
4789 /* Proceed to optimize and emit assembly.
4790 FIXME: shouldn't be the front end's responsibility to call this. */
4791 cgraph_finalize_compilation_unit ();
4793 /* Emit debug info for all global declarations. */
4794 emit_debug_global_declarations (VEC_address (tree
, global_decls
),
4795 VEC_length (tree
, global_decls
));
/* ************************************************************************
 * *                         GCC builtins support                        * *
 * ************************************************************************ */

/* The general scheme is fairly simple:

   For each builtin function/type to be declared, gnat_install_builtins calls
   internal facilities which eventually get to gnat_pushdecl, which in turn
   tracks the so declared builtin function decls in the 'builtin_decls' global
   datastructure.  When an Intrinsic subprogram declaration is processed, we
   search this global datastructure to retrieve the associated BUILT_IN DECL
   node.  */

/* Search the chain of currently available builtin declarations for a node
   corresponding to function NAME (an IDENTIFIER_NODE).  Return the first node
   found, if any, or NULL_TREE otherwise.  */
4815 builtin_decl_for (tree name
)
4820 FOR_EACH_VEC_ELT (tree
, builtin_decls
, i
, decl
)
4821 if (DECL_NAME (decl
) == name
)
/* The code below eventually exposes gnat_install_builtins, which declares
   the builtin types and functions we might need, either internally or as
   user accessible facilities.

   ??? This is a first implementation shot, still in rough shape.  It is
   heavily inspired from the "C" family implementation, with chunks copied
   verbatim from there.

   Two obvious TODO candidates are
   o Use a more efficient name/decl mapping scheme
   o Devise a middle-end infrastructure to avoid having to copy
     pieces between front-ends.  */

/* ----------------------------------------------------------------------- *
 *                        BUILTIN ELEMENTARY TYPES                          *
 * ----------------------------------------------------------------------- */

/* Standard data types to be used in builtin argument declarations.  */
4848 CTI_SIGNED_SIZE_TYPE
, /* For format checking only. */
4850 CTI_CONST_STRING_TYPE
,
4855 static tree c_global_trees
[CTI_MAX
];
4857 #define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
4858 #define string_type_node c_global_trees[CTI_STRING_TYPE]
4859 #define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
/* ??? In addition to some attribute handlers, we currently don't support a
   (small) number of builtin-types, which in turn inhibits support for a
   number of builtin functions.  */
#define wint_type_node    void_type_node
#define intmax_type_node  void_type_node
#define uintmax_type_node void_type_node
4868 /* Build the void_list_node (void_type_node having been created). */
4871 build_void_list_node (void)
4873 tree t
= build_tree_list (NULL_TREE
, void_type_node
);
/* Used to help initialize the builtin-types.def table.  When a type of
   the correct size doesn't exist, use error_mark_node instead of NULL.
   The latter results in segfaults even when a decl using the type doesn't
   get invoked.  */

static tree
builtin_type_for_size (int size, bool unsignedp)
{
  tree type = gnat_type_for_size (size, unsignedp);
  return type ? type : error_mark_node;
}
4889 /* Build/push the elementary type decls that builtin functions/types
4893 install_builtin_elementary_types (void)
4895 signed_size_type_node
= gnat_signed_type (size_type_node
);
4896 pid_type_node
= integer_type_node
;
4897 void_list_node
= build_void_list_node ();
4899 string_type_node
= build_pointer_type (char_type_node
);
4900 const_string_type_node
4901 = build_pointer_type (build_qualified_type
4902 (char_type_node
, TYPE_QUAL_CONST
));
4905 /* ----------------------------------------------------------------------- *
4906 * BUILTIN FUNCTION TYPES *
4907 * ----------------------------------------------------------------------- */
4909 /* Now, builtin function types per se. */
4913 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
4914 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
4915 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
4916 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
4917 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4918 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4919 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
4920 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
4921 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
4922 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
4923 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
4924 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
4925 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4926 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4927 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG6) \
4929 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
4930 #include "builtin-types.def"
4931 #undef DEF_PRIMITIVE_TYPE
4932 #undef DEF_FUNCTION_TYPE_0
4933 #undef DEF_FUNCTION_TYPE_1
4934 #undef DEF_FUNCTION_TYPE_2
4935 #undef DEF_FUNCTION_TYPE_3
4936 #undef DEF_FUNCTION_TYPE_4
4937 #undef DEF_FUNCTION_TYPE_5
4938 #undef DEF_FUNCTION_TYPE_6
4939 #undef DEF_FUNCTION_TYPE_7
4940 #undef DEF_FUNCTION_TYPE_VAR_0
4941 #undef DEF_FUNCTION_TYPE_VAR_1
4942 #undef DEF_FUNCTION_TYPE_VAR_2
4943 #undef DEF_FUNCTION_TYPE_VAR_3
4944 #undef DEF_FUNCTION_TYPE_VAR_4
4945 #undef DEF_FUNCTION_TYPE_VAR_5
4946 #undef DEF_POINTER_TYPE
4950 typedef enum c_builtin_type builtin_type
;
4952 /* A temporary array used in communication with def_fn_type. */
4953 static GTY(()) tree builtin_types
[(int) BT_LAST
+ 1];
4955 /* A helper function for install_builtin_types. Build function type
4956 for DEF with return type RET and N arguments. If VAR is true, then the
4957 function should be variadic after those N arguments.
4959 Takes special care not to ICE if any of the types involved are
4960 error_mark_node, which indicates that said type is not in fact available
4961 (see builtin_type_for_size). In which case the function type as a whole
4962 should be error_mark_node. */
4965 def_fn_type (builtin_type def
, builtin_type ret
, bool var
, int n
, ...)
4967 tree args
= NULL
, t
;
4972 for (i
= 0; i
< n
; ++i
)
4974 builtin_type a
= (builtin_type
) va_arg (list
, int);
4975 t
= builtin_types
[a
];
4976 if (t
== error_mark_node
)
4978 args
= tree_cons (NULL_TREE
, t
, args
);
4982 args
= nreverse (args
);
4984 args
= chainon (args
, void_list_node
);
4986 t
= builtin_types
[ret
];
4987 if (t
== error_mark_node
)
4989 t
= build_function_type (t
, args
);
4992 builtin_types
[def
] = t
;
4996 /* Build the builtin function types and install them in the builtin_types
4997 array for later use in builtin function decls. */
5000 install_builtin_function_types (void)
5002 tree va_list_ref_type_node
;
5003 tree va_list_arg_type_node
;
5005 if (TREE_CODE (va_list_type_node
) == ARRAY_TYPE
)
5007 va_list_arg_type_node
= va_list_ref_type_node
=
5008 build_pointer_type (TREE_TYPE (va_list_type_node
));
5012 va_list_arg_type_node
= va_list_type_node
;
5013 va_list_ref_type_node
= build_reference_type (va_list_type_node
);
5016 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
5017 builtin_types[ENUM] = VALUE;
5018 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
5019 def_fn_type (ENUM, RETURN, 0, 0);
5020 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
5021 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
5022 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
5023 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
5024 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5025 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
5026 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5027 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
5028 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5029 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
#define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
                            ARG6) \
  def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
#define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
                            ARG6, ARG7) \
  def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5036 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
5037 def_fn_type (ENUM, RETURN, 1, 0);
5038 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
5039 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
5040 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
5041 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
5042 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5043 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
5044 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5045 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
5046 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5047 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5048 #define DEF_POINTER_TYPE(ENUM, TYPE) \
5049 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
5051 #include "builtin-types.def"
5053 #undef DEF_PRIMITIVE_TYPE
5054 #undef DEF_FUNCTION_TYPE_1
5055 #undef DEF_FUNCTION_TYPE_2
5056 #undef DEF_FUNCTION_TYPE_3
5057 #undef DEF_FUNCTION_TYPE_4
5058 #undef DEF_FUNCTION_TYPE_5
5059 #undef DEF_FUNCTION_TYPE_6
5060 #undef DEF_FUNCTION_TYPE_VAR_0
5061 #undef DEF_FUNCTION_TYPE_VAR_1
5062 #undef DEF_FUNCTION_TYPE_VAR_2
5063 #undef DEF_FUNCTION_TYPE_VAR_3
5064 #undef DEF_FUNCTION_TYPE_VAR_4
5065 #undef DEF_FUNCTION_TYPE_VAR_5
5066 #undef DEF_POINTER_TYPE
  builtin_types[(int) BT_LAST] = NULL_TREE;
}
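/* Illustrative note (not part of the original source): with the definitions
   above, a builtin-types.def line such as

     DEF_FUNCTION_TYPE_1 (BT_FN_INT_INT, BT_INT, BT_INT)

   expands to

     def_fn_type (BT_FN_INT_INT, BT_INT, 0, 1, BT_INT);

   i.e. a non-variadic "int (int)" function type is built and stored in
   builtin_types[BT_FN_INT_INT] for the builtin decl machinery to use.  */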
5070 /* ----------------------------------------------------------------------- *
5071 * BUILTIN ATTRIBUTES *
5072 * ----------------------------------------------------------------------- */
5074 enum built_in_attribute
5076 #define DEF_ATTR_NULL_TREE(ENUM) ENUM,
5077 #define DEF_ATTR_INT(ENUM, VALUE) ENUM,
5078 #define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
5079 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
5080 #include "builtin-attrs.def"
5081 #undef DEF_ATTR_NULL_TREE
5083 #undef DEF_ATTR_IDENT
5084 #undef DEF_ATTR_TREE_LIST
5088 static GTY(()) tree built_in_attributes
[(int) ATTR_LAST
];
5091 install_builtin_attributes (void)
5093 /* Fill in the built_in_attributes array. */
5094 #define DEF_ATTR_NULL_TREE(ENUM) \
5095 built_in_attributes[(int) ENUM] = NULL_TREE;
5096 #define DEF_ATTR_INT(ENUM, VALUE) \
5097 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
5098 #define DEF_ATTR_IDENT(ENUM, STRING) \
5099 built_in_attributes[(int) ENUM] = get_identifier (STRING);
5100 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
5101 built_in_attributes[(int) ENUM] \
5102 = tree_cons (built_in_attributes[(int) PURPOSE], \
5103 built_in_attributes[(int) VALUE], \
5104 built_in_attributes[(int) CHAIN]);
5105 #include "builtin-attrs.def"
5106 #undef DEF_ATTR_NULL_TREE
5108 #undef DEF_ATTR_IDENT
5109 #undef DEF_ATTR_TREE_LIST
/* Handle a "const" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_const_attribute (tree *node, tree ARG_UNUSED (name),
                        tree ARG_UNUSED (args), int ARG_UNUSED (flags),
                        bool *no_add_attrs)
{
  if (TREE_CODE (*node) == FUNCTION_DECL)
    TREE_READONLY (*node) = 1;
  else
    *no_add_attrs = true;

  return NULL_TREE;
}
/* Handle a "nothrow" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
                          tree ARG_UNUSED (args), int ARG_UNUSED (flags),
                          bool *no_add_attrs)
{
  if (TREE_CODE (*node) == FUNCTION_DECL)
    TREE_NOTHROW (*node) = 1;
  else
    *no_add_attrs = true;

  return NULL_TREE;
}
5144 /* Handle a "pure" attribute; arguments as in
5145 struct attribute_spec.handler. */
5148 handle_pure_attribute (tree
*node
, tree name
, tree
ARG_UNUSED (args
),
5149 int ARG_UNUSED (flags
), bool *no_add_attrs
)
5151 if (TREE_CODE (*node
) == FUNCTION_DECL
)
5152 DECL_PURE_P (*node
) = 1;
5153 /* ??? TODO: Support types. */
5156 warning (OPT_Wattributes
, "%qs attribute ignored",
5157 IDENTIFIER_POINTER (name
));
5158 *no_add_attrs
= true;
5164 /* Handle a "no vops" attribute; arguments as in
5165 struct attribute_spec.handler. */
5168 handle_novops_attribute (tree
*node
, tree
ARG_UNUSED (name
),
5169 tree
ARG_UNUSED (args
), int ARG_UNUSED (flags
),
5170 bool *ARG_UNUSED (no_add_attrs
))
5172 gcc_assert (TREE_CODE (*node
) == FUNCTION_DECL
);
5173 DECL_IS_NOVOPS (*node
) = 1;
/* Helper for nonnull attribute handling; fetch the operand number
   from the attribute argument list.  */

static bool
get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
{
  /* Verify the arg number is a constant.  */
  if (TREE_CODE (arg_num_expr) != INTEGER_CST
      || TREE_INT_CST_HIGH (arg_num_expr) != 0)
    return false;

  *valp = TREE_INT_CST_LOW (arg_num_expr);
  return true;
}
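/* Illustrative sketch (not part of the original source; the declaration is
   hypothetical): the kind of C declaration whose "nonnull" arguments the
   handler below validates; operand numbers designate pointer parameters,
   counted from 1.  */
#if 0
extern char *example_copy (char *dst, const char *src)
  __attribute__ ((nonnull (1, 2)));
#endif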
5192 /* Handle the "nonnull" attribute. */
5194 handle_nonnull_attribute (tree
*node
, tree
ARG_UNUSED (name
),
5195 tree args
, int ARG_UNUSED (flags
),
5199 unsigned HOST_WIDE_INT attr_arg_num
;
5201 /* If no arguments are specified, all pointer arguments should be
5202 non-null. Verify a full prototype is given so that the arguments
5203 will have the correct types when we actually check them later. */
5206 if (!prototype_p (type
))
5208 error ("nonnull attribute without arguments on a non-prototype");
5209 *no_add_attrs
= true;
5214 /* Argument list specified. Verify that each argument number references
5215 a pointer argument. */
5216 for (attr_arg_num
= 1; args
; args
= TREE_CHAIN (args
))
5219 unsigned HOST_WIDE_INT arg_num
= 0, ck_num
;
5221 if (!get_nonnull_operand (TREE_VALUE (args
), &arg_num
))
5223 error ("nonnull argument has invalid operand number (argument %lu)",
5224 (unsigned long) attr_arg_num
);
5225 *no_add_attrs
= true;
5229 argument
= TYPE_ARG_TYPES (type
);
5232 for (ck_num
= 1; ; ck_num
++)
5234 if (!argument
|| ck_num
== arg_num
)
5236 argument
= TREE_CHAIN (argument
);
5240 || TREE_CODE (TREE_VALUE (argument
)) == VOID_TYPE
)
5242 error ("nonnull argument with out-of-range operand number "
5243 "(argument %lu, operand %lu)",
5244 (unsigned long) attr_arg_num
, (unsigned long) arg_num
);
5245 *no_add_attrs
= true;
5249 if (TREE_CODE (TREE_VALUE (argument
)) != POINTER_TYPE
)
5251 error ("nonnull argument references non-pointer operand "
5252 "(argument %lu, operand %lu)",
5253 (unsigned long) attr_arg_num
, (unsigned long) arg_num
);
5254 *no_add_attrs
= true;
5263 /* Handle a "sentinel" attribute. */
5266 handle_sentinel_attribute (tree
*node
, tree name
, tree args
,
5267 int ARG_UNUSED (flags
), bool *no_add_attrs
)
5269 if (!prototype_p (*node
))
5271 warning (OPT_Wattributes
,
5272 "%qs attribute requires prototypes with named arguments",
5273 IDENTIFIER_POINTER (name
));
5274 *no_add_attrs
= true;
5278 if (!stdarg_p (*node
))
5280 warning (OPT_Wattributes
,
5281 "%qs attribute only applies to variadic functions",
5282 IDENTIFIER_POINTER (name
));
5283 *no_add_attrs
= true;
5289 tree position
= TREE_VALUE (args
);
5291 if (TREE_CODE (position
) != INTEGER_CST
)
5293 warning (0, "requested position is not an integer constant");
5294 *no_add_attrs
= true;
5298 if (tree_int_cst_lt (position
, integer_zero_node
))
5300 warning (0, "requested position is less than zero");
5301 *no_add_attrs
= true;
/* Handle a "noreturn" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
			   int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree type = TREE_TYPE (*node);

  /* See FIXME comment in c_common_attribute_table.  */
  if (TREE_CODE (*node) == FUNCTION_DECL)
    TREE_THIS_VOLATILE (*node) = 1;
  else if (TREE_CODE (type) == POINTER_TYPE
	   && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
    TREE_TYPE (*node)
      = build_pointer_type
	(build_type_variant (TREE_TYPE (type),
			     TYPE_READONLY (TREE_TYPE (type)), 1));
  else
    {
      warning (OPT_Wattributes, "%qs attribute ignored",
	       IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
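
/* Illustration only (not part of the original source): the "noreturn"
   handler above marks either a FUNCTION_DECL or a pointer-to-function type,
   matching GNU C declarations like this hypothetical one:

     extern void fatal_error (const char *msg) __attribute__ ((noreturn));

   On a FUNCTION_DECL this sets TREE_THIS_VOLATILE, which lets the
   optimizers assume control never returns past a call to the function.  */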
/* Handle a "leaf" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_leaf_attribute (tree *node, tree name,
		       tree ARG_UNUSED (args),
		       int ARG_UNUSED (flags), bool *no_add_attrs)
{
  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (OPT_Wattributes, "%qE attribute ignored", name);
      *no_add_attrs = true;
    }
  if (!TREE_PUBLIC (*node))
    {
      warning (OPT_Wattributes, "%qE attribute has no effect", name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
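
/* Illustration only (not part of the original source): "leaf" is meaningful
   for public functions called from other translation units, e.g. the
   hypothetical declaration

     extern int compute_checksum (const char *buf, int len)
       __attribute__ ((leaf));

   which promises that the external callee comes back to the current unit
   only by returning (or by exception), never by calling back into it.  That
   is why the handler above warns when the decl is not TREE_PUBLIC.  */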
/* Handle a "malloc" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
			 int ARG_UNUSED (flags), bool *no_add_attrs)
{
  if (TREE_CODE (*node) == FUNCTION_DECL
      && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
    DECL_IS_MALLOC (*node) = 1;
  else
    {
      warning (OPT_Wattributes, "%qs attribute ignored",
	       IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
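
/* Illustration only (not part of the original source): DECL_IS_MALLOC is the
   tree-level counterpart of GNU C declarations such as the hypothetical

     #include <stddef.h>

     extern void *pool_alloc (size_t size) __attribute__ ((malloc));

   i.e. the returned pointer does not alias any other pointer valid at the
   time of the call, which is why the handler above insists on a pointer
   return type.  */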
/* Fake handler for attributes we don't properly support.  */

static tree
fake_attribute_handler (tree * ARG_UNUSED (node),
			tree ARG_UNUSED (name),
			tree ARG_UNUSED (args),
			int ARG_UNUSED (flags),
			bool * ARG_UNUSED (no_add_attrs))
{
  return NULL_TREE;
}
/* Handle a "type_generic" attribute.  */

static tree
handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
			       tree ARG_UNUSED (args), int ARG_UNUSED (flags),
			       bool * ARG_UNUSED (no_add_attrs))
{
  /* Ensure we have a function type.  */
  gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);

  /* Ensure we have a variadic function.  */
  gcc_assert (!prototype_p (*node) || stdarg_p (*node));

  return NULL_TREE;
}
/* Handle a "vector_size" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_vector_size_attribute (tree *node, tree name, tree args,
			      int ARG_UNUSED (flags),
			      bool *no_add_attrs)
{
  unsigned HOST_WIDE_INT vecsize, nunits;
  enum machine_mode orig_mode;
  tree type = *node, new_type, size;

  *no_add_attrs = true;

  size = TREE_VALUE (args);

  if (!host_integerp (size, 1))
    {
      warning (OPT_Wattributes, "%qs attribute ignored",
	       IDENTIFIER_POINTER (name));
      return NULL_TREE;
    }

  /* Get the vector size (in bytes).  */
  vecsize = tree_low_cst (size, 1);

  /* We need to provide for vector pointers, vector arrays, and
     functions returning vectors.  For example:

       __attribute__((vector_size(16))) short *foo;

     In this case, the mode is SI, but the type being modified is
     HI, so we need to look further.  */
  while (POINTER_TYPE_P (type)
	 || TREE_CODE (type) == FUNCTION_TYPE
	 || TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);

  /* Get the mode of the type being modified.  */
  orig_mode = TYPE_MODE (type);

  if ((!INTEGRAL_TYPE_P (type)
       && !SCALAR_FLOAT_TYPE_P (type)
       && !FIXED_POINT_TYPE_P (type))
      || (!SCALAR_FLOAT_MODE_P (orig_mode)
	  && GET_MODE_CLASS (orig_mode) != MODE_INT
	  && !ALL_SCALAR_FIXED_POINT_MODE_P (orig_mode))
      || !host_integerp (TYPE_SIZE_UNIT (type), 1)
      || TREE_CODE (type) == BOOLEAN_TYPE)
    {
      error ("invalid vector type for attribute %qs",
	     IDENTIFIER_POINTER (name));
      return NULL_TREE;
    }

  if (vecsize % tree_low_cst (TYPE_SIZE_UNIT (type), 1))
    {
      error ("vector size not an integral multiple of component size");
      return NULL_TREE;
    }

  if (vecsize == 0)
    {
      error ("zero vector size");
      return NULL_TREE;
    }

  /* Calculate how many units fit in the vector.  */
  nunits = vecsize / tree_low_cst (TYPE_SIZE_UNIT (type), 1);
  if (nunits & (nunits - 1))
    {
      error ("number of components of the vector not a power of two");
      return NULL_TREE;
    }

  new_type = build_vector_type (type, nunits);

  /* Build back pointers if needed.  */
  *node = reconstruct_complex_type (*node, new_type);

  return NULL_TREE;
}
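
/* Illustration only (not part of the original source): a self-contained GNU C
   snippet exercising the "vector_size" handler above; v4si and add_v4si are
   hypothetical names.  The element type is int and the requested size is 16
   bytes, so the handler builds a vector of 16 / sizeof (int) units (4 on
   typical targets):

     typedef int v4si __attribute__ ((vector_size (16)));

     static v4si
     add_v4si (v4si a, v4si b)
     {
       return a + b;
     }

   Sizes that are not a multiple of the element size, or that yield a
   non-power-of-two unit count, are rejected by the checks above.  */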
/* Handle a "vector_type" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
			      int ARG_UNUSED (flags),
			      bool *no_add_attrs)
{
  /* Vector representative type and size.  */
  tree rep_type = *node;
  tree rep_size = TYPE_SIZE_UNIT (rep_type);
  tree rep_name;

  /* Vector size in bytes and number of units.  */
  unsigned HOST_WIDE_INT vec_bytes, vec_units;

  /* Vector element type and mode.  */
  tree elem_type;
  enum machine_mode elem_mode;

  *no_add_attrs = true;

  /* Get the representative array type, possibly nested within a
     padding record e.g. for alignment purposes.  */
  if (TYPE_IS_PADDING_P (rep_type))
    rep_type = TREE_TYPE (TYPE_FIELDS (rep_type));

  if (TREE_CODE (rep_type) != ARRAY_TYPE)
    {
      error ("attribute %qs applies to array types only",
	     IDENTIFIER_POINTER (name));
      return NULL_TREE;
    }

  /* Silently punt on variable sizes.  We can't make vector types for them,
     need to ignore them on front-end generated subtypes of unconstrained
     bases, and this attribute is for binding implementors, not end-users, so
     we should never get there from legitimate explicit uses.  */
  if (!host_integerp (rep_size, 1))
    return NULL_TREE;

  /* Get the element type/mode and check this is something we know
     how to make vectors of.  */
  elem_type = TREE_TYPE (rep_type);
  elem_mode = TYPE_MODE (elem_type);

  if ((!INTEGRAL_TYPE_P (elem_type)
       && !SCALAR_FLOAT_TYPE_P (elem_type)
       && !FIXED_POINT_TYPE_P (elem_type))
      || (!SCALAR_FLOAT_MODE_P (elem_mode)
	  && GET_MODE_CLASS (elem_mode) != MODE_INT
	  && !ALL_SCALAR_FIXED_POINT_MODE_P (elem_mode))
      || !host_integerp (TYPE_SIZE_UNIT (elem_type), 1))
    {
      error ("invalid element type for attribute %qs",
	     IDENTIFIER_POINTER (name));
      return NULL_TREE;
    }

  /* Sanity check the vector size and element type consistency.  */
  vec_bytes = tree_low_cst (rep_size, 1);

  if (vec_bytes % tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1))
    {
      error ("vector size not an integral multiple of component size");
      return NULL_TREE;
    }

  if (vec_bytes == 0)
    {
      error ("zero vector size");
      return NULL_TREE;
    }

  vec_units = vec_bytes / tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1);
  if (vec_units & (vec_units - 1))
    {
      error ("number of components of the vector not a power of two");
      return NULL_TREE;
    }

  /* Build the vector type and replace.  */
  *node = build_vector_type (elem_type, vec_units);
  rep_name = TYPE_NAME (rep_type);
  if (TREE_CODE (rep_name) == TYPE_DECL)
    rep_name = DECL_NAME (rep_name);
  TYPE_NAME (*node) = rep_name;
  TYPE_REPRESENTATIVE_ARRAY (*node) = rep_type;

  return NULL_TREE;
}
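
/* Illustration only (not part of the original source): the "vector_type"
   handler above rewrites a constrained array type in place.  Conceptually,
   given a representative array type equivalent to the C declaration

     typedef float rep_array[4];

   the handler replaces *node with the equivalent of

     typedef float rep_vector __attribute__ ((vector_size (16)));

   while recording the original array as TYPE_REPRESENTATIVE_ARRAY, so the
   front-end can still present the object as an array to the user.  */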
/* ----------------------------------------------------------------------- *
 *                              BUILTIN FUNCTIONS                           *
 * ----------------------------------------------------------------------- */
/* Worker for DEF_BUILTIN.  Possibly define a builtin function with one or two
   names.  Does not declare a non-__builtin_ function if flag_no_builtin, or
   if nonansi_p and flag_no_nonansi_builtin.  */

static void
def_builtin_1 (enum built_in_function fncode,
	       const char *name,
	       enum built_in_class fnclass,
	       tree fntype, tree libtype,
	       bool both_p, bool fallback_p,
	       bool nonansi_p ATTRIBUTE_UNUSED,
	       tree fnattrs, bool implicit_p)
{
  tree decl;
  const char *libname;

  /* Preserve an already installed decl.  It most likely was setup in advance
     (e.g. as part of the internal builtins) for specific reasons.  */
  if (built_in_decls[(int) fncode] != NULL_TREE)
    return;

  gcc_assert ((!both_p && !fallback_p)
	      || !strncmp (name, "__builtin_",
			   strlen ("__builtin_")));

  libname = name + strlen ("__builtin_");
  decl = add_builtin_function (name, fntype, fncode, fnclass,
			       (fallback_p ? libname : NULL),
			       fnattrs);
  if (both_p)
    /* ??? This is normally further controlled by command-line options
       like -fno-builtin, but we don't have them for Ada.  */
    add_builtin_function (libname, libtype, fncode, fnclass,
			  NULL, fnattrs);

  built_in_decls[(int) fncode] = decl;
  if (implicit_p)
    implicit_built_in_decls[(int) fncode] = decl;
}
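
/* Illustration only (not part of the original source): for a builtin name
   like "__builtin_memcpy", the prefix arithmetic above yields

     name    == "__builtin_memcpy"
     libname == name + strlen ("__builtin_") == "memcpy"

   so, when both_p is set, the same built-in function code ends up bound to
   both identifiers, with the plain name also serving as the fallback
   library name for calls that cannot be expanded inline.  */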
static int flag_isoc94 = 0;
static int flag_isoc99 = 0;
/* Install what the common builtins.def offers.  */

static void
install_builtin_functions (void)
{
#define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
		    NONANSI_P, ATTRS, IMPLICIT, COND)			\
  if (NAME && COND)							\
    def_builtin_1 (ENUM, NAME, CLASS,					\
		   builtin_types[(int) TYPE],				\
		   builtin_types[(int) LIBTYPE],			\
		   BOTH_P, FALLBACK_P, NONANSI_P,			\
		   built_in_attributes[(int) ATTRS], IMPLICIT);
#include "builtins.def"
#undef DEF_BUILTIN
}
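
/* Illustration only (not part of the original source): entries in
   builtins.def have roughly this shape (simplified placeholder values, not
   verbatim builtins.def contents; the real file goes through wrapper macros
   such as DEF_LIB_BUILTIN):

     DEF_BUILTIN (BUILT_IN_MEMCPY, "__builtin_memcpy", BUILT_IN_NORMAL,
                  BT_FN_PTR_PTR_CONST_PTR_SIZE, BT_FN_PTR_PTR_CONST_PTR_SIZE,
                  true, true, false, ATTR_NOTHROW_NONNULL, true, true)

   so the single inclusion of the file above expands to one guarded call to
   def_builtin_1 per builtin known to the middle-end.  The flag_isoc94 and
   flag_isoc99 statics exist only so that the entries which mention the C
   dialect flags still compile in the Ada front-end.  */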
/* ----------------------------------------------------------------------- *
 *                              BUILTIN FUNCTIONS                           *
 * ----------------------------------------------------------------------- */
/* Install the builtin functions we might need.  */

void
gnat_install_builtins (void)
{
  install_builtin_elementary_types ();
  install_builtin_function_types ();
  install_builtin_attributes ();

  /* Install builtins used by generic middle-end pieces first.  Some of these
     know about internal specificities and control attributes accordingly, for
     instance __builtin_alloca vs no-throw and -fstack-check.  We will ignore
     the generic definition from builtins.def.  */
  build_common_builtin_nodes ();

  /* Now, install the target specific builtins, such as the AltiVec family on
     ppc, and the common set as exposed by builtins.def.  */
  targetm.init_builtins ();
  install_builtin_functions ();
}

#include "gt-ada-utils.h"
#include "gtype-ada.h"