]> gcc.gnu.org Git - gcc.git/blame - gcc/ada/gcc-interface/utils.c
stor-layout.c (layout_type): Do not clear TREE_OVERFLOW on overflowed zeroes, except...
[gcc.git] / gcc / ada / gcc-interface / utils.c
CommitLineData
a1ab4c31
AC
1/****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
88a94e2b 9 * Copyright (C) 1992-2012, Free Software Foundation, Inc. *
a1ab4c31
AC
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
25
a1ab4c31
AC
26#include "config.h"
27#include "system.h"
28#include "coretypes.h"
29#include "tm.h"
30#include "tree.h"
31#include "flags.h"
a1ab4c31 32#include "toplev.h"
718f9c0f 33#include "diagnostic-core.h"
a1ab4c31
AC
34#include "output.h"
35#include "ggc.h"
36#include "debug.h"
37#include "convert.h"
38#include "target.h"
677f3fa8 39#include "common/common-target.h"
8713b7e4 40#include "langhooks.h"
a1ab4c31 41#include "cgraph.h"
10e4d056 42#include "diagnostic.h"
7ee2468b 43#include "timevar.h"
8713b7e4 44#include "tree-dump.h"
a1ab4c31
AC
45#include "tree-inline.h"
46#include "tree-iterator.h"
a1ab4c31
AC
47
48#include "ada.h"
49#include "types.h"
50#include "atree.h"
51#include "elists.h"
52#include "namet.h"
53#include "nlists.h"
54#include "stringt.h"
55#include "uintp.h"
56#include "fe.h"
57#include "sinfo.h"
58#include "einfo.h"
59#include "ada-tree.h"
60#include "gigi.h"
61
a1ab4c31
AC
62/* If nonzero, pretend we are allocating at global level. */
63int force_global;
64
caa9d12a
EB
65/* The default alignment of "double" floating-point types, i.e. floating
66 point types whose size is equal to 64 bits, or 0 if this alignment is
67 not specifically capped. */
68int double_float_alignment;
69
70/* The default alignment of "double" or larger scalar types, i.e. scalar
71 types whose size is greater or equal to 64 bits, or 0 if this alignment
72 is not specifically capped. */
73int double_scalar_alignment;
74
a1ab4c31
AC
75/* Tree nodes for the various types and decls we create. */
76tree gnat_std_decls[(int) ADT_LAST];
77
78/* Functions to call for each of the possible raise reasons. */
79tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
80
ca8e13e8 81/* Likewise, but with extra info for each of the possible raise reasons. */
437f8c1e
AC
82tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];
83
a1ab4c31
AC
84/* Forward declarations for handlers of attributes. */
85static tree handle_const_attribute (tree *, tree, tree, int, bool *);
86static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
87static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
88static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
89static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
90static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
91static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
0d6e14fd 92static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
a1ab4c31
AC
93static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
94static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
2724e58f 95static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
7948ae37 96static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);
a1ab4c31
AC
97
98/* Fake handler for attributes we don't properly support, typically because
99 they'd require dragging a lot of the common-c front-end circuitry. */
100static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
101
102/* Table of machine-independent internal attributes for Ada. We support
103 this minimal set of attributes to accommodate the needs of builtins. */
104const struct attribute_spec gnat_internal_attribute_table[] =
105{
62d784f7
KT
106 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
107 affects_type_identity } */
108 { "const", 0, 0, true, false, false, handle_const_attribute,
109 false },
110 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute,
111 false },
112 { "pure", 0, 0, true, false, false, handle_pure_attribute,
113 false },
114 { "no vops", 0, 0, true, false, false, handle_novops_attribute,
115 false },
116 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute,
117 false },
118 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute,
119 false },
120 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute,
121 false },
122 { "leaf", 0, 0, true, false, false, handle_leaf_attribute,
123 false },
124 { "malloc", 0, 0, true, false, false, handle_malloc_attribute,
125 false },
126 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute,
127 false },
128
129 { "vector_size", 1, 1, false, true, false, handle_vector_size_attribute,
130 false },
131 { "vector_type", 0, 0, false, true, false, handle_vector_type_attribute,
132 false },
133 { "may_alias", 0, 0, false, true, false, NULL, false },
a1ab4c31
AC
134
135 /* ??? format and format_arg are heavy and not supported, which actually
136 prevents support for stdio builtins, which we however declare as part
137 of the common builtins.def contents. */
62d784f7
KT
138 { "format", 3, 3, false, true, true, fake_attribute_handler, false },
139 { "format_arg", 1, 1, false, true, true, fake_attribute_handler, false },
a1ab4c31 140
62d784f7 141 { NULL, 0, 0, false, false, false, NULL, false }
a1ab4c31
AC
142};
143
144/* Associates a GNAT tree node to a GCC tree node. It is used in
145 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
146 of `save_gnu_tree' for more info. */
147static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
148
149#define GET_GNU_TREE(GNAT_ENTITY) \
150 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
151
152#define SET_GNU_TREE(GNAT_ENTITY,VAL) \
153 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
154
155#define PRESENT_GNU_TREE(GNAT_ENTITY) \
156 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
157
158/* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
159static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
160
161#define GET_DUMMY_NODE(GNAT_ENTITY) \
162 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
163
164#define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
165 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
166
167#define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
168 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
169
170/* This variable keeps a table for types for each precision so that we only
171 allocate each of them once. Signed and unsigned types are kept separate.
172
173 Note that these types are only used when fold-const requests something
174 special. Perhaps we should NOT share these types; we'll see how it
175 goes later. */
176static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
177
178/* Likewise for float types, but record these by mode. */
179static GTY(()) tree float_types[NUM_MACHINE_MODES];
180
181/* For each binding contour we allocate a binding_level structure to indicate
182 the binding depth. */
183
d1b38208 184struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
a1ab4c31
AC
185 /* The binding level containing this one (the enclosing binding level). */
186 struct gnat_binding_level *chain;
187 /* The BLOCK node for this level. */
188 tree block;
189 /* If nonzero, the setjmp buffer that needs to be updated for any
190 variable-sized definition within this context. */
191 tree jmpbuf_decl;
192};
193
194/* The binding level currently in effect. */
195static GTY(()) struct gnat_binding_level *current_binding_level;
196
197/* A chain of gnat_binding_level structures awaiting reuse. */
198static GTY((deletable)) struct gnat_binding_level *free_binding_level;
199
228ee426
EB
200/* The context to be used for global declarations. */
201static GTY(()) tree global_context;
202
a1ab4c31 203/* An array of global declarations. */
9771b263 204static GTY(()) vec<tree, va_gc> *global_decls;
a1ab4c31
AC
205
206/* An array of builtin function declarations. */
9771b263 207static GTY(()) vec<tree, va_gc> *builtin_decls;
a1ab4c31
AC
208
209/* An array of global renaming pointers. */
9771b263 210static GTY(()) vec<tree, va_gc> *global_renaming_pointers;
a1ab4c31
AC
211
212/* A chain of unused BLOCK nodes. */
213static GTY((deletable)) tree free_block_chain;
214
842d4ee2
EB
215static int pad_type_hash_marked_p (const void *p);
216static hashval_t pad_type_hash_hash (const void *p);
217static int pad_type_hash_eq (const void *p1, const void *p2);
218
219/* A hash table of padded types. It is modelled on the generic type
220 hash table in tree.c, which must thus be used as a reference. */
221struct GTY(()) pad_type_hash {
222 unsigned long hash;
223 tree type;
224};
225
226static GTY ((if_marked ("pad_type_hash_marked_p"),
227 param_is (struct pad_type_hash)))
228 htab_t pad_type_hash_table;
229
a1ab4c31
AC
230static tree merge_sizes (tree, tree, tree, bool, bool);
231static tree compute_related_constant (tree, tree);
232static tree split_plus (tree, tree *);
a1ab4c31
AC
233static tree float_type_for_precision (int, enum machine_mode);
234static tree convert_to_fat_pointer (tree, tree);
a1ab4c31 235static bool potential_alignment_gap (tree, tree, tree);
58c8f770 236static void process_attributes (tree, struct attrib *);
a1ab4c31 237\f
842d4ee2 238/* Initialize data structures of the utils.c module. */
a1ab4c31
AC
239
240void
842d4ee2 241init_gnat_utils (void)
a1ab4c31 242{
842d4ee2 243 /* Initialize the association of GNAT nodes to GCC trees. */
a9429e29 244 associate_gnat_to_gnu = ggc_alloc_cleared_vec_tree (max_gnat_nodes);
842d4ee2
EB
245
246 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
247 dummy_node_table = ggc_alloc_cleared_vec_tree (max_gnat_nodes);
248
249 /* Initialize the hash table of padded types. */
250 pad_type_hash_table = htab_create_ggc (512, pad_type_hash_hash,
251 pad_type_hash_eq, 0);
a1ab4c31
AC
252}
253
842d4ee2 254/* Destroy data structures of the utils.c module. */
f04b8d69
EB
255
256void
842d4ee2 257destroy_gnat_utils (void)
f04b8d69 258{
842d4ee2 259 /* Destroy the association of GNAT nodes to GCC trees. */
f04b8d69
EB
260 ggc_free (associate_gnat_to_gnu);
261 associate_gnat_to_gnu = NULL;
f04b8d69 262
842d4ee2
EB
263 /* Destroy the association of GNAT nodes to GCC trees as dummies. */
264 ggc_free (dummy_node_table);
265 dummy_node_table = NULL;
266
267 /* Destroy the hash table of padded types. */
268 htab_delete (pad_type_hash_table);
269 pad_type_hash_table = NULL;
270
271 /* Invalidate the global renaming pointers. */
272 invalidate_global_renaming_pointers ();
273}
274\f
a1d8cc63
EB
275/* GNAT_ENTITY is a GNAT tree node for an entity. Associate GNU_DECL, a GCC
276 tree node, with GNAT_ENTITY. If GNU_DECL is not a ..._DECL node, abort.
277 If NO_CHECK is true, the latter check is suppressed.
a1ab4c31 278
a1d8cc63 279 If GNU_DECL is zero, reset a previous association. */
a1ab4c31
AC
280
281void
282save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
283{
284 /* Check that GNAT_ENTITY is not already defined and that it is being set
a1d8cc63 285 to something which is a decl. If that is not the case, this usually
a1ab4c31
AC
286 means GNAT_ENTITY is defined twice, but occasionally is due to some
287 Gigi problem. */
288 gcc_assert (!(gnu_decl
289 && (PRESENT_GNU_TREE (gnat_entity)
290 || (!no_check && !DECL_P (gnu_decl)))));
291
292 SET_GNU_TREE (gnat_entity, gnu_decl);
293}
294
a1d8cc63
EB
295/* GNAT_ENTITY is a GNAT tree node for an entity. Return the GCC tree node
296 that was associated with it. If there is no such tree node, abort.
a1ab4c31
AC
297
298 In some cases, such as delayed elaboration or expressions that need to
299 be elaborated only once, GNAT_ENTITY is really not an entity. */
300
301tree
302get_gnu_tree (Entity_Id gnat_entity)
303{
304 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
305 return GET_GNU_TREE (gnat_entity);
306}
307
308/* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
309
310bool
311present_gnu_tree (Entity_Id gnat_entity)
312{
313 return PRESENT_GNU_TREE (gnat_entity);
314}
315\f
a1ab4c31
AC
316/* Make a dummy type corresponding to GNAT_TYPE. */
317
318tree
319make_dummy_type (Entity_Id gnat_type)
320{
321 Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
322 tree gnu_type;
323
324 /* If there is an equivalent type, get its underlying type. */
325 if (Present (gnat_underlying))
a0b8b1b7 326 gnat_underlying = Gigi_Equivalent_Type (Underlying_Type (gnat_underlying));
a1ab4c31
AC
327
328 /* If there was no equivalent type (can only happen when just annotating
329 types) or underlying type, go back to the original type. */
330 if (No (gnat_underlying))
331 gnat_underlying = gnat_type;
332
333 /* If it there already a dummy type, use that one. Else make one. */
334 if (PRESENT_DUMMY_NODE (gnat_underlying))
335 return GET_DUMMY_NODE (gnat_underlying);
336
337 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
338 an ENUMERAL_TYPE. */
339 gnu_type = make_node (Is_Record_Type (gnat_underlying)
340 ? tree_code_for_record_type (gnat_underlying)
341 : ENUMERAL_TYPE);
342 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
343 TYPE_DUMMY_P (gnu_type) = 1;
10069d53
EB
344 TYPE_STUB_DECL (gnu_type)
345 = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
a0b8b1b7
EB
346 if (Is_By_Reference_Type (gnat_underlying))
347 TYPE_BY_REFERENCE_P (gnu_type) = 1;
a1ab4c31
AC
348
349 SET_DUMMY_NODE (gnat_underlying, gnu_type);
350
351 return gnu_type;
352}
e3edbd56
EB
353
354/* Return the dummy type that was made for GNAT_TYPE, if any. */
355
356tree
357get_dummy_type (Entity_Id gnat_type)
358{
359 return GET_DUMMY_NODE (gnat_type);
360}
361
362/* Build dummy fat and thin pointer types whose designated type is specified
363 by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter. */
364
365void
366build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
367{
368 tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
369 tree gnu_fat_type, fields, gnu_object_type;
370
371 gnu_template_type = make_node (RECORD_TYPE);
372 TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
373 TYPE_DUMMY_P (gnu_template_type) = 1;
374 gnu_ptr_template = build_pointer_type (gnu_template_type);
375
376 gnu_array_type = make_node (ENUMERAL_TYPE);
377 TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
378 TYPE_DUMMY_P (gnu_array_type) = 1;
379 gnu_ptr_array = build_pointer_type (gnu_array_type);
380
381 gnu_fat_type = make_node (RECORD_TYPE);
382 /* Build a stub DECL to trigger the special processing for fat pointer types
383 in gnat_pushdecl. */
384 TYPE_NAME (gnu_fat_type)
385 = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
386 gnu_fat_type);
387 fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
388 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
389 DECL_CHAIN (fields)
390 = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
391 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
392 finish_fat_pointer_type (gnu_fat_type, fields);
393 SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
394 /* Suppress debug info until after the type is completed. */
395 TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;
396
397 gnu_object_type = make_node (RECORD_TYPE);
398 TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
399 TYPE_DUMMY_P (gnu_object_type) = 1;
400
401 TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
402 TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
403}
a1ab4c31 404\f
c99c0026 405/* Return true if we are in the global binding level. */
a1ab4c31 406
c99c0026 407bool
a1ab4c31
AC
408global_bindings_p (void)
409{
c99c0026 410 return force_global || current_function_decl == NULL_TREE;
a1ab4c31
AC
411}
412
a09d56d8 413/* Enter a new binding level. */
a1ab4c31
AC
414
415void
c6bd4220 416gnat_pushlevel (void)
a1ab4c31
AC
417{
418 struct gnat_binding_level *newlevel = NULL;
419
420 /* Reuse a struct for this binding level, if there is one. */
421 if (free_binding_level)
422 {
423 newlevel = free_binding_level;
424 free_binding_level = free_binding_level->chain;
425 }
426 else
a9429e29 427 newlevel = ggc_alloc_gnat_binding_level ();
a1ab4c31
AC
428
429 /* Use a free BLOCK, if any; otherwise, allocate one. */
430 if (free_block_chain)
431 {
432 newlevel->block = free_block_chain;
433 free_block_chain = BLOCK_CHAIN (free_block_chain);
434 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
435 }
436 else
437 newlevel->block = make_node (BLOCK);
438
439 /* Point the BLOCK we just made to its parent. */
440 if (current_binding_level)
441 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
442
a09d56d8
EB
443 BLOCK_VARS (newlevel->block) = NULL_TREE;
444 BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
a1ab4c31
AC
445 TREE_USED (newlevel->block) = 1;
446
a09d56d8 447 /* Add this level to the front of the chain (stack) of active levels. */
a1ab4c31
AC
448 newlevel->chain = current_binding_level;
449 newlevel->jmpbuf_decl = NULL_TREE;
450 current_binding_level = newlevel;
451}
452
453/* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
454 and point FNDECL to this BLOCK. */
455
456void
457set_current_block_context (tree fndecl)
458{
459 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
460 DECL_INITIAL (fndecl) = current_binding_level->block;
a09d56d8 461 set_block_for_group (current_binding_level->block);
a1ab4c31
AC
462}
463
464/* Set the jmpbuf_decl for the current binding level to DECL. */
465
466void
467set_block_jmpbuf_decl (tree decl)
468{
469 current_binding_level->jmpbuf_decl = decl;
470}
471
472/* Get the jmpbuf_decl, if any, for the current binding level. */
473
474tree
c6bd4220 475get_block_jmpbuf_decl (void)
a1ab4c31
AC
476{
477 return current_binding_level->jmpbuf_decl;
478}
479
a09d56d8 480/* Exit a binding level. Set any BLOCK into the current code group. */
a1ab4c31
AC
481
482void
c6bd4220 483gnat_poplevel (void)
a1ab4c31
AC
484{
485 struct gnat_binding_level *level = current_binding_level;
486 tree block = level->block;
487
488 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
72ac05b0 489 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
a1ab4c31
AC
490
491 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
492 are no variables free the block and merge its subblocks into those of its
a09d56d8 493 parent block. Otherwise, add it to the list of its parent. */
a1ab4c31
AC
494 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
495 ;
496 else if (BLOCK_VARS (block) == NULL_TREE)
497 {
498 BLOCK_SUBBLOCKS (level->chain->block)
61e46a7d
NF
499 = block_chainon (BLOCK_SUBBLOCKS (block),
500 BLOCK_SUBBLOCKS (level->chain->block));
a1ab4c31
AC
501 BLOCK_CHAIN (block) = free_block_chain;
502 free_block_chain = block;
503 }
504 else
505 {
506 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
507 BLOCK_SUBBLOCKS (level->chain->block) = block;
508 TREE_USED (block) = 1;
509 set_block_for_group (block);
510 }
511
512 /* Free this binding structure. */
513 current_binding_level = level->chain;
514 level->chain = free_binding_level;
515 free_binding_level = level;
516}
517
2231f17f
EB
518/* Exit a binding level and discard the associated BLOCK. */
519
520void
521gnat_zaplevel (void)
522{
523 struct gnat_binding_level *level = current_binding_level;
524 tree block = level->block;
525
526 BLOCK_CHAIN (block) = free_block_chain;
527 free_block_chain = block;
528
529 /* Free this binding structure. */
530 current_binding_level = level->chain;
531 level->chain = free_binding_level;
532 free_binding_level = level;
533}
a1ab4c31 534\f
228ee426
EB
535/* Record DECL as belonging to the current lexical scope and use GNAT_NODE
536 for location information and flag propagation. */
a1ab4c31
AC
537
538void
539gnat_pushdecl (tree decl, Node_Id gnat_node)
540{
228ee426 541 /* If DECL is public external or at top level, it has global context. */
bd9c7fb9 542 if ((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || global_bindings_p ())
228ee426
EB
543 {
544 if (!global_context)
545 global_context = build_translation_unit_decl (NULL_TREE);
546 DECL_CONTEXT (decl) = global_context;
547 }
a1ab4c31
AC
548 else
549 {
550 DECL_CONTEXT (decl) = current_function_decl;
551
9f62cb92
JJ
552 /* Functions imported in another function are not really nested.
553 For really nested functions mark them initially as needing
554 a static chain for uses of that flag before unnesting;
555 lower_nested_functions will then recompute it. */
556 if (TREE_CODE (decl) == FUNCTION_DECL && !TREE_PUBLIC (decl))
557 DECL_STATIC_CHAIN (decl) = 1;
a1ab4c31
AC
558 }
559
228ee426 560 TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));
a1ab4c31
AC
561
562 /* Set the location of DECL and emit a declaration for it. */
563 if (Present (gnat_node))
564 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
228ee426 565
a1ab4c31
AC
566 add_decl_expr (decl, gnat_node);
567
568 /* Put the declaration on the list. The list of declarations is in reverse
2231f17f
EB
569 order. The list will be reversed later. Put global declarations in the
570 globals list and local ones in the current block. But skip TYPE_DECLs
571 for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
572 with the debugger and aren't needed anyway. */
573 if (!(TREE_CODE (decl) == TYPE_DECL
574 && TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
a1ab4c31 575 {
9083aacd 576 if (DECL_EXTERNAL (decl))
a1ab4c31 577 {
a1ab4c31 578 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
9771b263 579 vec_safe_push (builtin_decls, decl);
a1ab4c31 580 }
9083aacd 581 else if (global_bindings_p ())
9771b263 582 vec_safe_push (global_decls, decl);
9083aacd 583 else
a1ab4c31 584 {
a963da4d
EB
585 DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
586 BLOCK_VARS (current_binding_level->block) = decl;
a1ab4c31
AC
587 }
588 }
589
590 /* For the declaration of a type, set its name if it either is not already
10069d53 591 set or if the previous type name was not derived from a source name.
a1ab4c31
AC
592 We'd rather have the type named with a real name and all the pointer
593 types to the same object have the same POINTER_TYPE node. Code in the
594 equivalent function of c-decl.c makes a copy of the type node here, but
595 that may cause us trouble with incomplete types. We make an exception
596 for fat pointer types because the compiler automatically builds them
597 for unconstrained array types and the debugger uses them to represent
598 both these and pointers to these. */
599 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
600 {
601 tree t = TREE_TYPE (decl);
602
10069d53 603 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
1aeb40dd 604 {
4cb12a90 605 /* Array and pointer types aren't "tagged" types so we force the
1aeb40dd
EB
606 type to be associated with its typedef in the DWARF back-end,
607 in order to make sure that the latter is always preserved. */
4cb12a90
EB
608 if (!DECL_ARTIFICIAL (decl)
609 && (TREE_CODE (t) == ARRAY_TYPE
610 || TREE_CODE (t) == POINTER_TYPE))
1aeb40dd
EB
611 {
612 tree tt = build_distinct_type_copy (t);
4cb12a90
EB
613 if (TREE_CODE (t) == POINTER_TYPE)
614 TYPE_NEXT_PTR_TO (t) = tt;
1aeb40dd 615 TYPE_NAME (tt) = DECL_NAME (decl);
d4d05b52 616 TYPE_CONTEXT (tt) = DECL_CONTEXT (decl);
1aeb40dd
EB
617 TYPE_STUB_DECL (tt) = TYPE_STUB_DECL (t);
618 DECL_ORIGINAL_TYPE (decl) = tt;
619 }
620 }
315cff15 621 else if (TYPE_IS_FAT_POINTER_P (t))
a1ab4c31 622 {
e3edbd56 623 /* We need a variant for the placeholder machinery to work. */
a1ab4c31
AC
624 tree tt = build_variant_type_copy (t);
625 TYPE_NAME (tt) = decl;
d4d05b52 626 TYPE_CONTEXT (tt) = DECL_CONTEXT (decl);
a1ab4c31
AC
627 TREE_USED (tt) = TREE_USED (t);
628 TREE_TYPE (decl) = tt;
40c88b94
EB
629 if (DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
630 DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
631 else
632 DECL_ORIGINAL_TYPE (decl) = t;
40c88b94 633 DECL_ARTIFICIAL (decl) = 0;
e3edbd56 634 t = NULL_TREE;
a1ab4c31
AC
635 }
636 else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
637 ;
638 else
639 t = NULL_TREE;
640
e3edbd56
EB
641 /* Propagate the name to all the anonymous variants. This is needed
642 for the type qualifiers machinery to work properly. */
a1ab4c31
AC
643 if (t)
644 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
e3edbd56 645 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
d4d05b52
EB
646 {
647 TYPE_NAME (t) = decl;
648 TYPE_CONTEXT (t) = DECL_CONTEXT (decl);
649 }
a1ab4c31
AC
650 }
651}
652\f
842d4ee2
EB
653/* Create a record type that contains a SIZE bytes long field of TYPE with a
654 starting bit position so that it is aligned to ALIGN bits, and leaving at
655 least ROOM bytes free before the field. BASE_ALIGN is the alignment the
656 record is guaranteed to get. */
657
658tree
659make_aligning_type (tree type, unsigned int align, tree size,
660 unsigned int base_align, int room)
661{
662 /* We will be crafting a record type with one field at a position set to be
663 the next multiple of ALIGN past record'address + room bytes. We use a
664 record placeholder to express record'address. */
665 tree record_type = make_node (RECORD_TYPE);
666 tree record = build0 (PLACEHOLDER_EXPR, record_type);
667
668 tree record_addr_st
669 = convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));
670
671 /* The diagram below summarizes the shape of what we manipulate:
672
673 <--------- pos ---------->
674 { +------------+-------------+-----------------+
675 record =>{ |############| ... | field (type) |
676 { +------------+-------------+-----------------+
677 |<-- room -->|<- voffset ->|<---- size ----->|
678 o o
679 | |
680 record_addr vblock_addr
681
682 Every length is in sizetype bytes there, except "pos" which has to be
683 set as a bit position in the GCC tree for the record. */
684 tree room_st = size_int (room);
685 tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
686 tree voffset_st, pos, field;
687
688 tree name = TYPE_NAME (type);
689
690 if (TREE_CODE (name) == TYPE_DECL)
691 name = DECL_NAME (name);
692 name = concat_name (name, "ALIGN");
693 TYPE_NAME (record_type) = name;
694
695 /* Compute VOFFSET and then POS. The next byte position multiple of some
696 alignment after some address is obtained by "and"ing the alignment minus
697 1 with the two's complement of the address. */
698 voffset_st = size_binop (BIT_AND_EXPR,
699 fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
700 size_int ((align / BITS_PER_UNIT) - 1));
701
702 /* POS = (ROOM + VOFFSET) * BIT_PER_UNIT, in bitsizetype. */
703 pos = size_binop (MULT_EXPR,
704 convert (bitsizetype,
705 size_binop (PLUS_EXPR, room_st, voffset_st)),
706 bitsize_unit_node);
707
708 /* Craft the GCC record representation. We exceptionally do everything
709 manually here because 1) our generic circuitry is not quite ready to
710 handle the complex position/size expressions we are setting up, 2) we
711 have a strong simplifying factor at hand: we know the maximum possible
712 value of voffset, and 3) we have to set/reset at least the sizes in
713 accordance with this maximum value anyway, as we need them to convey
714 what should be "alloc"ated for this type.
715
716 Use -1 as the 'addressable' indication for the field to prevent the
717 creation of a bitfield. We don't need one, it would have damaging
718 consequences on the alignment computation, and create_field_decl would
719 make one without this special argument, for instance because of the
720 complex position expression. */
721 field = create_field_decl (get_identifier ("F"), type, record_type, size,
722 pos, 1, -1);
723 TYPE_FIELDS (record_type) = field;
724
725 TYPE_ALIGN (record_type) = base_align;
726 TYPE_USER_ALIGN (record_type) = 1;
727
728 TYPE_SIZE (record_type)
729 = size_binop (PLUS_EXPR,
730 size_binop (MULT_EXPR, convert (bitsizetype, size),
731 bitsize_unit_node),
732 bitsize_int (align + room * BITS_PER_UNIT));
733 TYPE_SIZE_UNIT (record_type)
734 = size_binop (PLUS_EXPR, size,
735 size_int (room + align / BITS_PER_UNIT));
736
737 SET_TYPE_MODE (record_type, BLKmode);
738 relate_alias_sets (record_type, type, ALIAS_SET_COPY);
739
740 /* Declare it now since it will never be declared otherwise. This is
741 necessary to ensure that its subtrees are properly marked. */
742 create_type_decl (name, record_type, NULL, true, false, Empty);
743
744 return record_type;
745}
746
747/* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
748 as the field type of a packed record if IN_RECORD is true, or as the
749 component type of a packed array if IN_RECORD is false. See if we can
750 rewrite it either as a type that has a non-BLKmode, which we can pack
751 tighter in the packed record case, or as a smaller type. If so, return
752 the new type. If not, return the original type. */
753
754tree
755make_packable_type (tree type, bool in_record)
756{
757 unsigned HOST_WIDE_INT size = tree_low_cst (TYPE_SIZE (type), 1);
758 unsigned HOST_WIDE_INT new_size;
759 tree new_type, old_field, field_list = NULL_TREE;
760 unsigned int align;
761
762 /* No point in doing anything if the size is zero. */
763 if (size == 0)
764 return type;
765
766 new_type = make_node (TREE_CODE (type));
767
768 /* Copy the name and flags from the old type to that of the new.
769 Note that we rely on the pointer equality created here for
770 TYPE_NAME to look through conversions in various places. */
771 TYPE_NAME (new_type) = TYPE_NAME (type);
772 TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
773 TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
774 if (TREE_CODE (type) == RECORD_TYPE)
775 TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);
776
777 /* If we are in a record and have a small size, set the alignment to
778 try for an integral mode. Otherwise set it to try for a smaller
779 type with BLKmode. */
780 if (in_record && size <= MAX_FIXED_MODE_SIZE)
781 {
782 align = ceil_pow2 (size);
783 TYPE_ALIGN (new_type) = align;
784 new_size = (size + align - 1) & -align;
785 }
786 else
787 {
788 unsigned HOST_WIDE_INT align;
789
790 /* Do not try to shrink the size if the RM size is not constant. */
791 if (TYPE_CONTAINS_TEMPLATE_P (type)
792 || !host_integerp (TYPE_ADA_SIZE (type), 1))
793 return type;
794
795 /* Round the RM size up to a unit boundary to get the minimal size
796 for a BLKmode record. Give up if it's already the size. */
797 new_size = TREE_INT_CST_LOW (TYPE_ADA_SIZE (type));
798 new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
799 if (new_size == size)
800 return type;
801
802 align = new_size & -new_size;
803 TYPE_ALIGN (new_type) = MIN (TYPE_ALIGN (type), align);
804 }
805
806 TYPE_USER_ALIGN (new_type) = 1;
807
808 /* Now copy the fields, keeping the position and size as we don't want
809 to change the layout by propagating the packedness downwards. */
810 for (old_field = TYPE_FIELDS (type); old_field;
811 old_field = DECL_CHAIN (old_field))
812 {
813 tree new_field_type = TREE_TYPE (old_field);
814 tree new_field, new_size;
815
816 if (RECORD_OR_UNION_TYPE_P (new_field_type)
817 && !TYPE_FAT_POINTER_P (new_field_type)
818 && host_integerp (TYPE_SIZE (new_field_type), 1))
819 new_field_type = make_packable_type (new_field_type, true);
820
821 /* However, for the last field in a not already packed record type
822 that is of an aggregate type, we need to use the RM size in the
823 packable version of the record type, see finish_record_type. */
824 if (!DECL_CHAIN (old_field)
825 && !TYPE_PACKED (type)
826 && RECORD_OR_UNION_TYPE_P (new_field_type)
827 && !TYPE_FAT_POINTER_P (new_field_type)
828 && !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
829 && TYPE_ADA_SIZE (new_field_type))
830 new_size = TYPE_ADA_SIZE (new_field_type);
831 else
832 new_size = DECL_SIZE (old_field);
833
834 new_field
835 = create_field_decl (DECL_NAME (old_field), new_field_type, new_type,
836 new_size, bit_position (old_field),
837 TYPE_PACKED (type),
838 !DECL_NONADDRESSABLE_P (old_field));
839
840 DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (old_field);
841 SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, old_field);
842 if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
843 DECL_QUALIFIER (new_field) = DECL_QUALIFIER (old_field);
844
845 DECL_CHAIN (new_field) = field_list;
846 field_list = new_field;
847 }
848
849 finish_record_type (new_type, nreverse (field_list), 2, false);
850 relate_alias_sets (new_type, type, ALIAS_SET_COPY);
851 SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
852 DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));
853
854 /* If this is a padding record, we never want to make the size smaller
855 than what was specified. For QUAL_UNION_TYPE, also copy the size. */
856 if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
857 {
858 TYPE_SIZE (new_type) = TYPE_SIZE (type);
859 TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
860 new_size = size;
861 }
862 else
863 {
864 TYPE_SIZE (new_type) = bitsize_int (new_size);
865 TYPE_SIZE_UNIT (new_type)
866 = size_int ((new_size + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
867 }
868
869 if (!TYPE_CONTAINS_TEMPLATE_P (type))
870 SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));
871
872 compute_record_mode (new_type);
873
874 /* Try harder to get a packable type if necessary, for example
875 in case the record itself contains a BLKmode field. */
876 if (in_record && TYPE_MODE (new_type) == BLKmode)
877 SET_TYPE_MODE (new_type,
878 mode_for_size_tree (TYPE_SIZE (new_type), MODE_INT, 1));
879
880 /* If neither the mode nor the size has shrunk, return the old type. */
881 if (TYPE_MODE (new_type) == BLKmode && new_size >= size)
882 return type;
883
884 return new_type;
885}
886
887/* Given a type TYPE, return a new type whose size is appropriate for SIZE.
888 If TYPE is the best type, return it. Otherwise, make a new type. We
889 only support new integral and pointer types. FOR_BIASED is true if
890 we are making a biased type. */
891
892tree
893make_type_from_size (tree type, tree size_tree, bool for_biased)
894{
895 unsigned HOST_WIDE_INT size;
896 bool biased_p;
897 tree new_type;
898
899 /* If size indicates an error, just return TYPE to avoid propagating
900 the error. Likewise if it's too large to represent. */
901 if (!size_tree || !host_integerp (size_tree, 1))
902 return type;
903
904 size = tree_low_cst (size_tree, 1);
905
906 switch (TREE_CODE (type))
907 {
908 case INTEGER_TYPE:
909 case ENUMERAL_TYPE:
910 case BOOLEAN_TYPE:
911 biased_p = (TREE_CODE (type) == INTEGER_TYPE
912 && TYPE_BIASED_REPRESENTATION_P (type));
913
914 /* Integer types with precision 0 are forbidden. */
915 if (size == 0)
916 size = 1;
917
918 /* Only do something if the type isn't a packed array type and doesn't
919 already have the proper size and the size isn't too large. */
920 if (TYPE_IS_PACKED_ARRAY_TYPE_P (type)
921 || (TYPE_PRECISION (type) == size && biased_p == for_biased)
922 || size > LONG_LONG_TYPE_SIZE)
923 break;
924
925 biased_p |= for_biased;
926 if (TYPE_UNSIGNED (type) || biased_p)
927 new_type = make_unsigned_type (size);
928 else
929 new_type = make_signed_type (size);
930 TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
931 SET_TYPE_RM_MIN_VALUE (new_type,
932 convert (TREE_TYPE (new_type),
933 TYPE_MIN_VALUE (type)));
934 SET_TYPE_RM_MAX_VALUE (new_type,
935 convert (TREE_TYPE (new_type),
936 TYPE_MAX_VALUE (type)));
937 /* Copy the name to show that it's essentially the same type and
938 not a subrange type. */
939 TYPE_NAME (new_type) = TYPE_NAME (type);
940 TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
941 SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
942 return new_type;
943
944 case RECORD_TYPE:
945 /* Do something if this is a fat pointer, in which case we
946 may need to return the thin pointer. */
947 if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
948 {
949 enum machine_mode p_mode = mode_for_size (size, MODE_INT, 0);
950 if (!targetm.valid_pointer_mode (p_mode))
951 p_mode = ptr_mode;
952 return
953 build_pointer_type_for_mode
954 (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
955 p_mode, 0);
956 }
957 break;
958
959 case POINTER_TYPE:
960 /* Only do something if this is a thin pointer, in which case we
961 may need to return the fat pointer. */
962 if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
963 return
964 build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
965 break;
966
967 default:
968 break;
969 }
970
971 return type;
972}
973
974/* See if the data pointed to by the hash table slot is marked. */
975
976static int
977pad_type_hash_marked_p (const void *p)
978{
979 const_tree const type = ((const struct pad_type_hash *) p)->type;
980
981 return ggc_marked_p (type);
982}
983
984/* Return the cached hash value. */
985
986static hashval_t
987pad_type_hash_hash (const void *p)
988{
989 return ((const struct pad_type_hash *) p)->hash;
990}
991
992/* Return 1 iff the padded types are equivalent. */
993
994static int
995pad_type_hash_eq (const void *p1, const void *p2)
996{
997 const struct pad_type_hash *const t1 = (const struct pad_type_hash *) p1;
998 const struct pad_type_hash *const t2 = (const struct pad_type_hash *) p2;
999 tree type1, type2;
1000
1001 if (t1->hash != t2->hash)
1002 return 0;
1003
1004 type1 = t1->type;
1005 type2 = t2->type;
1006
1007 /* We consider that the padded types are equivalent if they pad the same
1008 type and have the same size, alignment and RM size. Taking the mode
1009 into account is redundant since it is determined by the others. */
1010 return
1011 TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
1012 && TYPE_SIZE (type1) == TYPE_SIZE (type2)
1013 && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
1014 && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2);
1015}
1016
1017/* Ensure that TYPE has SIZE and ALIGN. Make and return a new padded type
1018 if needed. We have already verified that SIZE and TYPE are large enough.
1019 GNAT_ENTITY is used to name the resulting record and to issue a warning.
1020 IS_COMPONENT_TYPE is true if this is being done for the component type of
1021 an array. IS_USER_TYPE is true if the original type needs to be completed.
1022 DEFINITION is true if this type is being defined. SET_RM_SIZE is true if
1023 the RM size of the resulting type is to be set to SIZE too. */
1024
1025tree
1026maybe_pad_type (tree type, tree size, unsigned int align,
1027 Entity_Id gnat_entity, bool is_component_type,
1028 bool is_user_type, bool definition, bool set_rm_size)
1029{
1030 tree orig_size = TYPE_SIZE (type);
1031 tree record, field;
1032
1033 /* If TYPE is a padded type, see if it agrees with any size and alignment
1034 we were given. If so, return the original type. Otherwise, strip
1035 off the padding, since we will either be returning the inner type
1036 or repadding it. If no size or alignment is specified, use that of
1037 the original padded type. */
1038 if (TYPE_IS_PADDING_P (type))
1039 {
1040 if ((!size
1041 || operand_equal_p (round_up (size,
1042 MAX (align, TYPE_ALIGN (type))),
1043 round_up (TYPE_SIZE (type),
1044 MAX (align, TYPE_ALIGN (type))),
1045 0))
1046 && (align == 0 || align == TYPE_ALIGN (type)))
1047 return type;
1048
1049 if (!size)
1050 size = TYPE_SIZE (type);
1051 if (align == 0)
1052 align = TYPE_ALIGN (type);
1053
1054 type = TREE_TYPE (TYPE_FIELDS (type));
1055 orig_size = TYPE_SIZE (type);
1056 }
1057
1058 /* If the size is either not being changed or is being made smaller (which
1059 is not done here and is only valid for bitfields anyway), show the size
1060 isn't changing. Likewise, clear the alignment if it isn't being
1061 changed. Then return if we aren't doing anything. */
1062 if (size
1063 && (operand_equal_p (size, orig_size, 0)
1064 || (TREE_CODE (orig_size) == INTEGER_CST
1065 && tree_int_cst_lt (size, orig_size))))
1066 size = NULL_TREE;
1067
1068 if (align == TYPE_ALIGN (type))
1069 align = 0;
1070
1071 if (align == 0 && !size)
1072 return type;
1073
1074 /* If requested, complete the original type and give it a name. */
1075 if (is_user_type)
1076 create_type_decl (get_entity_name (gnat_entity), type,
1077 NULL, !Comes_From_Source (gnat_entity),
1078 !(TYPE_NAME (type)
1079 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1080 && DECL_IGNORED_P (TYPE_NAME (type))),
1081 gnat_entity);
1082
1083 /* We used to modify the record in place in some cases, but that could
1084 generate incorrect debugging information. So make a new record
1085 type and name. */
1086 record = make_node (RECORD_TYPE);
1087 TYPE_PADDING_P (record) = 1;
1088
1089 if (Present (gnat_entity))
1090 TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD");
1091
1092 TYPE_ALIGN (record) = align;
1093 TYPE_SIZE (record) = size ? size : orig_size;
1094 TYPE_SIZE_UNIT (record)
1095 = convert (sizetype,
1096 size_binop (CEIL_DIV_EXPR, TYPE_SIZE (record),
1097 bitsize_unit_node));
1098
1099 /* If we are changing the alignment and the input type is a record with
1100 BLKmode and a small constant size, try to make a form that has an
1101 integral mode. This might allow the padding record to also have an
1102 integral mode, which will be much more efficient. There is no point
1103 in doing so if a size is specified unless it is also a small constant
1104 size and it is incorrect to do so if we cannot guarantee that the mode
1105 will be naturally aligned since the field must always be addressable.
1106
1107 ??? This might not always be a win when done for a stand-alone object:
1108 since the nominal and the effective type of the object will now have
1109 different modes, a VIEW_CONVERT_EXPR will be required for converting
1110 between them and it might be hard to overcome afterwards, including
1111 at the RTL level when the stand-alone object is accessed as a whole. */
1112 if (align != 0
1113 && RECORD_OR_UNION_TYPE_P (type)
1114 && TYPE_MODE (type) == BLKmode
1115 && !TYPE_BY_REFERENCE_P (type)
1116 && TREE_CODE (orig_size) == INTEGER_CST
1117 && !TREE_OVERFLOW (orig_size)
1118 && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
1119 && (!size
1120 || (TREE_CODE (size) == INTEGER_CST
1121 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
1122 {
1123 tree packable_type = make_packable_type (type, true);
1124 if (TYPE_MODE (packable_type) != BLKmode
1125 && align >= TYPE_ALIGN (packable_type))
1126 type = packable_type;
1127 }
1128
1129 /* Now create the field with the original size. */
1130 field = create_field_decl (get_identifier ("F"), type, record, orig_size,
1131 bitsize_zero_node, 0, 1);
1132 DECL_INTERNAL_P (field) = 1;
1133
1134 /* Do not emit debug info until after the auxiliary record is built. */
1135 finish_record_type (record, field, 1, false);
1136
1137 /* Set the RM size if requested. */
1138 if (set_rm_size)
1139 {
1140 SET_TYPE_ADA_SIZE (record, size ? size : orig_size);
1141
1142 /* If the padded type is complete and has constant size, we canonicalize
1143 it by means of the hash table. This is consistent with the language
1144 semantics and ensures that gigi and the middle-end have a common view
1145 of these padded types. */
1146 if (TREE_CONSTANT (TYPE_SIZE (record)))
1147 {
1148 hashval_t hashcode;
1149 struct pad_type_hash in, *h;
1150 void **loc;
1151
1152 hashcode = iterative_hash_object (TYPE_HASH (type), 0);
1153 hashcode = iterative_hash_expr (TYPE_SIZE (record), hashcode);
1154 hashcode = iterative_hash_hashval_t (TYPE_ALIGN (record), hashcode);
1155 hashcode = iterative_hash_expr (TYPE_ADA_SIZE (record), hashcode);
1156
1157 in.hash = hashcode;
1158 in.type = record;
1159 h = (struct pad_type_hash *)
1160 htab_find_with_hash (pad_type_hash_table, &in, hashcode);
1161 if (h)
1162 {
1163 record = h->type;
1164 goto built;
1165 }
1166
1167 h = ggc_alloc_pad_type_hash ();
1168 h->hash = hashcode;
1169 h->type = record;
1170 loc = htab_find_slot_with_hash (pad_type_hash_table, h, hashcode,
1171 INSERT);
1172 *loc = (void *)h;
1173 }
1174 }
1175
1176 /* Unless debugging information isn't being written for the input type,
1177 write a record that shows what we are a subtype of and also make a
1178 variable that indicates our size, if still variable. */
1179 if (TREE_CODE (orig_size) != INTEGER_CST
1180 && TYPE_NAME (record)
1181 && TYPE_NAME (type)
1182 && !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1183 && DECL_IGNORED_P (TYPE_NAME (type))))
1184 {
1185 tree marker = make_node (RECORD_TYPE);
1186 tree name = TYPE_NAME (record);
1187 tree orig_name = TYPE_NAME (type);
1188
1189 if (TREE_CODE (name) == TYPE_DECL)
1190 name = DECL_NAME (name);
1191
1192 if (TREE_CODE (orig_name) == TYPE_DECL)
1193 orig_name = DECL_NAME (orig_name);
1194
1195 TYPE_NAME (marker) = concat_name (name, "XVS");
1196 finish_record_type (marker,
1197 create_field_decl (orig_name,
1198 build_reference_type (type),
1199 marker, NULL_TREE, NULL_TREE,
1200 0, 0),
1201 0, true);
1202
1203 add_parallel_type (record, marker);
1204
1205 if (definition && size && TREE_CODE (size) != INTEGER_CST)
1206 TYPE_SIZE_UNIT (marker)
1207 = create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
1208 TYPE_SIZE_UNIT (record), false, false, false,
1209 false, NULL, gnat_entity);
1210 }
1211
1212 rest_of_record_type_compilation (record);
1213
1214built:
1215 /* If the size was widened explicitly, maybe give a warning. Take the
1216 original size as the maximum size of the input if there was an
1217 unconstrained record involved and round it up to the specified alignment,
1218 if one was specified. But don't do it if we are just annotating types
1219 and the type is tagged, since tagged types aren't fully laid out in this
1220 mode. */
1221 if (!size
1222 || TREE_CODE (size) == COND_EXPR
1223 || TREE_CODE (size) == MAX_EXPR
1224 || No (gnat_entity)
1225 || (type_annotate_only && Is_Tagged_Type (Etype (gnat_entity))))
1226 return record;
1227
1228 if (CONTAINS_PLACEHOLDER_P (orig_size))
1229 orig_size = max_size (orig_size, true);
1230
1231 if (align)
1232 orig_size = round_up (orig_size, align);
1233
1234 if (!operand_equal_p (size, orig_size, 0)
1235 && !(TREE_CODE (size) == INTEGER_CST
1236 && TREE_CODE (orig_size) == INTEGER_CST
1237 && (TREE_OVERFLOW (size)
1238 || TREE_OVERFLOW (orig_size)
1239 || tree_int_cst_lt (size, orig_size))))
1240 {
1241 Node_Id gnat_error_node = Empty;
1242
1243 if (Is_Packed_Array_Type (gnat_entity))
1244 gnat_entity = Original_Array_Type (gnat_entity);
1245
1246 if ((Ekind (gnat_entity) == E_Component
1247 || Ekind (gnat_entity) == E_Discriminant)
1248 && Present (Component_Clause (gnat_entity)))
1249 gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
1250 else if (Present (Size_Clause (gnat_entity)))
1251 gnat_error_node = Expression (Size_Clause (gnat_entity));
1252
1253 /* Generate message only for entities that come from source, since
1254 if we have an entity created by expansion, the message will be
1255 generated for some other corresponding source entity. */
1256 if (Comes_From_Source (gnat_entity))
1257 {
1258 if (Present (gnat_error_node))
1259 post_error_ne_tree ("{^ }bits of & unused?",
1260 gnat_error_node, gnat_entity,
1261 size_diffop (size, orig_size));
1262 else if (is_component_type)
1263 post_error_ne_tree ("component of& padded{ by ^ bits}?",
1264 gnat_entity, gnat_entity,
1265 size_diffop (size, orig_size));
1266 }
1267 }
1268
1269 return record;
1270}
1271\f
1272/* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
1273 If this is a multi-dimensional array type, do this recursively.
1274
1275 OP may be
1276 - ALIAS_SET_COPY: the new set is made a copy of the old one.
1277 - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
1278 - ALIAS_SET_SUBSET: the new set is made a subset of the old one. */
1279
1280void
1281relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
1282{
1283 /* Remove any padding from GNU_OLD_TYPE. It doesn't matter in the case
1284 of a one-dimensional array, since the padding has the same alias set
1285 as the field type, but if it's a multi-dimensional array, we need to
1286 see the inner types. */
1287 while (TREE_CODE (gnu_old_type) == RECORD_TYPE
1288 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
1289 || TYPE_PADDING_P (gnu_old_type)))
1290 gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));
1291
1292 /* Unconstrained array types are deemed incomplete and would thus be given
1293 alias set 0. Retrieve the underlying array type. */
1294 if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
1295 gnu_old_type
1296 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
1297 if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
1298 gnu_new_type
1299 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));
1300
1301 if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
1302 && TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
1303 && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
1304 relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);
1305
1306 switch (op)
1307 {
1308 case ALIAS_SET_COPY:
1309 /* The alias set shouldn't be copied between array types with different
1310 aliasing settings because this can break the aliasing relationship
1311 between the array type and its element type. */
1312#ifndef ENABLE_CHECKING
1313 if (flag_strict_aliasing)
1314#endif
1315 gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
1316 && TREE_CODE (gnu_old_type) == ARRAY_TYPE
1317 && TYPE_NONALIASED_COMPONENT (gnu_new_type)
1318 != TYPE_NONALIASED_COMPONENT (gnu_old_type)));
1319
1320 TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
1321 break;
1322
1323 case ALIAS_SET_SUBSET:
1324 case ALIAS_SET_SUPERSET:
1325 {
1326 alias_set_type old_set = get_alias_set (gnu_old_type);
1327 alias_set_type new_set = get_alias_set (gnu_new_type);
1328
1329 /* Do nothing if the alias sets conflict. This ensures that we
1330 never call record_alias_subset several times for the same pair
1331 or at all for alias set 0. */
1332 if (!alias_sets_conflict_p (old_set, new_set))
1333 {
1334 if (op == ALIAS_SET_SUBSET)
1335 record_alias_subset (old_set, new_set);
1336 else
1337 record_alias_subset (new_set, old_set);
1338 }
1339 }
1340 break;
1341
1342 default:
1343 gcc_unreachable ();
1344 }
1345
1346 record_component_aliases (gnu_new_type);
1347}
1348\f
1aeb40dd
EB
1349/* Record TYPE as a builtin type for Ada. NAME is the name of the type.
1350 ARTIFICIAL_P is true if it's a type that was generated by the compiler. */
a1ab4c31
AC
1351
1352void
1aeb40dd 1353record_builtin_type (const char *name, tree type, bool artificial_p)
a1ab4c31 1354{
c172df28
AH
1355 tree type_decl = build_decl (input_location,
1356 TYPE_DECL, get_identifier (name), type);
1aeb40dd 1357 DECL_ARTIFICIAL (type_decl) = artificial_p;
bc712852 1358 TYPE_ARTIFICIAL (type) = artificial_p;
10069d53 1359 gnat_pushdecl (type_decl, Empty);
a1ab4c31 1360
10069d53
EB
1361 if (debug_hooks->type_decl)
1362 debug_hooks->type_decl (type_decl, false);
a1ab4c31
AC
1363}
1364\f
e3edbd56
EB
1365/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1366 finish constructing the record type as a fat pointer type. */
1367
1368void
1369finish_fat_pointer_type (tree record_type, tree field_list)
1370{
1371 /* Make sure we can put it into a register. */
5da8c011
EB
1372 if (STRICT_ALIGNMENT)
1373 TYPE_ALIGN (record_type) = MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE);
e3edbd56
EB
1374
1375 /* Show what it really is. */
1376 TYPE_FAT_POINTER_P (record_type) = 1;
1377
1378 /* Do not emit debug info for it since the types of its fields may still be
1379 incomplete at this point. */
1380 finish_record_type (record_type, field_list, 0, false);
1381
1382 /* Force type_contains_placeholder_p to return true on it. Although the
1383 PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
1384 type but the representation of the unconstrained array. */
1385 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
1386}
1387
032d1b71 1388/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
a1ab4c31
AC
1389 finish constructing the record or union type. If REP_LEVEL is zero, this
1390 record has no representation clause and so will be entirely laid out here.
1391 If REP_LEVEL is one, this record has a representation clause and has been
1392 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
1393 this record is derived from a parent record and thus inherits its layout;
032d1b71
EB
1394 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
1395 we need to write debug information about this type. */
a1ab4c31
AC
1396
1397void
032d1b71
EB
1398finish_record_type (tree record_type, tree field_list, int rep_level,
1399 bool debug_info_p)
a1ab4c31
AC
1400{
1401 enum tree_code code = TREE_CODE (record_type);
1402 tree name = TYPE_NAME (record_type);
1403 tree ada_size = bitsize_zero_node;
1404 tree size = bitsize_zero_node;
1405 bool had_size = TYPE_SIZE (record_type) != 0;
1406 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
1407 bool had_align = TYPE_ALIGN (record_type) != 0;
1408 tree field;
1409
032d1b71 1410 TYPE_FIELDS (record_type) = field_list;
a1ab4c31 1411
10069d53
EB
1412 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
1413 generate debug info and have a parallel type. */
1414 if (name && TREE_CODE (name) == TYPE_DECL)
1415 name = DECL_NAME (name);
1416 TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);
a1ab4c31
AC
1417
1418 /* Globally initialize the record first. If this is a rep'ed record,
1419 that just means some initializations; otherwise, layout the record. */
1420 if (rep_level > 0)
1421 {
1422 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
a1ab4c31
AC
1423
1424 if (!had_size_unit)
1425 TYPE_SIZE_UNIT (record_type) = size_zero_node;
b1fa9126 1426
a1ab4c31
AC
1427 if (!had_size)
1428 TYPE_SIZE (record_type) = bitsize_zero_node;
1429
1430 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
1431 out just like a UNION_TYPE, since the size will be fixed. */
1432 else if (code == QUAL_UNION_TYPE)
1433 code = UNION_TYPE;
1434 }
1435 else
1436 {
1437 /* Ensure there isn't a size already set. There can be in an error
1438 case where there is a rep clause but all fields have errors and
1439 no longer have a position. */
1440 TYPE_SIZE (record_type) = 0;
bb358f1c
EB
1441
1442 /* Ensure we use the traditional GCC layout for bitfields when we need
1443 to pack the record type or have a representation clause. The other
1444 possible layout (Microsoft C compiler), if available, would prevent
1445 efficient packing in almost all cases. */
1446#ifdef TARGET_MS_BITFIELD_LAYOUT
1447 if (TARGET_MS_BITFIELD_LAYOUT && TYPE_PACKED (record_type))
1448 decl_attributes (&record_type,
1449 tree_cons (get_identifier ("gcc_struct"),
1450 NULL_TREE, NULL_TREE),
1451 ATTR_FLAG_TYPE_IN_PLACE);
1452#endif
1453
a1ab4c31
AC
1454 layout_type (record_type);
1455 }
1456
1457 /* At this point, the position and size of each field is known. It was
1458 either set before entry by a rep clause, or by laying out the type above.
1459
1460 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
1461 to compute the Ada size; the GCC size and alignment (for rep'ed records
1462 that are not padding types); and the mode (for rep'ed records). We also
1463 clear the DECL_BIT_FIELD indication for the cases we know have not been
1464 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
1465
1466 if (code == QUAL_UNION_TYPE)
032d1b71 1467 field_list = nreverse (field_list);
a1ab4c31 1468
910ad8de 1469 for (field = field_list; field; field = DECL_CHAIN (field))
a1ab4c31
AC
1470 {
1471 tree type = TREE_TYPE (field);
1472 tree pos = bit_position (field);
1473 tree this_size = DECL_SIZE (field);
1474 tree this_ada_size;
1475
e1e5852c 1476 if (RECORD_OR_UNION_TYPE_P (type)
315cff15 1477 && !TYPE_FAT_POINTER_P (type)
a1ab4c31
AC
1478 && !TYPE_CONTAINS_TEMPLATE_P (type)
1479 && TYPE_ADA_SIZE (type))
1480 this_ada_size = TYPE_ADA_SIZE (type);
1481 else
1482 this_ada_size = this_size;
1483
1484 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
1485 if (DECL_BIT_FIELD (field)
1486 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
1487 {
1488 unsigned int align = TYPE_ALIGN (type);
1489
1490 /* In the general case, type alignment is required. */
1491 if (value_factor_p (pos, align))
1492 {
1493 /* The enclosing record type must be sufficiently aligned.
1494 Otherwise, if no alignment was specified for it and it
1495 has been laid out already, bump its alignment to the
1496 desired one if this is compatible with its size. */
1497 if (TYPE_ALIGN (record_type) >= align)
1498 {
1499 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
1500 DECL_BIT_FIELD (field) = 0;
1501 }
1502 else if (!had_align
1503 && rep_level == 0
1504 && value_factor_p (TYPE_SIZE (record_type), align))
1505 {
1506 TYPE_ALIGN (record_type) = align;
1507 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
1508 DECL_BIT_FIELD (field) = 0;
1509 }
1510 }
1511
1512 /* In the non-strict alignment case, only byte alignment is. */
1513 if (!STRICT_ALIGNMENT
1514 && DECL_BIT_FIELD (field)
1515 && value_factor_p (pos, BITS_PER_UNIT))
1516 DECL_BIT_FIELD (field) = 0;
1517 }
1518
c1abd261
EB
1519 /* If we still have DECL_BIT_FIELD set at this point, we know that the
1520 field is technically not addressable. Except that it can actually
1521 be addressed if it is BLKmode and happens to be properly aligned. */
1522 if (DECL_BIT_FIELD (field)
1523 && !(DECL_MODE (field) == BLKmode
1524 && value_factor_p (pos, BITS_PER_UNIT)))
1525 DECL_NONADDRESSABLE_P (field) = 1;
a1ab4c31
AC
1526
1527 /* A type must be as aligned as its most aligned field that is not
1528 a bit-field. But this is already enforced by layout_type. */
1529 if (rep_level > 0 && !DECL_BIT_FIELD (field))
1530 TYPE_ALIGN (record_type)
1531 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
1532
1533 switch (code)
1534 {
1535 case UNION_TYPE:
1536 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
1537 size = size_binop (MAX_EXPR, size, this_size);
1538 break;
1539
1540 case QUAL_UNION_TYPE:
1541 ada_size
1542 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1543 this_ada_size, ada_size);
1544 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1545 this_size, size);
1546 break;
1547
1548 case RECORD_TYPE:
1549 /* Since we know here that all fields are sorted in order of
1550 increasing bit position, the size of the record is one
1551 higher than the ending bit of the last field processed
1552 unless we have a rep clause, since in that case we might
1553 have a field outside a QUAL_UNION_TYPE that has a higher ending
1554 position. So use a MAX in that case. Also, if this field is a
1555 QUAL_UNION_TYPE, we need to take into account the previous size in
1556 the case of empty variants. */
1557 ada_size
1558 = merge_sizes (ada_size, pos, this_ada_size,
1559 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1560 size
1561 = merge_sizes (size, pos, this_size,
1562 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1563 break;
1564
1565 default:
1566 gcc_unreachable ();
1567 }
1568 }
1569
1570 if (code == QUAL_UNION_TYPE)
032d1b71 1571 nreverse (field_list);
a1ab4c31
AC
1572
1573 if (rep_level < 2)
1574 {
1575 /* If this is a padding record, we never want to make the size smaller
1576 than what was specified in it, if any. */
315cff15 1577 if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
a1ab4c31
AC
1578 size = TYPE_SIZE (record_type);
1579
1580 /* Now set any of the values we've just computed that apply. */
315cff15 1581 if (!TYPE_FAT_POINTER_P (record_type)
a1ab4c31
AC
1582 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
1583 SET_TYPE_ADA_SIZE (record_type, ada_size);
1584
1585 if (rep_level > 0)
1586 {
1587 tree size_unit = had_size_unit
1588 ? TYPE_SIZE_UNIT (record_type)
1589 : convert (sizetype,
1590 size_binop (CEIL_DIV_EXPR, size,
1591 bitsize_unit_node));
1592 unsigned int align = TYPE_ALIGN (record_type);
1593
1594 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
1595 TYPE_SIZE_UNIT (record_type)
1596 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
1597
1598 compute_record_mode (record_type);
1599 }
1600 }
1601
032d1b71 1602 if (debug_info_p)
a1ab4c31
AC
1603 rest_of_record_type_compilation (record_type);
1604}
1605
a5695aa2
EB
1606/* Append PARALLEL_TYPE on the chain of parallel types of TYPE. */
1607
1608void
1609add_parallel_type (tree type, tree parallel_type)
1610{
1611 tree decl = TYPE_STUB_DECL (type);
1612
1613 while (DECL_PARALLEL_TYPE (decl))
1614 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
1615
1616 SET_DECL_PARALLEL_TYPE (decl, parallel_type);
1617}
1618
1619/* Return true if TYPE has a parallel type. */
1620
1621static bool
1622has_parallel_type (tree type)
1623{
1624 tree decl = TYPE_STUB_DECL (type);
1625
1626 return DECL_PARALLEL_TYPE (decl) != NULL_TREE;
1627}
1628
032d1b71
EB
1629/* Wrap up compilation of RECORD_TYPE, i.e. output all the debug information
1630 associated with it. It need not be invoked directly in most cases since
1631 finish_record_type takes care of doing so, but this can be necessary if
1632 a parallel type is to be attached to the record type. */
a1ab4c31
AC
1633
1634void
1635rest_of_record_type_compilation (tree record_type)
1636{
a1ab4c31 1637 bool var_size = false;
fb88e1dd 1638 tree field;
a1ab4c31 1639
fb88e1dd
EB
1640 /* If this is a padded type, the bulk of the debug info has already been
1641 generated for the field's type. */
1642 if (TYPE_IS_PADDING_P (record_type))
1643 return;
1644
a5695aa2
EB
1645 /* If the type already has a parallel type (XVS type), then we're done. */
1646 if (has_parallel_type (record_type))
1647 return;
1648
fb88e1dd 1649 for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
a1ab4c31
AC
1650 {
1651 /* We need to make an XVE/XVU record if any field has variable size,
1652 whether or not the record does. For example, if we have a union,
1653 it may be that all fields, rounded up to the alignment, have the
1654 same size, in which case we'll use that size. But the debug
1655 output routines (except Dwarf2) won't be able to output the fields,
1656 so we need to make the special record. */
1657 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
1658 /* If a field has a non-constant qualifier, the record will have
1659 variable size too. */
fb88e1dd 1660 || (TREE_CODE (record_type) == QUAL_UNION_TYPE
a1ab4c31
AC
1661 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
1662 {
1663 var_size = true;
1664 break;
1665 }
1666 }
1667
fb88e1dd
EB
1668 /* If this record type is of variable size, make a parallel record type that
1669 will tell the debugger how the former is laid out (see exp_dbug.ads). */
1670 if (var_size)
a1ab4c31
AC
1671 {
1672 tree new_record_type
1673 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
1674 ? UNION_TYPE : TREE_CODE (record_type));
0fb2335d 1675 tree orig_name = TYPE_NAME (record_type), new_name;
a1ab4c31 1676 tree last_pos = bitsize_zero_node;
0fb2335d 1677 tree old_field, prev_old_field = NULL_TREE;
a1ab4c31 1678
0fb2335d
EB
1679 if (TREE_CODE (orig_name) == TYPE_DECL)
1680 orig_name = DECL_NAME (orig_name);
1681
1682 new_name
1683 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
1684 ? "XVU" : "XVE");
1685 TYPE_NAME (new_record_type) = new_name;
a1ab4c31
AC
1686 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
1687 TYPE_STUB_DECL (new_record_type)
0fb2335d 1688 = create_type_stub_decl (new_name, new_record_type);
a1ab4c31
AC
1689 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
1690 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
1691 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
1692 TYPE_SIZE_UNIT (new_record_type)
1693 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
1694
a1ab4c31
AC
1695 /* Now scan all the fields, replacing each field with a new
1696 field corresponding to the new encoding. */
1697 for (old_field = TYPE_FIELDS (record_type); old_field;
910ad8de 1698 old_field = DECL_CHAIN (old_field))
a1ab4c31
AC
1699 {
1700 tree field_type = TREE_TYPE (old_field);
1701 tree field_name = DECL_NAME (old_field);
1702 tree new_field;
1703 tree curpos = bit_position (old_field);
1704 bool var = false;
1705 unsigned int align = 0;
1706 tree pos;
1707
1708 /* See how the position was modified from the last position.
1709
1710 There are two basic cases we support: a value was added
1711 to the last position or the last position was rounded to
1712 a boundary and they something was added. Check for the
1713 first case first. If not, see if there is any evidence
1714 of rounding. If so, round the last position and try
1715 again.
1716
1717 If this is a union, the position can be taken as zero. */
1718
1719 /* Some computations depend on the shape of the position expression,
1720 so strip conversions to make sure it's exposed. */
1721 curpos = remove_conversions (curpos, true);
1722
1723 if (TREE_CODE (new_record_type) == UNION_TYPE)
1724 pos = bitsize_zero_node, align = 0;
1725 else
1726 pos = compute_related_constant (curpos, last_pos);
1727
1728 if (!pos && TREE_CODE (curpos) == MULT_EXPR
1729 && host_integerp (TREE_OPERAND (curpos, 1), 1))
1730 {
1731 tree offset = TREE_OPERAND (curpos, 0);
1732 align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);
1733
f58e663b
EB
1734 /* An offset which is a bitwise AND with a mask increases the
1735 alignment according to the number of trailing zeros. */
a1ab4c31
AC
1736 offset = remove_conversions (offset, true);
1737 if (TREE_CODE (offset) == BIT_AND_EXPR
f58e663b 1738 && TREE_CODE (TREE_OPERAND (offset, 1)) == INTEGER_CST)
a1ab4c31 1739 {
f58e663b
EB
1740 unsigned HOST_WIDE_INT mask
1741 = TREE_INT_CST_LOW (TREE_OPERAND (offset, 1));
1742 unsigned int i;
1743
1744 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
1745 {
1746 if (mask & 1)
1747 break;
1748 mask >>= 1;
1749 align *= 2;
1750 }
a1ab4c31
AC
1751 }
1752
1753 pos = compute_related_constant (curpos,
1754 round_up (last_pos, align));
1755 }
1756 else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
1757 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
1758 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
1759 && host_integerp (TREE_OPERAND
1760 (TREE_OPERAND (curpos, 0), 1),
1761 1))
1762 {
1763 align
1764 = tree_low_cst
1765 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
1766 pos = compute_related_constant (curpos,
1767 round_up (last_pos, align));
1768 }
1769 else if (potential_alignment_gap (prev_old_field, old_field,
1770 pos))
1771 {
1772 align = TYPE_ALIGN (field_type);
1773 pos = compute_related_constant (curpos,
1774 round_up (last_pos, align));
1775 }
1776
1777 /* If we can't compute a position, set it to zero.
1778
1779 ??? We really should abort here, but it's too much work
1780 to get this correct for all cases. */
1781
1782 if (!pos)
1783 pos = bitsize_zero_node;
1784
1785 /* See if this type is variable-sized and make a pointer type
1786 and indicate the indirection if so. Beware that the debug
1787 back-end may adjust the position computed above according
1788 to the alignment of the field type, i.e. the pointer type
1789 in this case, if we don't preventively counter that. */
1790 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
1791 {
1792 field_type = build_pointer_type (field_type);
1793 if (align != 0 && TYPE_ALIGN (field_type) > align)
1794 {
1795 field_type = copy_node (field_type);
1796 TYPE_ALIGN (field_type) = align;
1797 }
1798 var = true;
1799 }
1800
1801 /* Make a new field name, if necessary. */
1802 if (var || align != 0)
1803 {
1804 char suffix[16];
1805
1806 if (align != 0)
1807 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
1808 align / BITS_PER_UNIT);
1809 else
1810 strcpy (suffix, "XVL");
1811
0fb2335d 1812 field_name = concat_name (field_name, suffix);
a1ab4c31
AC
1813 }
1814
da01bfee
EB
1815 new_field
1816 = create_field_decl (field_name, field_type, new_record_type,
1817 DECL_SIZE (old_field), pos, 0, 0);
910ad8de 1818 DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
a1ab4c31
AC
1819 TYPE_FIELDS (new_record_type) = new_field;
1820
1821 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1822 zero. The only time it's not the last field of the record
1823 is when there are other components at fixed positions after
1824 it (meaning there was a rep clause for every field) and we
1825 want to be able to encode them. */
1826 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
1827 (TREE_CODE (TREE_TYPE (old_field))
1828 == QUAL_UNION_TYPE)
1829 ? bitsize_zero_node
1830 : DECL_SIZE (old_field));
1831 prev_old_field = old_field;
1832 }
1833
fb88e1dd 1834 TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));
a1ab4c31 1835
a5695aa2 1836 add_parallel_type (record_type, new_record_type);
a1ab4c31 1837 }
a1ab4c31
AC
1838}
1839
a1ab4c31 1840/* Utility function of above to merge LAST_SIZE, the previous size of a record
1e17ef87
EB
1841 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
1842 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
1843 replace a value of zero with the old size. If HAS_REP is true, we take the
1844 MAX of the end position of this field with LAST_SIZE. In all other cases,
1845 we use FIRST_BIT plus SIZE. Return an expression for the size. */
a1ab4c31
AC
1846
1847static tree
1848merge_sizes (tree last_size, tree first_bit, tree size, bool special,
1849 bool has_rep)
1850{
1851 tree type = TREE_TYPE (last_size);
c6bd4220 1852 tree new_size;
a1ab4c31
AC
1853
1854 if (!special || TREE_CODE (size) != COND_EXPR)
1855 {
c6bd4220 1856 new_size = size_binop (PLUS_EXPR, first_bit, size);
a1ab4c31 1857 if (has_rep)
c6bd4220 1858 new_size = size_binop (MAX_EXPR, last_size, new_size);
a1ab4c31
AC
1859 }
1860
1861 else
c6bd4220
EB
1862 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
1863 integer_zerop (TREE_OPERAND (size, 1))
1864 ? last_size : merge_sizes (last_size, first_bit,
1865 TREE_OPERAND (size, 1),
1866 1, has_rep),
1867 integer_zerop (TREE_OPERAND (size, 2))
1868 ? last_size : merge_sizes (last_size, first_bit,
1869 TREE_OPERAND (size, 2),
1870 1, has_rep));
a1ab4c31
AC
1871
1872 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
1873 when fed through substitute_in_expr) into thinking that a constant
1874 size is not constant. */
c6bd4220
EB
1875 while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
1876 new_size = TREE_OPERAND (new_size, 0);
a1ab4c31 1877
c6bd4220 1878 return new_size;
a1ab4c31
AC
1879}
1880
1881/* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1882 related by the addition of a constant. Return that constant if so. */
1883
1884static tree
1885compute_related_constant (tree op0, tree op1)
1886{
1887 tree op0_var, op1_var;
1888 tree op0_con = split_plus (op0, &op0_var);
1889 tree op1_con = split_plus (op1, &op1_var);
1890 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1891
1892 if (operand_equal_p (op0_var, op1_var, 0))
1893 return result;
1894 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1895 return result;
1896 else
1897 return 0;
1898}
1899
1900/* Utility function of above to split a tree OP which may be a sum, into a
1901 constant part, which is returned, and a variable part, which is stored
1902 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1903 bitsizetype. */
1904
1905static tree
1906split_plus (tree in, tree *pvar)
1907{
722356ce
EB
1908 /* Strip conversions in order to ease the tree traversal and maximize the
1909 potential for constant or plus/minus discovery. We need to be careful
a1ab4c31
AC
1910 to always return and set *pvar to bitsizetype trees, but it's worth
1911 the effort. */
722356ce 1912 in = remove_conversions (in, false);
a1ab4c31
AC
1913
1914 *pvar = convert (bitsizetype, in);
1915
1916 if (TREE_CODE (in) == INTEGER_CST)
1917 {
1918 *pvar = bitsize_zero_node;
1919 return convert (bitsizetype, in);
1920 }
1921 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1922 {
1923 tree lhs_var, rhs_var;
1924 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1925 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1926
1927 if (lhs_var == TREE_OPERAND (in, 0)
1928 && rhs_var == TREE_OPERAND (in, 1))
1929 return bitsize_zero_node;
1930
1931 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1932 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1933 }
1934 else
1935 return bitsize_zero_node;
1936}
1937\f
d47d0a8d
EB
1938/* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1939 subprogram. If it is VOID_TYPE, then we are dealing with a procedure,
1940 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1941 PARM_DECL nodes that are the subprogram parameters. CICO_LIST is the
1942 copy-in/copy-out list to be stored into the TYPE_CICO_LIST field.
1943 RETURN_UNCONSTRAINED_P is true if the function returns an unconstrained
1944 object. RETURN_BY_DIRECT_REF_P is true if the function returns by direct
1945 reference. RETURN_BY_INVISI_REF_P is true if the function returns by
1946 invisible reference. */
a1ab4c31
AC
1947
1948tree
1949create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
d47d0a8d
EB
1950 bool return_unconstrained_p, bool return_by_direct_ref_p,
1951 bool return_by_invisi_ref_p)
a1ab4c31 1952{
e5b00edf
NF
1953 /* A list of the data type nodes of the subprogram formal parameters.
1954 This list is generated by traversing the input list of PARM_DECL
1955 nodes. */
9771b263 1956 vec<tree, va_gc> *param_type_list = NULL;
d47d0a8d 1957 tree t, type;
a1ab4c31 1958
910ad8de 1959 for (t = param_decl_list; t; t = DECL_CHAIN (t))
9771b263 1960 vec_safe_push (param_type_list, TREE_TYPE (t));
a1ab4c31 1961
e5b00edf 1962 type = build_function_type_vec (return_type, param_type_list);
a1ab4c31 1963
d47d0a8d
EB
1964 /* TYPE may have been shared since GCC hashes types. If it has a different
1965 CICO_LIST, make a copy. Likewise for the various flags. */
523e82a7
EB
1966 if (!fntype_same_flags_p (type, cico_list, return_unconstrained_p,
1967 return_by_direct_ref_p, return_by_invisi_ref_p))
d47d0a8d
EB
1968 {
1969 type = copy_type (type);
1970 TYPE_CI_CO_LIST (type) = cico_list;
1971 TYPE_RETURN_UNCONSTRAINED_P (type) = return_unconstrained_p;
1972 TYPE_RETURN_BY_DIRECT_REF_P (type) = return_by_direct_ref_p;
1973 TREE_ADDRESSABLE (type) = return_by_invisi_ref_p;
1974 }
a1ab4c31 1975
a1ab4c31
AC
1976 return type;
1977}
1978\f
1979/* Return a copy of TYPE but safe to modify in any way. */
1980
1981tree
1982copy_type (tree type)
1983{
c6bd4220 1984 tree new_type = copy_node (type);
a1ab4c31 1985
90dcfecb
EB
1986 /* Unshare the language-specific data. */
1987 if (TYPE_LANG_SPECIFIC (type))
1988 {
1989 TYPE_LANG_SPECIFIC (new_type) = NULL;
1990 SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
1991 }
1992
1993 /* And the contents of the language-specific slot if needed. */
1994 if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
1995 && TYPE_RM_VALUES (type))
1996 {
1997 TYPE_RM_VALUES (new_type) = NULL_TREE;
1998 SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
1999 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
2000 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
2001 }
2002
a1ab4c31
AC
2003 /* copy_node clears this field instead of copying it, because it is
2004 aliased with TREE_CHAIN. */
c6bd4220 2005 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
a1ab4c31 2006
c6bd4220
EB
2007 TYPE_POINTER_TO (new_type) = 0;
2008 TYPE_REFERENCE_TO (new_type) = 0;
2009 TYPE_MAIN_VARIANT (new_type) = new_type;
2010 TYPE_NEXT_VARIANT (new_type) = 0;
a1ab4c31 2011
c6bd4220 2012 return new_type;
a1ab4c31
AC
2013}
2014\f
c1abd261
EB
2015/* Return a subtype of sizetype with range MIN to MAX and whose
2016 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2017 of the associated TYPE_DECL. */
a1ab4c31
AC
2018
2019tree
2020create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
2021{
2022 /* First build a type for the desired range. */
523e82a7 2023 tree type = build_nonshared_range_type (sizetype, min, max);
a1ab4c31 2024
523e82a7 2025 /* Then set the index type. */
a1ab4c31
AC
2026 SET_TYPE_INDEX_TYPE (type, index);
2027 create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);
c1abd261 2028
a1ab4c31
AC
2029 return type;
2030}
84fb43a1
EB
2031
2032/* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2033 sizetype is used. */
2034
2035tree
2036create_range_type (tree type, tree min, tree max)
2037{
2038 tree range_type;
2039
2040 if (type == NULL_TREE)
2041 type = sizetype;
2042
2043 /* First build a type with the base range. */
523e82a7
EB
2044 range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
2045 TYPE_MAX_VALUE (type));
84fb43a1
EB
2046
2047 /* Then set the actual range. */
523e82a7
EB
2048 SET_TYPE_RM_MIN_VALUE (range_type, convert (type, min));
2049 SET_TYPE_RM_MAX_VALUE (range_type, convert (type, max));
84fb43a1
EB
2050
2051 return range_type;
2052}
a1ab4c31 2053\f
10069d53
EB
2054/* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
2055 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
2056 its data type. */
2057
2058tree
2059create_type_stub_decl (tree type_name, tree type)
2060{
2061 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
2062 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
2063 emitted in DWARF. */
c172df28
AH
2064 tree type_decl = build_decl (input_location,
2065 TYPE_DECL, type_name, type);
10069d53 2066 DECL_ARTIFICIAL (type_decl) = 1;
bc712852 2067 TYPE_ARTIFICIAL (type) = 1;
10069d53
EB
2068 return type_decl;
2069}
2070
2071/* Return a TYPE_DECL node. TYPE_NAME gives the name of the type and TYPE
2072 is a ..._TYPE node giving its data type. ARTIFICIAL_P is true if this
2073 is a declaration that was generated by the compiler. DEBUG_INFO_P is
2074 true if we need to write debug information about this type. GNAT_NODE
2075 is used for the position of the decl. */
a1ab4c31
AC
2076
2077tree
2078create_type_decl (tree type_name, tree type, struct attrib *attr_list,
2079 bool artificial_p, bool debug_info_p, Node_Id gnat_node)
2080{
a1ab4c31 2081 enum tree_code code = TREE_CODE (type);
10069d53
EB
2082 bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
2083 tree type_decl;
a1ab4c31 2084
10069d53
EB
2085 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
2086 gcc_assert (!TYPE_IS_DUMMY_P (type));
a1ab4c31 2087
10069d53
EB
2088 /* If the type hasn't been named yet, we're naming it; preserve an existing
2089 TYPE_STUB_DECL that has been attached to it for some purpose. */
2090 if (!named && TYPE_STUB_DECL (type))
2091 {
2092 type_decl = TYPE_STUB_DECL (type);
2093 DECL_NAME (type_decl) = type_name;
2094 }
2095 else
c172df28
AH
2096 type_decl = build_decl (input_location,
2097 TYPE_DECL, type_name, type);
a1ab4c31 2098
10069d53 2099 DECL_ARTIFICIAL (type_decl) = artificial_p;
bc712852 2100 TYPE_ARTIFICIAL (type) = artificial_p;
58c8f770
EB
2101
2102 /* Add this decl to the current binding level. */
10069d53 2103 gnat_pushdecl (type_decl, gnat_node);
58c8f770 2104
a1ab4c31
AC
2105 process_attributes (type_decl, attr_list);
2106
10069d53
EB
2107 /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
2108 This causes the name to be also viewed as a "tag" by the debug
2109 back-end, with the advantage that no DW_TAG_typedef is emitted
2110 for artificial "tagged" types in DWARF. */
2111 if (!named)
2112 TYPE_STUB_DECL (type) = type_decl;
2113
50741117
EB
2114 /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
2115 back-end doesn't support, and for others if we don't need to. */
a1ab4c31
AC
2116 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
2117 DECL_IGNORED_P (type_decl) = 1;
a1ab4c31
AC
2118
2119 return type_decl;
2120}
10069d53 2121\f
a1ab4c31
AC
2122/* Return a VAR_DECL or CONST_DECL node.
2123
2124 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
2125 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
2126 the GCC tree for an optional initial expression; NULL_TREE if none.
2127
2128 CONST_FLAG is true if this variable is constant, in which case we might
2129 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
2130
2131 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
2132 definition to be made visible outside of the current compilation unit, for
2133 instance variable definitions in a package specification.
2134
1e17ef87 2135 EXTERN_FLAG is true when processing an external variable declaration (as
a1ab4c31
AC
2136 opposed to a definition: no storage is to be allocated for the variable).
2137
2138 STATIC_FLAG is only relevant when not at top level. In that case
2139 it indicates whether to always allocate storage to the variable.
2140
2141 GNAT_NODE is used for the position of the decl. */
2142
2143tree
2144create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
2145 bool const_flag, bool public_flag, bool extern_flag,
2146 bool static_flag, bool const_decl_allowed_p,
2147 struct attrib *attr_list, Node_Id gnat_node)
2148{
50179d58
EB
2149 /* Whether the initializer is a constant initializer. At the global level
2150 or for an external object or an object to be allocated in static memory,
2151 we check that it is a valid constant expression for use in initializing
2152 a static variable; otherwise, we only check that it is constant. */
a1ab4c31
AC
2153 bool init_const
2154 = (var_init != 0
2155 && gnat_types_compatible_p (type, TREE_TYPE (var_init))
50179d58 2156 && (global_bindings_p () || extern_flag || static_flag
a1ab4c31
AC
2157 ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
2158 : TREE_CONSTANT (var_init)));
2159
2160 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
2161 case the initializer may be used in-lieu of the DECL node (as done in
2162 Identifier_to_gnu). This is useful to prevent the need of elaboration
2163 code when an identifier for which such a decl is made is in turn used as
2164 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
2165 but extra constraints apply to this choice (see below) and are not
2166 relevant to the distinction we wish to make. */
2167 bool constant_p = const_flag && init_const;
2168
2169 /* The actual DECL node. CONST_DECL was initially intended for enumerals
2170 and may be used for scalars in general but not for aggregates. */
2171 tree var_decl
c172df28
AH
2172 = build_decl (input_location,
2173 (constant_p && const_decl_allowed_p
a1ab4c31
AC
2174 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
2175 var_name, type);
2176
2177 /* If this is external, throw away any initializations (they will be done
2178 elsewhere) unless this is a constant for which we would like to remain
2179 able to get the initializer. If we are defining a global here, leave a
2180 constant initialization and save any variable elaborations for the
2181 elaboration routine. If we are just annotating types, throw away the
2182 initialization if it isn't a constant. */
2183 if ((extern_flag && !constant_p)
2184 || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
2185 var_init = NULL_TREE;
2186
2187 /* At the global level, an initializer requiring code to be generated
2188 produces elaboration statements. Check that such statements are allowed,
2189 that is, not violating a No_Elaboration_Code restriction. */
3b9e8343 2190 if (global_bindings_p () && var_init != 0 && !init_const)
a1ab4c31 2191 Check_Elaboration_Code_Allowed (gnat_node);
3b9e8343 2192
8b7b0c36
JH
2193 DECL_INITIAL (var_decl) = var_init;
2194 TREE_READONLY (var_decl) = const_flag;
2195 DECL_EXTERNAL (var_decl) = extern_flag;
2196 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
2197 TREE_CONSTANT (var_decl) = constant_p;
2198 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
2199 = TYPE_VOLATILE (type);
a1ab4c31
AC
2200
2201 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
2202 try to fiddle with DECL_COMMON. However, on platforms that don't
2203 support global BSS sections, uninitialized global variables would
2204 go in DATA instead, thus increasing the size of the executable. */
2205 if (!flag_no_common
2206 && TREE_CODE (var_decl) == VAR_DECL
3b9e8343 2207 && TREE_PUBLIC (var_decl)
a1ab4c31
AC
2208 && !have_global_bss_p ())
2209 DECL_COMMON (var_decl) = 1;
a1ab4c31 2210
2231f17f
EB
2211 /* At the global binding level, we need to allocate static storage for the
2212 variable if it isn't external. Otherwise, we allocate automatic storage
2213 unless requested not to. */
a1ab4c31 2214 TREE_STATIC (var_decl)
2231f17f 2215 = !extern_flag && (static_flag || global_bindings_p ());
a1ab4c31 2216
5225a138
EB
2217 /* For an external constant whose initializer is not absolute, do not emit
2218 debug info. In DWARF this would mean a global relocation in a read-only
c01fe451 2219 section which runs afoul of the PE-COFF run-time relocation mechanism. */
5225a138
EB
2220 if (extern_flag
2221 && constant_p
50179d58 2222 && var_init
5225a138
EB
2223 && initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
2224 != null_pointer_node)
2225 DECL_IGNORED_P (var_decl) = 1;
2226
a1ab4c31
AC
2227 /* Add this decl to the current binding level. */
2228 gnat_pushdecl (var_decl, gnat_node);
2229
2230 if (TREE_SIDE_EFFECTS (var_decl))
2231 TREE_ADDRESSABLE (var_decl) = 1;
2232
58c8f770 2233 if (TREE_CODE (var_decl) == VAR_DECL)
a1ab4c31 2234 {
58c8f770
EB
2235 if (asm_name)
2236 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
2237 process_attributes (var_decl, attr_list);
a1ab4c31
AC
2238 if (global_bindings_p ())
2239 rest_of_decl_compilation (var_decl, true, 0);
2240 }
a1ab4c31
AC
2241
2242 return var_decl;
2243}
2244\f
2245/* Return true if TYPE, an aggregate type, contains (or is) an array. */
2246
2247static bool
2248aggregate_type_contains_array_p (tree type)
2249{
2250 switch (TREE_CODE (type))
2251 {
2252 case RECORD_TYPE:
2253 case UNION_TYPE:
2254 case QUAL_UNION_TYPE:
2255 {
2256 tree field;
910ad8de 2257 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
a1ab4c31
AC
2258 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
2259 && aggregate_type_contains_array_p (TREE_TYPE (field)))
2260 return true;
2261 return false;
2262 }
2263
2264 case ARRAY_TYPE:
2265 return true;
2266
2267 default:
2268 gcc_unreachable ();
2269 }
2270}
2271
62f9f3ce 2272/* Return a FIELD_DECL node. FIELD_NAME is the field's name, FIELD_TYPE is
da01bfee
EB
2273 its type and RECORD_TYPE is the type of the enclosing record. If SIZE is
2274 nonzero, it is the specified size of the field. If POS is nonzero, it is
2275 the bit position. PACKED is 1 if the enclosing record is packed, -1 if it
2276 has Component_Alignment of Storage_Unit. If ADDRESSABLE is nonzero, it
62f9f3ce
EB
2277 means we are allowed to take the address of the field; if it is negative,
2278 we should not make a bitfield, which is used by make_aligning_type. */
a1ab4c31
AC
2279
2280tree
2281create_field_decl (tree field_name, tree field_type, tree record_type,
da01bfee 2282 tree size, tree pos, int packed, int addressable)
a1ab4c31 2283{
c172df28
AH
2284 tree field_decl = build_decl (input_location,
2285 FIELD_DECL, field_name, field_type);
a1ab4c31
AC
2286
2287 DECL_CONTEXT (field_decl) = record_type;
2288 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
2289
2290 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
2291 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
2292 Likewise for an aggregate without specified position that contains an
2293 array, because in this case slices of variable length of this array
2294 must be handled by GCC and variable-sized objects need to be aligned
2295 to at least a byte boundary. */
2296 if (packed && (TYPE_MODE (field_type) == BLKmode
2297 || (!pos
2298 && AGGREGATE_TYPE_P (field_type)
2299 && aggregate_type_contains_array_p (field_type))))
2300 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
2301
2302 /* If a size is specified, use it. Otherwise, if the record type is packed
2303 compute a size to use, which may differ from the object's natural size.
2304 We always set a size in this case to trigger the checks for bitfield
2305 creation below, which is typically required when no position has been
2306 specified. */
2307 if (size)
2308 size = convert (bitsizetype, size);
2309 else if (packed == 1)
2310 {
2311 size = rm_size (field_type);
62f9f3ce
EB
2312 if (TYPE_MODE (field_type) == BLKmode)
2313 size = round_up (size, BITS_PER_UNIT);
a1ab4c31
AC
2314 }
2315
2316 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
2317 specified for two reasons: first if the size differs from the natural
2318 size. Second, if the alignment is insufficient. There are a number of
2319 ways the latter can be true.
2320
2321 We never make a bitfield if the type of the field has a nonconstant size,
2322 because no such entity requiring bitfield operations should reach here.
2323
2324 We do *preventively* make a bitfield when there might be the need for it
2325 but we don't have all the necessary information to decide, as is the case
2326 of a field with no specified position in a packed record.
2327
2328 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
2329 in layout_decl or finish_record_type to clear the bit_field indication if
2330 it is in fact not needed. */
2331 if (addressable >= 0
2332 && size
2333 && TREE_CODE (size) == INTEGER_CST
2334 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
2335 && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
2336 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
2337 || packed
2338 || (TYPE_ALIGN (record_type) != 0
2339 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
2340 {
2341 DECL_BIT_FIELD (field_decl) = 1;
2342 DECL_SIZE (field_decl) = size;
2343 if (!packed && !pos)
feec4372
EB
2344 {
2345 if (TYPE_ALIGN (record_type) != 0
2346 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))
2347 DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
2348 else
2349 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
2350 }
a1ab4c31
AC
2351 }
2352
2353 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
2354
2355 /* Bump the alignment if need be, either for bitfield/packing purposes or
2356 to satisfy the type requirements if no such consideration applies. When
2357 we get the alignment from the type, indicate if this is from an explicit
2358 user request, which prevents stor-layout from lowering it later on. */
2359 {
d9223014 2360 unsigned int bit_align
a1ab4c31
AC
2361 = (DECL_BIT_FIELD (field_decl) ? 1
2362 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);
2363
2364 if (bit_align > DECL_ALIGN (field_decl))
2365 DECL_ALIGN (field_decl) = bit_align;
2366 else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
2367 {
2368 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
2369 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
2370 }
2371 }
2372
2373 if (pos)
2374 {
2375 /* We need to pass in the alignment the DECL is known to have.
2376 This is the lowest-order bit set in POS, but no more than
2377 the alignment of the record, if one is specified. Note
2378 that an alignment of 0 is taken as infinite. */
2379 unsigned int known_align;
2380
2381 if (host_integerp (pos, 1))
2382 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
2383 else
2384 known_align = BITS_PER_UNIT;
2385
2386 if (TYPE_ALIGN (record_type)
2387 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
2388 known_align = TYPE_ALIGN (record_type);
2389
2390 layout_decl (field_decl, known_align);
2391 SET_DECL_OFFSET_ALIGN (field_decl,
2392 host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
2393 : BITS_PER_UNIT);
2394 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
2395 &DECL_FIELD_BIT_OFFSET (field_decl),
2396 DECL_OFFSET_ALIGN (field_decl), pos);
a1ab4c31
AC
2397 }
2398
2399 /* In addition to what our caller says, claim the field is addressable if we
2400 know that its type is not suitable.
2401
2402 The field may also be "technically" nonaddressable, meaning that even if
2403 we attempt to take the field's address we will actually get the address
2404 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
2405 value we have at this point is not accurate enough, so we don't account
2406 for this here and let finish_record_type decide. */
4c5a0615 2407 if (!addressable && !type_for_nonaliased_component_p (field_type))
a1ab4c31
AC
2408 addressable = 1;
2409
2410 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
2411
2412 return field_decl;
2413}
2414\f
a8e05f92
EB
2415/* Return a PARM_DECL node. PARAM_NAME is the name of the parameter and
2416 PARAM_TYPE is its type. READONLY is true if the parameter is readonly
2417 (either an In parameter or an address of a pass-by-ref parameter). */
a1ab4c31
AC
2418
2419tree
2420create_param_decl (tree param_name, tree param_type, bool readonly)
2421{
c172df28
AH
2422 tree param_decl = build_decl (input_location,
2423 PARM_DECL, param_name, param_type);
a1ab4c31 2424
a8e05f92
EB
2425 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
2426 can lead to various ABI violations. */
2427 if (targetm.calls.promote_prototypes (NULL_TREE)
2428 && INTEGRAL_TYPE_P (param_type)
a1ab4c31
AC
2429 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
2430 {
2431 /* We have to be careful about biased types here. Make a subtype
2432 of integer_type_node with the proper biasing. */
2433 if (TREE_CODE (param_type) == INTEGER_TYPE
2434 && TYPE_BIASED_REPRESENTATION_P (param_type))
2435 {
84fb43a1
EB
2436 tree subtype
2437 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
c1abd261
EB
2438 TREE_TYPE (subtype) = integer_type_node;
2439 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
84fb43a1
EB
2440 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (param_type));
2441 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (param_type));
c1abd261 2442 param_type = subtype;
a1ab4c31
AC
2443 }
2444 else
2445 param_type = integer_type_node;
2446 }
2447
2448 DECL_ARG_TYPE (param_decl) = param_type;
2449 TREE_READONLY (param_decl) = readonly;
2450 return param_decl;
2451}
2452\f
2453/* Given a DECL and ATTR_LIST, process the listed attributes. */
2454
58c8f770 2455static void
a1ab4c31
AC
2456process_attributes (tree decl, struct attrib *attr_list)
2457{
2458 for (; attr_list; attr_list = attr_list->next)
2459 switch (attr_list->type)
2460 {
2461 case ATTR_MACHINE_ATTRIBUTE:
58c8f770 2462 input_location = DECL_SOURCE_LOCATION (decl);
a1ab4c31
AC
2463 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
2464 NULL_TREE),
2465 ATTR_FLAG_TYPE_IN_PLACE);
2466 break;
2467
2468 case ATTR_LINK_ALIAS:
2469 if (! DECL_EXTERNAL (decl))
2470 {
2471 TREE_STATIC (decl) = 1;
2472 assemble_alias (decl, attr_list->name);
2473 }
2474 break;
2475
2476 case ATTR_WEAK_EXTERNAL:
2477 if (SUPPORTS_WEAK)
2478 declare_weak (decl);
2479 else
2480 post_error ("?weak declarations not supported on this target",
2481 attr_list->error_point);
2482 break;
2483
2484 case ATTR_LINK_SECTION:
677f3fa8 2485 if (targetm_common.have_named_sections)
a1ab4c31
AC
2486 {
2487 DECL_SECTION_NAME (decl)
2488 = build_string (IDENTIFIER_LENGTH (attr_list->name),
2489 IDENTIFIER_POINTER (attr_list->name));
2490 DECL_COMMON (decl) = 0;
2491 }
2492 else
2493 post_error ("?section attributes are not supported for this target",
2494 attr_list->error_point);
2495 break;
2496
2497 case ATTR_LINK_CONSTRUCTOR:
2498 DECL_STATIC_CONSTRUCTOR (decl) = 1;
2499 TREE_USED (decl) = 1;
2500 break;
2501
2502 case ATTR_LINK_DESTRUCTOR:
2503 DECL_STATIC_DESTRUCTOR (decl) = 1;
2504 TREE_USED (decl) = 1;
2505 break;
40a14772
TG
2506
2507 case ATTR_THREAD_LOCAL_STORAGE:
62298c61
TG
2508 DECL_TLS_MODEL (decl) = decl_default_tls_model (decl);
2509 DECL_COMMON (decl) = 0;
40a14772 2510 break;
a1ab4c31
AC
2511 }
2512}
2513\f
feec4372 2514/* Record DECL as a global renaming pointer. */
a1ab4c31
AC
2515
2516void
2517record_global_renaming_pointer (tree decl)
2518{
15bf7d19 2519 gcc_assert (!DECL_LOOP_PARM_P (decl) && DECL_RENAMED_OBJECT (decl));
9771b263 2520 vec_safe_push (global_renaming_pointers, decl);
a1ab4c31
AC
2521}
2522
2523/* Invalidate the global renaming pointers. */
2524
2525void
2526invalidate_global_renaming_pointers (void)
2527{
2528 unsigned int i;
2529 tree iter;
2530
9771b263
DN
2531 if (global_renaming_pointers == NULL)
2532 return;
2533
2534 FOR_EACH_VEC_ELT (*global_renaming_pointers, i, iter)
a1ab4c31
AC
2535 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
2536
9771b263 2537 vec_free (global_renaming_pointers);
a1ab4c31
AC
2538}
2539
2540/* Return true if VALUE is a known to be a multiple of FACTOR, which must be
2541 a power of 2. */
2542
2543bool
2544value_factor_p (tree value, HOST_WIDE_INT factor)
2545{
2546 if (host_integerp (value, 1))
2547 return tree_low_cst (value, 1) % factor == 0;
2548
2549 if (TREE_CODE (value) == MULT_EXPR)
2550 return (value_factor_p (TREE_OPERAND (value, 0), factor)
2551 || value_factor_p (TREE_OPERAND (value, 1), factor));
2552
2553 return false;
2554}
2555
7d7fcb08 2556/* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
a1ab4c31
AC
2557 unless we can prove these 2 fields are laid out in such a way that no gap
2558 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
2559 is the distance in bits between the end of PREV_FIELD and the starting
2560 position of CURR_FIELD. It is ignored if null. */
2561
2562static bool
2563potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
2564{
2565 /* If this is the first field of the record, there cannot be any gap */
2566 if (!prev_field)
2567 return false;
2568
2569 /* If the previous field is a union type, then return False: The only
2570 time when such a field is not the last field of the record is when
2571 there are other components at fixed positions after it (meaning there
2572 was a rep clause for every field), in which case we don't want the
2573 alignment constraint to override them. */
2574 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
2575 return false;
2576
2577 /* If the distance between the end of prev_field and the beginning of
2578 curr_field is constant, then there is a gap if the value of this
2579 constant is not null. */
2580 if (offset && host_integerp (offset, 1))
2581 return !integer_zerop (offset);
2582
2583 /* If the size and position of the previous field are constant,
2584 then check the sum of this size and position. There will be a gap
2585 iff it is not multiple of the current field alignment. */
2586 if (host_integerp (DECL_SIZE (prev_field), 1)
2587 && host_integerp (bit_position (prev_field), 1))
2588 return ((tree_low_cst (bit_position (prev_field), 1)
2589 + tree_low_cst (DECL_SIZE (prev_field), 1))
2590 % DECL_ALIGN (curr_field) != 0);
2591
2592 /* If both the position and size of the previous field are multiples
2593 of the current field alignment, there cannot be any gap. */
2594 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
2595 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
2596 return false;
2597
2598 /* Fallback, return that there may be a potential gap */
2599 return true;
2600}
2601
88a94e2b
EB
2602/* Return a LABEL_DECL with LABEL_NAME. GNAT_NODE is used for the position
2603 of the decl. */
a1ab4c31
AC
2604
2605tree
88a94e2b 2606create_label_decl (tree label_name, Node_Id gnat_node)
a1ab4c31 2607{
88a94e2b
EB
2608 tree label_decl
2609 = build_decl (input_location, LABEL_DECL, label_name, void_type_node);
a1ab4c31 2610
88a94e2b
EB
2611 DECL_MODE (label_decl) = VOIDmode;
2612
2613 /* Add this decl to the current binding level. */
2614 gnat_pushdecl (label_decl, gnat_node);
a1ab4c31
AC
2615
2616 return label_decl;
2617}
2618\f
7d7fcb08 2619/* Return a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
a1ab4c31
AC
2620 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
2621 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
7d76717d 2622 PARM_DECL nodes chained through the DECL_CHAIN field).
a1ab4c31 2623
7d7fcb08
EB
2624 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, ARTIFICIAL_FLAG and ATTR_LIST are
2625 used to set the appropriate fields in the FUNCTION_DECL. GNAT_NODE is
2626 used for the position of the decl. */
a1ab4c31
AC
2627
2628tree
7d7fcb08
EB
2629create_subprog_decl (tree subprog_name, tree asm_name, tree subprog_type,
2630 tree param_decl_list, bool inline_flag, bool public_flag,
2631 bool extern_flag, bool artificial_flag,
2632 struct attrib *attr_list, Node_Id gnat_node)
a1ab4c31 2633{
d47d0a8d
EB
2634 tree subprog_decl = build_decl (input_location, FUNCTION_DECL, subprog_name,
2635 subprog_type);
2636 tree result_decl = build_decl (input_location, RESULT_DECL, NULL_TREE,
2637 TREE_TYPE (subprog_type));
7d7fcb08 2638 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
a1ab4c31 2639
d84b344a
EB
2640 /* If this is a non-inline function nested inside an inlined external
2641 function, we cannot honor both requests without cloning the nested
2642 function in the current unit since it is private to the other unit.
2643 We could inline the nested function as well but it's probably better
2644 to err on the side of too little inlining. */
2645 if (!inline_flag
5daed84a 2646 && !public_flag
d84b344a
EB
2647 && current_function_decl
2648 && DECL_DECLARED_INLINE_P (current_function_decl)
a1ab4c31 2649 && DECL_EXTERNAL (current_function_decl))
d84b344a 2650 DECL_DECLARED_INLINE_P (current_function_decl) = 0;
a1ab4c31 2651
7d7fcb08
EB
2652 DECL_ARTIFICIAL (subprog_decl) = artificial_flag;
2653 DECL_EXTERNAL (subprog_decl) = extern_flag;
2654 DECL_DECLARED_INLINE_P (subprog_decl) = inline_flag;
2655 DECL_NO_INLINE_WARNING_P (subprog_decl) = inline_flag && artificial_flag;
2656
2657 TREE_PUBLIC (subprog_decl) = public_flag;
2658 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
a1ab4c31
AC
2659 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
2660 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
a1ab4c31 2661
d47d0a8d
EB
2662 DECL_ARTIFICIAL (result_decl) = 1;
2663 DECL_IGNORED_P (result_decl) = 1;
2664 DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (subprog_type);
2665 DECL_RESULT (subprog_decl) = result_decl;
a1ab4c31 2666
a1ab4c31
AC
2667 if (asm_name)
2668 {
2669 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
2670
2671 /* The expand_main_function circuitry expects "main_identifier_node" to
2672 designate the DECL_NAME of the 'main' entry point, in turn expected
2673 to be declared as the "main" function literally by default. Ada
2674 program entry points are typically declared with a different name
2675 within the binder generated file, exported as 'main' to satisfy the
cfbb663c 2676 system expectations. Force main_identifier_node in this case. */
a1ab4c31 2677 if (asm_name == main_identifier_node)
cfbb663c 2678 DECL_NAME (subprog_decl) = main_identifier_node;
a1ab4c31
AC
2679 }
2680
a1ab4c31
AC
2681 /* Add this decl to the current binding level. */
2682 gnat_pushdecl (subprog_decl, gnat_node);
2683
58c8f770
EB
2684 process_attributes (subprog_decl, attr_list);
2685
a1ab4c31
AC
2686 /* Output the assembler code and/or RTL for the declaration. */
2687 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
2688
2689 return subprog_decl;
2690}
2691\f
2692/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
2693 body. This routine needs to be invoked before processing the declarations
2694 appearing in the subprogram. */
2695
2696void
2697begin_subprog_body (tree subprog_decl)
2698{
2699 tree param_decl;
2700
a1ab4c31
AC
2701 announce_function (subprog_decl);
2702
0ae44446
JR
2703 /* This function is being defined. */
2704 TREE_STATIC (subprog_decl) = 1;
2705
58c8f770
EB
2706 current_function_decl = subprog_decl;
2707
a1ab4c31
AC
2708 /* Enter a new binding level and show that all the parameters belong to
2709 this function. */
2710 gnat_pushlevel ();
a09d56d8 2711
a1ab4c31 2712 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
910ad8de 2713 param_decl = DECL_CHAIN (param_decl))
a1ab4c31
AC
2714 DECL_CONTEXT (param_decl) = subprog_decl;
2715
2716 make_decl_rtl (subprog_decl);
a1ab4c31
AC
2717}
2718
71196d4e 2719/* Finish translating the current subprogram and set its BODY. */
a1ab4c31
AC
2720
2721void
a406865a 2722end_subprog_body (tree body)
a1ab4c31
AC
2723{
2724 tree fndecl = current_function_decl;
2725
bd9c7fb9 2726 /* Attach the BLOCK for this level to the function and pop the level. */
a1ab4c31
AC
2727 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
2728 DECL_INITIAL (fndecl) = current_binding_level->block;
2729 gnat_poplevel ();
2730
a1ab4c31
AC
2731 /* Mark the RESULT_DECL as being in this subprogram. */
2732 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
2733
a963da4d
EB
2734 /* The body should be a BIND_EXPR whose BLOCK is the top-level one. */
2735 if (TREE_CODE (body) == BIND_EXPR)
2736 {
2737 BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
2738 DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
2739 }
2740
a1ab4c31
AC
2741 DECL_SAVED_TREE (fndecl) = body;
2742
228ee426 2743 current_function_decl = decl_function_context (fndecl);
71196d4e
EB
2744}
2745
2746/* Wrap up compilation of SUBPROG_DECL, a subprogram body. */
a1ab4c31 2747
71196d4e
EB
2748void
2749rest_of_subprog_body_compilation (tree subprog_decl)
2750{
a1ab4c31
AC
2751 /* We cannot track the location of errors past this point. */
2752 error_gnat_node = Empty;
2753
2754 /* If we're only annotating types, don't actually compile this function. */
2755 if (type_annotate_only)
2756 return;
2757
a406865a 2758 /* Dump functions before gimplification. */
71196d4e 2759 dump_function (TDI_original, subprog_decl);
a406865a 2760
228ee426 2761 if (!decl_function_context (subprog_decl))
71196d4e 2762 cgraph_finalize_function (subprog_decl, false);
a1ab4c31
AC
2763 else
2764 /* Register this function with cgraph just far enough to get it
2765 added to our parent's nested function list. */
71196d4e 2766 (void) cgraph_get_create_node (subprog_decl);
a1ab4c31
AC
2767}
2768
a1ab4c31
AC
2769tree
2770gnat_builtin_function (tree decl)
2771{
2772 gnat_pushdecl (decl, Empty);
2773 return decl;
2774}
2775
2776/* Return an integer type with the number of bits of precision given by
2777 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2778 it is a signed type. */
2779
2780tree
2781gnat_type_for_size (unsigned precision, int unsignedp)
2782{
2783 tree t;
2784 char type_name[20];
2785
2786 if (precision <= 2 * MAX_BITS_PER_WORD
2787 && signed_and_unsigned_types[precision][unsignedp])
2788 return signed_and_unsigned_types[precision][unsignedp];
2789
2790 if (unsignedp)
2791 t = make_unsigned_type (precision);
2792 else
2793 t = make_signed_type (precision);
2794
2795 if (precision <= 2 * MAX_BITS_PER_WORD)
2796 signed_and_unsigned_types[precision][unsignedp] = t;
2797
2798 if (!TYPE_NAME (t))
2799 {
2800 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
2801 TYPE_NAME (t) = get_identifier (type_name);
2802 }
2803
2804 return t;
2805}
2806
2807/* Likewise for floating-point types. */
2808
2809static tree
2810float_type_for_precision (int precision, enum machine_mode mode)
2811{
2812 tree t;
2813 char type_name[20];
2814
2815 if (float_types[(int) mode])
2816 return float_types[(int) mode];
2817
2818 float_types[(int) mode] = t = make_node (REAL_TYPE);
2819 TYPE_PRECISION (t) = precision;
2820 layout_type (t);
2821
2822 gcc_assert (TYPE_MODE (t) == mode);
2823 if (!TYPE_NAME (t))
2824 {
2825 sprintf (type_name, "FLOAT_%d", precision);
2826 TYPE_NAME (t) = get_identifier (type_name);
2827 }
2828
2829 return t;
2830}
2831
2832/* Return a data type that has machine mode MODE. UNSIGNEDP selects
2833 an unsigned type; otherwise a signed type is returned. */
2834
2835tree
2836gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2837{
2838 if (mode == BLKmode)
2839 return NULL_TREE;
2799d18c
EB
2840
2841 if (mode == VOIDmode)
a1ab4c31 2842 return void_type_node;
2799d18c
EB
2843
2844 if (COMPLEX_MODE_P (mode))
a1ab4c31 2845 return NULL_TREE;
2799d18c
EB
2846
2847 if (SCALAR_FLOAT_MODE_P (mode))
a1ab4c31 2848 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2799d18c
EB
2849
2850 if (SCALAR_INT_MODE_P (mode))
a1ab4c31 2851 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2799d18c
EB
2852
2853 if (VECTOR_MODE_P (mode))
2854 {
2855 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2856 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
2857 if (inner_type)
2858 return build_vector_type_for_mode (inner_type, mode);
2859 }
2860
2861 return NULL_TREE;
a1ab4c31
AC
2862}
2863
2864/* Return the unsigned version of a TYPE_NODE, a scalar type. */
2865
2866tree
2867gnat_unsigned_type (tree type_node)
2868{
2869 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2870
2871 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2872 {
2873 type = copy_node (type);
2874 TREE_TYPE (type) = type_node;
2875 }
2876 else if (TREE_TYPE (type_node)
2877 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2878 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2879 {
2880 type = copy_node (type);
2881 TREE_TYPE (type) = TREE_TYPE (type_node);
2882 }
2883
2884 return type;
2885}
2886
2887/* Return the signed version of a TYPE_NODE, a scalar type. */
2888
2889tree
2890gnat_signed_type (tree type_node)
2891{
2892 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2893
2894 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2895 {
2896 type = copy_node (type);
2897 TREE_TYPE (type) = type_node;
2898 }
2899 else if (TREE_TYPE (type_node)
2900 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2901 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2902 {
2903 type = copy_node (type);
2904 TREE_TYPE (type) = TREE_TYPE (type_node);
2905 }
2906
2907 return type;
2908}
2909
2910/* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2911 transparently converted to each other. */
2912
2913int
2914gnat_types_compatible_p (tree t1, tree t2)
2915{
2916 enum tree_code code;
2917
2918 /* This is the default criterion. */
2919 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
2920 return 1;
2921
2922 /* We only check structural equivalence here. */
2923 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
2924 return 0;
2925
7948ae37
OH
2926 /* Vector types are also compatible if they have the same number of subparts
2927 and the same form of (scalar) element type. */
2928 if (code == VECTOR_TYPE
2929 && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
2930 && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
2931 && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
2932 return 1;
2933
cfa0bd19 2934 /* Array types are also compatible if they are constrained and have the same
96bba5e6 2935 domain(s) and the same component type. */
a1ab4c31 2936 if (code == ARRAY_TYPE
0adef32b
JJ
2937 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
2938 || (TYPE_DOMAIN (t1)
b4680ca1 2939 && TYPE_DOMAIN (t2)
0adef32b
JJ
2940 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
2941 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
2942 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
cfa0bd19 2943 TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
96bba5e6
EB
2944 && (TREE_TYPE (t1) == TREE_TYPE (t2)
2945 || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
2946 && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))))
a1ab4c31
AC
2947 return 1;
2948
a1ab4c31
AC
2949 return 0;
2950}
523e82a7 2951
71196d4e
EB
2952/* Return true if EXPR is a useless type conversion. */
2953
2954bool
2955gnat_useless_type_conversion (tree expr)
2956{
2957 if (CONVERT_EXPR_P (expr)
2958 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
2959 || TREE_CODE (expr) == NON_LVALUE_EXPR)
2960 return gnat_types_compatible_p (TREE_TYPE (expr),
2961 TREE_TYPE (TREE_OPERAND (expr, 0)));
2962
2963 return false;
2964}
2965
523e82a7
EB
2966/* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
2967
2968bool
2969fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
2970 bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
2971{
2972 return TYPE_CI_CO_LIST (t) == cico_list
2973 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
2974 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
2975 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
2976}
a1ab4c31
AC
2977\f
2978/* EXP is an expression for the size of an object. If this size contains
2979 discriminant references, replace them with the maximum (if MAX_P) or
2980 minimum (if !MAX_P) possible value of the discriminant. */
2981
2982tree
2983max_size (tree exp, bool max_p)
2984{
2985 enum tree_code code = TREE_CODE (exp);
2986 tree type = TREE_TYPE (exp);
2987
2988 switch (TREE_CODE_CLASS (code))
2989 {
2990 case tcc_declaration:
2991 case tcc_constant:
2992 return exp;
2993
2994 case tcc_vl_exp:
2995 if (code == CALL_EXPR)
2996 {
f82a627c
EB
2997 tree t, *argarray;
2998 int n, i;
2999
3000 t = maybe_inline_call_in_expr (exp);
3001 if (t)
3002 return max_size (t, max_p);
a1ab4c31 3003
f82a627c
EB
3004 n = call_expr_nargs (exp);
3005 gcc_assert (n > 0);
2bb1fc26 3006 argarray = XALLOCAVEC (tree, n);
a1ab4c31
AC
3007 for (i = 0; i < n; i++)
3008 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
3009 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
3010 }
3011 break;
3012
3013 case tcc_reference:
3014 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
3015 modify. Otherwise, we treat it like a variable. */
3016 if (!CONTAINS_PLACEHOLDER_P (exp))
3017 return exp;
3018
3019 type = TREE_TYPE (TREE_OPERAND (exp, 1));
3020 return
3021 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);
3022
3023 case tcc_comparison:
3024 return max_p ? size_one_node : size_zero_node;
3025
3026 case tcc_unary:
ce3da0d0
EB
3027 if (code == NON_LVALUE_EXPR)
3028 return max_size (TREE_OPERAND (exp, 0), max_p);
3029
3030 return fold_build1 (code, type,
3031 max_size (TREE_OPERAND (exp, 0),
3032 code == NEGATE_EXPR ? !max_p : max_p));
3033
a1ab4c31 3034 case tcc_binary:
ce3da0d0
EB
3035 {
3036 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
3037 tree rhs = max_size (TREE_OPERAND (exp, 1),
3038 code == MINUS_EXPR ? !max_p : max_p);
3039
3040 /* Special-case wanting the maximum value of a MIN_EXPR.
3041 In that case, if one side overflows, return the other. */
3042 if (max_p && code == MIN_EXPR)
3043 {
3044 if (TREE_CODE (rhs) == INTEGER_CST && TREE_OVERFLOW (rhs))
3045 return lhs;
3046
3047 if (TREE_CODE (lhs) == INTEGER_CST && TREE_OVERFLOW (lhs))
3048 return rhs;
3049 }
3050
3051 /* Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
3052 overflowing and the RHS a variable. */
3053 if ((code == MINUS_EXPR || code == PLUS_EXPR)
3054 && TREE_CODE (lhs) == INTEGER_CST
3055 && TREE_OVERFLOW (lhs)
3056 && !TREE_CONSTANT (rhs))
3057 return lhs;
3058
3059 return size_binop (code, lhs, rhs);
3060 }
3061
a1ab4c31
AC
3062 case tcc_expression:
3063 switch (TREE_CODE_LENGTH (code))
3064 {
3065 case 1:
722356ce
EB
3066 if (code == SAVE_EXPR)
3067 return exp;
ce3da0d0
EB
3068
3069 return fold_build1 (code, type,
3070 max_size (TREE_OPERAND (exp, 0), max_p));
a1ab4c31
AC
3071
3072 case 2:
3073 if (code == COMPOUND_EXPR)
3074 return max_size (TREE_OPERAND (exp, 1), max_p);
3075
ce3da0d0
EB
3076 return fold_build2 (code, type,
3077 max_size (TREE_OPERAND (exp, 0), max_p),
3078 max_size (TREE_OPERAND (exp, 1), max_p));
a1ab4c31
AC
3079
3080 case 3:
722356ce 3081 if (code == COND_EXPR)
a1ab4c31
AC
3082 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
3083 max_size (TREE_OPERAND (exp, 1), max_p),
3084 max_size (TREE_OPERAND (exp, 2), max_p));
ce3da0d0
EB
3085
3086 default:
3087 break;
a1ab4c31
AC
3088 }
3089
3090 /* Other tree classes cannot happen. */
3091 default:
3092 break;
3093 }
3094
3095 gcc_unreachable ();
3096}
3097\f
3098/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
3099 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
3100 Return a constructor for the template. */
3101
3102tree
3103build_template (tree template_type, tree array_type, tree expr)
3104{
9771b263 3105 vec<constructor_elt, va_gc> *template_elts = NULL;
a1ab4c31
AC
3106 tree bound_list = NULL_TREE;
3107 tree field;
3108
3109 while (TREE_CODE (array_type) == RECORD_TYPE
315cff15 3110 && (TYPE_PADDING_P (array_type)
a1ab4c31
AC
3111 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
3112 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
3113
3114 if (TREE_CODE (array_type) == ARRAY_TYPE
3115 || (TREE_CODE (array_type) == INTEGER_TYPE
3116 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
3117 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
3118
3119 /* First make the list for a CONSTRUCTOR for the template. Go down the
3120 field list of the template instead of the type chain because this
3121 array might be an Ada array of arrays and we can't tell where the
3122 nested arrays stop being the underlying object. */
3123
3124 for (field = TYPE_FIELDS (template_type); field;
3125 (bound_list
3126 ? (bound_list = TREE_CHAIN (bound_list))
3127 : (array_type = TREE_TYPE (array_type))),
910ad8de 3128 field = DECL_CHAIN (DECL_CHAIN (field)))
a1ab4c31
AC
3129 {
3130 tree bounds, min, max;
3131
3132 /* If we have a bound list, get the bounds from there. Likewise
3133 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
3134 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
3135 This will give us a maximum range. */
3136 if (bound_list)
3137 bounds = TREE_VALUE (bound_list);
3138 else if (TREE_CODE (array_type) == ARRAY_TYPE)
3139 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
3140 else if (expr && TREE_CODE (expr) == PARM_DECL
3141 && DECL_BY_COMPONENT_PTR_P (expr))
3142 bounds = TREE_TYPE (field);
3143 else
3144 gcc_unreachable ();
3145
3146 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
910ad8de 3147 max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));
a1ab4c31
AC
3148
3149 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
3150 substitute it from OBJECT. */
3151 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
3152 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
3153
0e228dd9 3154 CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
910ad8de 3155 CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
a1ab4c31
AC
3156 }
3157
0e228dd9 3158 return gnat_build_constructor (template_type, template_elts);
a1ab4c31
AC
3159}
3160\f
31a5a547
EB
3161/* Helper routine to make a descriptor field. FIELD_LIST is the list of decls
3162 being built; the new decl is chained on to the front of the list. */
3163
3164static tree
3165make_descriptor_field (const char *name, tree type, tree rec_type,
3166 tree initial, tree field_list)
3167{
3168 tree field
3169 = create_field_decl (get_identifier (name), type, rec_type, NULL_TREE,
3170 NULL_TREE, 0, 0);
3171
3172 DECL_INITIAL (field) = initial;
3173 DECL_CHAIN (field) = field_list;
3174 return field;
3175}
3176
58c8f770
EB
3177/* Build a 32-bit VMS descriptor from a Mechanism_Type, which must specify a
3178 descriptor type, and the GCC type of an object. Each FIELD_DECL in the
3179 type contains in its DECL_INITIAL the expression to use when a constructor
3180 is made for the type. GNAT_ENTITY is an entity used to print out an error
3181 message if the mechanism cannot be applied to an object of that type and
3182 also for the name. */
a1ab4c31
AC
3183
3184tree
d628c015 3185build_vms_descriptor32 (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
a1ab4c31
AC
3186{
3187 tree record_type = make_node (RECORD_TYPE);
31a5a547 3188 tree pointer32_type, pointer64_type;
788e5046 3189 tree field_list = NULL_TREE;
31a5a547
EB
3190 int klass, ndim, i, dtype = 0;
3191 tree inner_type, tem;
a1ab4c31 3192 tree *idx_arr;
a1ab4c31
AC
3193
3194 /* If TYPE is an unconstrained array, use the underlying array type. */
3195 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3196 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
3197
3198 /* If this is an array, compute the number of dimensions in the array,
3199 get the index types, and point to the inner type. */
3200 if (TREE_CODE (type) != ARRAY_TYPE)
3201 ndim = 0;
3202 else
3203 for (ndim = 1, inner_type = type;
3204 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
3205 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
3206 ndim++, inner_type = TREE_TYPE (inner_type))
3207 ;
3208
2bb1fc26 3209 idx_arr = XALLOCAVEC (tree, ndim);
a1ab4c31 3210
d628c015 3211 if (mech != By_Descriptor_NCA && mech != By_Short_Descriptor_NCA
a1ab4c31
AC
3212 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
3213 for (i = ndim - 1, inner_type = type;
3214 i >= 0;
3215 i--, inner_type = TREE_TYPE (inner_type))
3216 idx_arr[i] = TYPE_DOMAIN (inner_type);
3217 else
3218 for (i = 0, inner_type = type;
3219 i < ndim;
3220 i++, inner_type = TREE_TYPE (inner_type))
3221 idx_arr[i] = TYPE_DOMAIN (inner_type);
3222
3223 /* Now get the DTYPE value. */
3224 switch (TREE_CODE (type))
3225 {
3226 case INTEGER_TYPE:
3227 case ENUMERAL_TYPE:
01ddebf2 3228 case BOOLEAN_TYPE:
a1ab4c31
AC
3229 if (TYPE_VAX_FLOATING_POINT_P (type))
3230 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
3231 {
3232 case 6:
3233 dtype = 10;
3234 break;
3235 case 9:
3236 dtype = 11;
3237 break;
3238 case 15:
3239 dtype = 27;
3240 break;
3241 }
3242 else
3243 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
3244 {
3245 case 8:
3246 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
3247 break;
3248 case 16:
3249 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
3250 break;
3251 case 32:
3252 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
3253 break;
3254 case 64:
3255 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
3256 break;
3257 case 128:
3258 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
3259 break;
3260 }
3261 break;
3262
3263 case REAL_TYPE:
3264 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
3265 break;
3266
3267 case COMPLEX_TYPE:
3268 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
3269 && TYPE_VAX_FLOATING_POINT_P (type))
3270 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
3271 {
3272 case 6:
3273 dtype = 12;
3274 break;
3275 case 9:
3276 dtype = 13;
3277 break;
3278 case 15:
3279 dtype = 29;
3280 }
3281 else
3282 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
3283 break;
3284
3285 case ARRAY_TYPE:
3286 dtype = 14;
3287 break;
3288
3289 default:
3290 break;
3291 }
3292
3293 /* Get the CLASS value. */
3294 switch (mech)
3295 {
3296 case By_Descriptor_A:
d628c015 3297 case By_Short_Descriptor_A:
c6bd4220 3298 klass = 4;
a1ab4c31
AC
3299 break;
3300 case By_Descriptor_NCA:
d628c015 3301 case By_Short_Descriptor_NCA:
c6bd4220 3302 klass = 10;
a1ab4c31
AC
3303 break;
3304 case By_Descriptor_SB:
d628c015 3305 case By_Short_Descriptor_SB:
c6bd4220 3306 klass = 15;
a1ab4c31
AC
3307 break;
3308 case By_Descriptor:
d628c015 3309 case By_Short_Descriptor:
a1ab4c31 3310 case By_Descriptor_S:
d628c015 3311 case By_Short_Descriptor_S:
a1ab4c31 3312 default:
c6bd4220 3313 klass = 1;
a1ab4c31
AC
3314 break;
3315 }
3316
58c8f770
EB
3317 /* Make the type for a descriptor for VMS. The first four fields are the
3318 same for all types. */
82b481ed
EB
3319 field_list
3320 = make_descriptor_field ("LENGTH", gnat_type_for_size (16, 1), record_type,
3321 size_in_bytes ((mech == By_Descriptor_A
3322 || mech == By_Short_Descriptor_A)
3323 ? inner_type : type),
3324 field_list);
3325 field_list
3326 = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1), record_type,
3327 size_int (dtype), field_list);
3328 field_list
3329 = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1), record_type,
3330 size_int (klass), field_list);
a1ab4c31 3331
a1ab4c31 3332 pointer32_type = build_pointer_type_for_mode (type, SImode, false);
31a5a547
EB
3333 pointer64_type = build_pointer_type_for_mode (type, DImode, false);
3334
3335 /* Ensure that only 32-bit pointers are passed in 32-bit descriptors. Note
3336 that we cannot build a template call to the CE routine as it would get a
3337 wrong source location; instead we use a second placeholder for it. */
3338 tem = build_unary_op (ADDR_EXPR, pointer64_type,
3339 build0 (PLACEHOLDER_EXPR, type));
3340 tem = build3 (COND_EXPR, pointer32_type,
f542c405
EB
3341 Pmode != SImode
3342 ? build_binary_op (GE_EXPR, boolean_type_node, tem,
3343 build_int_cstu (pointer64_type, 0x80000000))
3344 : boolean_false_node,
31a5a547
EB
3345 build0 (PLACEHOLDER_EXPR, void_type_node),
3346 convert (pointer32_type, tem));
a1ab4c31 3347
82b481ed 3348 field_list
31a5a547 3349 = make_descriptor_field ("POINTER", pointer32_type, record_type, tem,
82b481ed 3350 field_list);
a1ab4c31
AC
3351
3352 switch (mech)
3353 {
3354 case By_Descriptor:
d628c015 3355 case By_Short_Descriptor:
a1ab4c31 3356 case By_Descriptor_S:
d628c015 3357 case By_Short_Descriptor_S:
a1ab4c31
AC
3358 break;
3359
3360 case By_Descriptor_SB:
d628c015 3361 case By_Short_Descriptor_SB:
82b481ed
EB
3362 field_list
3363 = make_descriptor_field ("SB_L1", gnat_type_for_size (32, 1),
3364 record_type,
3365 (TREE_CODE (type) == ARRAY_TYPE
3366 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type))
3367 : size_zero_node),
3368 field_list);
3369 field_list
3370 = make_descriptor_field ("SB_U1", gnat_type_for_size (32, 1),
3371 record_type,
3372 (TREE_CODE (type) == ARRAY_TYPE
3373 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type))
3374 : size_zero_node),
3375 field_list);
a1ab4c31
AC
3376 break;
3377
3378 case By_Descriptor_A:
d628c015 3379 case By_Short_Descriptor_A:
a1ab4c31 3380 case By_Descriptor_NCA:
d628c015 3381 case By_Short_Descriptor_NCA:
82b481ed
EB
3382 field_list
3383 = make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
3384 record_type, size_zero_node, field_list);
3385
3386 field_list
3387 = make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
3388 record_type, size_zero_node, field_list);
3389
82b481ed
EB
3390 field_list
3391 = make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
3392 record_type,
3393 size_int ((mech == By_Descriptor_NCA
3394 || mech == By_Short_Descriptor_NCA)
3395 ? 0
3396 /* Set FL_COLUMN, FL_COEFF, and
3397 FL_BOUNDS. */
3398 : (TREE_CODE (type) == ARRAY_TYPE
3399 && TYPE_CONVENTION_FORTRAN_P
3400 (type)
3401 ? 224 : 192)),
3402 field_list);
3403
3404 field_list
3405 = make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
3406 record_type, size_int (ndim), field_list);
3407
3408 field_list
3409 = make_descriptor_field ("ARSIZE", gnat_type_for_size (32, 1),
3410 record_type, size_in_bytes (type),
3411 field_list);
a1ab4c31
AC
3412
3413 /* Now build a pointer to the 0,0,0... element. */
3414 tem = build0 (PLACEHOLDER_EXPR, type);
3415 for (i = 0, inner_type = type; i < ndim;
3416 i++, inner_type = TREE_TYPE (inner_type))
3417 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
3418 convert (TYPE_DOMAIN (inner_type), size_zero_node),
3419 NULL_TREE, NULL_TREE);
3420
82b481ed
EB
3421 field_list
3422 = make_descriptor_field ("A0", pointer32_type, record_type,
3423 build1 (ADDR_EXPR, pointer32_type, tem),
3424 field_list);
a1ab4c31
AC
3425
3426 /* Next come the addressing coefficients. */
3427 tem = size_one_node;
3428 for (i = 0; i < ndim; i++)
3429 {
3430 char fname[3];
3431 tree idx_length
3432 = size_binop (MULT_EXPR, tem,
3433 size_binop (PLUS_EXPR,
3434 size_binop (MINUS_EXPR,
3435 TYPE_MAX_VALUE (idx_arr[i]),
3436 TYPE_MIN_VALUE (idx_arr[i])),
3437 size_int (1)));
3438
d628c015
DR
3439 fname[0] = ((mech == By_Descriptor_NCA ||
3440 mech == By_Short_Descriptor_NCA) ? 'S' : 'M');
a1ab4c31 3441 fname[1] = '0' + i, fname[2] = 0;
82b481ed
EB
3442 field_list
3443 = make_descriptor_field (fname, gnat_type_for_size (32, 1),
3444 record_type, idx_length, field_list);
a1ab4c31 3445
d628c015 3446 if (mech == By_Descriptor_NCA || mech == By_Short_Descriptor_NCA)
a1ab4c31
AC
3447 tem = idx_length;
3448 }
3449
3450 /* Finally here are the bounds. */
3451 for (i = 0; i < ndim; i++)
3452 {
3453 char fname[3];
3454
3455 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
82b481ed
EB
3456 field_list
3457 = make_descriptor_field (fname, gnat_type_for_size (32, 1),
3458 record_type, TYPE_MIN_VALUE (idx_arr[i]),
3459 field_list);
a1ab4c31
AC
3460
3461 fname[0] = 'U';
82b481ed
EB
3462 field_list
3463 = make_descriptor_field (fname, gnat_type_for_size (32, 1),
3464 record_type, TYPE_MAX_VALUE (idx_arr[i]),
3465 field_list);
a1ab4c31
AC
3466 }
3467 break;
3468
3469 default:
3470 post_error ("unsupported descriptor type for &", gnat_entity);
3471 }
3472
10069d53 3473 TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC");
788e5046 3474 finish_record_type (record_type, nreverse (field_list), 0, false);
a1ab4c31
AC
3475 return record_type;
3476}
3477
58c8f770
EB
3478/* Build a 64-bit VMS descriptor from a Mechanism_Type, which must specify a
3479 descriptor type, and the GCC type of an object. Each FIELD_DECL in the
3480 type contains in its DECL_INITIAL the expression to use when a constructor
3481 is made for the type. GNAT_ENTITY is an entity used to print out an error
3482 message if the mechanism cannot be applied to an object of that type and
3483 also for the name. */
6ca2b0a0
DR
3484
3485tree
d628c015 3486build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
6ca2b0a0 3487{
31a5a547 3488 tree record_type = make_node (RECORD_TYPE);
6ca2b0a0 3489 tree pointer64_type;
31a5a547
EB
3490 tree field_list = NULL_TREE;
3491 int klass, ndim, i, dtype = 0;
3492 tree inner_type, tem;
6ca2b0a0 3493 tree *idx_arr;
6ca2b0a0
DR
3494
3495 /* If TYPE is an unconstrained array, use the underlying array type. */
3496 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3497 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
3498
3499 /* If this is an array, compute the number of dimensions in the array,
3500 get the index types, and point to the inner type. */
3501 if (TREE_CODE (type) != ARRAY_TYPE)
3502 ndim = 0;
3503 else
3504 for (ndim = 1, inner_type = type;
3505 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
3506 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
3507 ndim++, inner_type = TREE_TYPE (inner_type))
3508 ;
3509
2bb1fc26 3510 idx_arr = XALLOCAVEC (tree, ndim);
6ca2b0a0
DR
3511
3512 if (mech != By_Descriptor_NCA
3513 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
3514 for (i = ndim - 1, inner_type = type;
3515 i >= 0;
3516 i--, inner_type = TREE_TYPE (inner_type))
3517 idx_arr[i] = TYPE_DOMAIN (inner_type);
3518 else
3519 for (i = 0, inner_type = type;
3520 i < ndim;
3521 i++, inner_type = TREE_TYPE (inner_type))
3522 idx_arr[i] = TYPE_DOMAIN (inner_type);
3523
3524 /* Now get the DTYPE value. */
3525 switch (TREE_CODE (type))
3526 {
3527 case INTEGER_TYPE:
3528 case ENUMERAL_TYPE:
01ddebf2 3529 case BOOLEAN_TYPE:
6ca2b0a0
DR
3530 if (TYPE_VAX_FLOATING_POINT_P (type))
3531 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
3532 {
3533 case 6:
3534 dtype = 10;
3535 break;
3536 case 9:
3537 dtype = 11;
3538 break;
3539 case 15:
3540 dtype = 27;
3541 break;
3542 }
3543 else
3544 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
3545 {
3546 case 8:
3547 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
3548 break;
3549 case 16:
3550 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
3551 break;
3552 case 32:
3553 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
3554 break;
3555 case 64:
3556 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
3557 break;
3558 case 128:
3559 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
3560 break;
3561 }
3562 break;
3563
3564 case REAL_TYPE:
3565 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
3566 break;
3567
3568 case COMPLEX_TYPE:
3569 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
3570 && TYPE_VAX_FLOATING_POINT_P (type))
3571 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
3572 {
3573 case 6:
3574 dtype = 12;
3575 break;
3576 case 9:
3577 dtype = 13;
3578 break;
3579 case 15:
3580 dtype = 29;
3581 }
3582 else
3583 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
3584 break;
3585
3586 case ARRAY_TYPE:
3587 dtype = 14;
3588 break;
3589
3590 default:
3591 break;
3592 }
3593
3594 /* Get the CLASS value. */
3595 switch (mech)
3596 {
3597 case By_Descriptor_A:
c6bd4220 3598 klass = 4;
6ca2b0a0
DR
3599 break;
3600 case By_Descriptor_NCA:
c6bd4220 3601 klass = 10;
6ca2b0a0
DR
3602 break;
3603 case By_Descriptor_SB:
c6bd4220 3604 klass = 15;
6ca2b0a0
DR
3605 break;
3606 case By_Descriptor:
3607 case By_Descriptor_S:
3608 default:
c6bd4220 3609 klass = 1;
6ca2b0a0
DR
3610 break;
3611 }
3612
58c8f770 3613 /* Make the type for a 64-bit descriptor for VMS. The first six fields
6ca2b0a0 3614 are the same for all types. */
31a5a547 3615 field_list
788e5046 3616 = make_descriptor_field ("MBO", gnat_type_for_size (16, 1),
31a5a547
EB
3617 record_type, size_int (1), field_list);
3618 field_list
788e5046 3619 = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1),
31a5a547
EB
3620 record_type, size_int (dtype), field_list);
3621 field_list
788e5046 3622 = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1),
31a5a547
EB
3623 record_type, size_int (klass), field_list);
3624 field_list
788e5046 3625 = make_descriptor_field ("MBMO", gnat_type_for_size (32, 1),
f54ee980 3626 record_type, size_int (-1), field_list);
31a5a547 3627 field_list
788e5046 3628 = make_descriptor_field ("LENGTH", gnat_type_for_size (64, 1),
31a5a547 3629 record_type,
788e5046 3630 size_in_bytes (mech == By_Descriptor_A
82b481ed 3631 ? inner_type : type),
31a5a547 3632 field_list);
6ca2b0a0
DR
3633
3634 pointer64_type = build_pointer_type_for_mode (type, DImode, false);
3635
31a5a547
EB
3636 field_list
3637 = make_descriptor_field ("POINTER", pointer64_type, record_type,
788e5046 3638 build_unary_op (ADDR_EXPR, pointer64_type,
82b481ed 3639 build0 (PLACEHOLDER_EXPR, type)),
31a5a547 3640 field_list);
6ca2b0a0
DR
3641
3642 switch (mech)
3643 {
3644 case By_Descriptor:
3645 case By_Descriptor_S:
3646 break;
3647
3648 case By_Descriptor_SB:
31a5a547 3649 field_list
788e5046 3650 = make_descriptor_field ("SB_L1", gnat_type_for_size (64, 1),
31a5a547 3651 record_type,
788e5046
NF
3652 (TREE_CODE (type) == ARRAY_TYPE
3653 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type))
82b481ed 3654 : size_zero_node),
31a5a547
EB
3655 field_list);
3656 field_list
788e5046 3657 = make_descriptor_field ("SB_U1", gnat_type_for_size (64, 1),
31a5a547 3658 record_type,
788e5046
NF
3659 (TREE_CODE (type) == ARRAY_TYPE
3660 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type))
82b481ed 3661 : size_zero_node),
31a5a547 3662 field_list);
6ca2b0a0
DR
3663 break;
3664
3665 case By_Descriptor_A:
3666 case By_Descriptor_NCA:
31a5a547 3667 field_list
788e5046 3668 = make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
31a5a547 3669 record_type, size_zero_node, field_list);
788e5046 3670
31a5a547 3671 field_list
788e5046 3672 = make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
31a5a547 3673 record_type, size_zero_node, field_list);
788e5046
NF
3674
3675 dtype = (mech == By_Descriptor_NCA
3676 ? 0
3677 /* Set FL_COLUMN, FL_COEFF, and
3678 FL_BOUNDS. */
3679 : (TREE_CODE (type) == ARRAY_TYPE
3680 && TYPE_CONVENTION_FORTRAN_P (type)
3681 ? 224 : 192));
31a5a547 3682 field_list
788e5046 3683 = make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
31a5a547
EB
3684 record_type, size_int (dtype),
3685 field_list);
6ca2b0a0 3686
31a5a547 3687 field_list
788e5046 3688 = make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
31a5a547 3689 record_type, size_int (ndim), field_list);
788e5046 3690
31a5a547 3691 field_list
788e5046 3692 = make_descriptor_field ("MBZ", gnat_type_for_size (32, 1),
31a5a547
EB
3693 record_type, size_int (0), field_list);
3694 field_list
788e5046 3695 = make_descriptor_field ("ARSIZE", gnat_type_for_size (64, 1),
31a5a547
EB
3696 record_type, size_in_bytes (type),
3697 field_list);
6ca2b0a0
DR
3698
3699 /* Now build a pointer to the 0,0,0... element. */
3700 tem = build0 (PLACEHOLDER_EXPR, type);
3701 for (i = 0, inner_type = type; i < ndim;
3702 i++, inner_type = TREE_TYPE (inner_type))
3703 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
3704 convert (TYPE_DOMAIN (inner_type), size_zero_node),
3705 NULL_TREE, NULL_TREE);
3706
31a5a547
EB
3707 field_list
3708 = make_descriptor_field ("A0", pointer64_type, record_type,
788e5046 3709 build1 (ADDR_EXPR, pointer64_type, tem),
31a5a547 3710 field_list);
6ca2b0a0
DR
3711
3712 /* Next come the addressing coefficients. */
3713 tem = size_one_node;
3714 for (i = 0; i < ndim; i++)
3715 {
3716 char fname[3];
3717 tree idx_length
3718 = size_binop (MULT_EXPR, tem,
3719 size_binop (PLUS_EXPR,
3720 size_binop (MINUS_EXPR,
3721 TYPE_MAX_VALUE (idx_arr[i]),
3722 TYPE_MIN_VALUE (idx_arr[i])),
3723 size_int (1)));
3724
3725 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
3726 fname[1] = '0' + i, fname[2] = 0;
31a5a547 3727 field_list
788e5046 3728 = make_descriptor_field (fname, gnat_type_for_size (64, 1),
31a5a547 3729 record_type, idx_length, field_list);
6ca2b0a0
DR
3730
3731 if (mech == By_Descriptor_NCA)
3732 tem = idx_length;
3733 }
3734
3735 /* Finally here are the bounds. */
3736 for (i = 0; i < ndim; i++)
3737 {
3738 char fname[3];
3739
3740 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
31a5a547 3741 field_list
788e5046 3742 = make_descriptor_field (fname, gnat_type_for_size (64, 1),
31a5a547
EB
3743 record_type,
3744 TYPE_MIN_VALUE (idx_arr[i]), field_list);
6ca2b0a0
DR
3745
3746 fname[0] = 'U';
31a5a547 3747 field_list
788e5046 3748 = make_descriptor_field (fname, gnat_type_for_size (64, 1),
31a5a547
EB
3749 record_type,
3750 TYPE_MAX_VALUE (idx_arr[i]), field_list);
6ca2b0a0
DR
3751 }
3752 break;
3753
3754 default:
3755 post_error ("unsupported descriptor type for &", gnat_entity);
3756 }
3757
31a5a547
EB
3758 TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC64");
3759 finish_record_type (record_type, nreverse (field_list), 0, false);
3760 return record_type;
6ca2b0a0
DR
3761}
3762
31a5a547
EB
3763/* Fill in a VMS descriptor of GNU_TYPE for GNU_EXPR and return the result.
3764 GNAT_ACTUAL is the actual parameter for which the descriptor is built. */
a1ab4c31 3765
31a5a547
EB
3766tree
3767fill_vms_descriptor (tree gnu_type, tree gnu_expr, Node_Id gnat_actual)
a1ab4c31 3768{
9771b263 3769 vec<constructor_elt, va_gc> *v = NULL;
31a5a547 3770 tree field;
a1ab4c31 3771
31a5a547
EB
3772 gnu_expr = maybe_unconstrained_array (gnu_expr);
3773 gnu_expr = gnat_protect_expr (gnu_expr);
3774 gnat_mark_addressable (gnu_expr);
3775
3776 /* We may need to substitute both GNU_EXPR and a CALL_EXPR to the raise CE
3777 routine in case we have a 32-bit descriptor. */
3778 gnu_expr = build2 (COMPOUND_EXPR, void_type_node,
3779 build_call_raise (CE_Range_Check_Failed, gnat_actual,
3780 N_Raise_Constraint_Error),
3781 gnu_expr);
3782
3783 for (field = TYPE_FIELDS (gnu_type); field; field = DECL_CHAIN (field))
3784 {
3785 tree value
3786 = convert (TREE_TYPE (field),
3787 SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_INITIAL (field),
3788 gnu_expr));
3789 CONSTRUCTOR_APPEND_ELT (v, field, value);
3790 }
3791
3792 return gnat_build_constructor (gnu_type, v);
a1ab4c31
AC
3793}
3794
d628c015
DR
3795/* Convert GNU_EXPR, a pointer to a 64bit VMS descriptor, to GNU_TYPE, a
3796 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3797 which the VMS descriptor is passed. */
a1ab4c31
AC
3798
3799static tree
d628c015
DR
3800convert_vms_descriptor64 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3801{
3802 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3803 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3804 /* The CLASS field is the 3rd field in the descriptor. */
910ad8de 3805 tree klass = DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type)));
d628c015 3806 /* The POINTER field is the 6th field in the descriptor. */
910ad8de 3807 tree pointer = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (klass)));
d628c015
DR
3808
3809 /* Retrieve the value of the POINTER field. */
3810 tree gnu_expr64
58c8f770 3811 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
d628c015
DR
3812
3813 if (POINTER_TYPE_P (gnu_type))
3814 return convert (gnu_type, gnu_expr64);
3815
315cff15 3816 else if (TYPE_IS_FAT_POINTER_P (gnu_type))
d628c015
DR
3817 {
3818 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
7d76717d 3819 tree p_bounds_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (gnu_type)));
d628c015
DR
3820 tree template_type = TREE_TYPE (p_bounds_type);
3821 tree min_field = TYPE_FIELDS (template_type);
7d76717d 3822 tree max_field = DECL_CHAIN (TYPE_FIELDS (template_type));
6bf68a93 3823 tree template_tree, template_addr, aflags, dimct, t, u;
d628c015 3824 /* See the head comment of build_vms_descriptor. */
c6bd4220 3825 int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
d628c015 3826 tree lfield, ufield;
9771b263 3827 vec<constructor_elt, va_gc> *v;
d628c015 3828
86060344 3829 /* Convert POINTER to the pointer-to-array type. */
d628c015
DR
3830 gnu_expr64 = convert (p_array_type, gnu_expr64);
3831
c6bd4220 3832 switch (iklass)
d628c015
DR
3833 {
3834 case 1: /* Class S */
3835 case 15: /* Class SB */
3836 /* Build {1, LENGTH} template; LENGTH64 is the 5th field. */
9771b263 3837 vec_alloc (v, 2);
910ad8de 3838 t = DECL_CHAIN (DECL_CHAIN (klass));
d628c015 3839 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
0e228dd9
NF
3840 CONSTRUCTOR_APPEND_ELT (v, min_field,
3841 convert (TREE_TYPE (min_field),
3842 integer_one_node));
3843 CONSTRUCTOR_APPEND_ELT (v, max_field,
3844 convert (TREE_TYPE (max_field), t));
3845 template_tree = gnat_build_constructor (template_type, v);
6bf68a93 3846 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
d628c015
DR
3847
3848 /* For class S, we are done. */
c6bd4220 3849 if (iklass == 1)
d628c015
DR
3850 break;
3851
3852 /* Test that we really have a SB descriptor, like DEC Ada. */
c6bd4220
EB
3853 t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
3854 u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
1139f2e8 3855 u = build_binary_op (EQ_EXPR, boolean_type_node, t, u);
d628c015
DR
3856 /* If so, there is already a template in the descriptor and
3857 it is located right after the POINTER field. The fields are
3858 64bits so they must be repacked. */
7d76717d 3859 t = DECL_CHAIN (pointer);
d628c015
DR
3860 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3861 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3862
7d76717d 3863 t = DECL_CHAIN (t);
d628c015
DR
3864 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3865 ufield = convert
910ad8de 3866 (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type))), ufield);
d628c015
DR
3867
3868 /* Build the template in the form of a constructor. */
9771b263 3869 vec_alloc (v, 2);
0e228dd9 3870 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (template_type), lfield);
7d76717d 3871 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (template_type)),
0e228dd9
NF
3872 ufield);
3873 template_tree = gnat_build_constructor (template_type, v);
d628c015
DR
3874
3875 /* Otherwise use the {1, LENGTH} template we build above. */
3876 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3877 build_unary_op (ADDR_EXPR, p_bounds_type,
6bf68a93 3878 template_tree),
d628c015
DR
3879 template_addr);
3880 break;
3881
3882 case 4: /* Class A */
3883 /* The AFLAGS field is the 3rd field after the pointer in the
3884 descriptor. */
910ad8de 3885 t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer)));
d628c015
DR
3886 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3887 /* The DIMCT field is the next field in the descriptor after
3888 aflags. */
7d76717d 3889 t = DECL_CHAIN (t);
d628c015
DR
3890 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3891 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3892 or FL_COEFF or FL_BOUNDS not set. */
3893 u = build_int_cst (TREE_TYPE (aflags), 192);
1139f2e8
EB
3894 u = build_binary_op (TRUTH_OR_EXPR, boolean_type_node,
3895 build_binary_op (NE_EXPR, boolean_type_node,
d628c015
DR
3896 dimct,
3897 convert (TREE_TYPE (dimct),
3898 size_one_node)),
1139f2e8 3899 build_binary_op (NE_EXPR, boolean_type_node,
d628c015
DR
3900 build2 (BIT_AND_EXPR,
3901 TREE_TYPE (aflags),
3902 aflags, u),
3903 u));
3904 /* There is already a template in the descriptor and it is located
3905 in block 3. The fields are 64bits so they must be repacked. */
910ad8de 3906 t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN
d628c015
DR
3907 (t)))));
3908 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3909 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3910
7d76717d 3911 t = DECL_CHAIN (t);
d628c015
DR
3912 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3913 ufield = convert
910ad8de 3914 (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type))), ufield);
d628c015
DR
3915
3916 /* Build the template in the form of a constructor. */
9771b263 3917 vec_alloc (v, 2);
0e228dd9 3918 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (template_type), lfield);
910ad8de 3919 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (template_type)),
0e228dd9
NF
3920 ufield);
3921 template_tree = gnat_build_constructor (template_type, v);
f76d6e6f 3922 template_tree = build3 (COND_EXPR, template_type, u,
d628c015
DR
3923 build_call_raise (CE_Length_Check_Failed, Empty,
3924 N_Raise_Constraint_Error),
6bf68a93 3925 template_tree);
c6bd4220
EB
3926 template_addr
3927 = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
d628c015
DR
3928 break;
3929
3930 case 10: /* Class NCA */
3931 default:
3932 post_error ("unsupported descriptor type for &", gnat_subprog);
3933 template_addr = integer_zero_node;
3934 break;
3935 }
3936
3937 /* Build the fat pointer in the form of a constructor. */
9771b263 3938 vec_alloc (v, 2);
0e228dd9 3939 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (gnu_type), gnu_expr64);
910ad8de 3940 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (gnu_type)),
0e228dd9
NF
3941 template_addr);
3942 return gnat_build_constructor (gnu_type, v);
d628c015
DR
3943 }
3944
3945 else
3946 gcc_unreachable ();
3947}
3948
3949/* Convert GNU_EXPR, a pointer to a 32bit VMS descriptor, to GNU_TYPE, a
3950 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3951 which the VMS descriptor is passed. */
3952
3953static tree
3954convert_vms_descriptor32 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
a1ab4c31
AC
3955{
3956 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3957 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3958 /* The CLASS field is the 3rd field in the descriptor. */
910ad8de 3959 tree klass = DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type)));
a1ab4c31 3960 /* The POINTER field is the 4th field in the descriptor. */
910ad8de 3961 tree pointer = DECL_CHAIN (klass);
a1ab4c31
AC
3962
3963 /* Retrieve the value of the POINTER field. */
d628c015 3964 tree gnu_expr32
a1ab4c31
AC
3965 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
3966
3967 if (POINTER_TYPE_P (gnu_type))
d628c015 3968 return convert (gnu_type, gnu_expr32);
a1ab4c31 3969
315cff15 3970 else if (TYPE_IS_FAT_POINTER_P (gnu_type))
a1ab4c31
AC
3971 {
3972 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
7d76717d 3973 tree p_bounds_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (gnu_type)));
a1ab4c31
AC
3974 tree template_type = TREE_TYPE (p_bounds_type);
3975 tree min_field = TYPE_FIELDS (template_type);
7d76717d 3976 tree max_field = DECL_CHAIN (TYPE_FIELDS (template_type));
6bf68a93 3977 tree template_tree, template_addr, aflags, dimct, t, u;
a1ab4c31 3978 /* See the head comment of build_vms_descriptor. */
c6bd4220 3979 int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
9771b263 3980 vec<constructor_elt, va_gc> *v;
a1ab4c31 3981
86060344 3982 /* Convert POINTER to the pointer-to-array type. */
d628c015 3983 gnu_expr32 = convert (p_array_type, gnu_expr32);
a1ab4c31 3984
c6bd4220 3985 switch (iklass)
a1ab4c31
AC
3986 {
3987 case 1: /* Class S */
3988 case 15: /* Class SB */
3989 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
9771b263 3990 vec_alloc (v, 2);
a1ab4c31
AC
3991 t = TYPE_FIELDS (desc_type);
3992 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
0e228dd9
NF
3993 CONSTRUCTOR_APPEND_ELT (v, min_field,
3994 convert (TREE_TYPE (min_field),
3995 integer_one_node));
3996 CONSTRUCTOR_APPEND_ELT (v, max_field,
3997 convert (TREE_TYPE (max_field), t));
3998 template_tree = gnat_build_constructor (template_type, v);
6bf68a93 3999 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
a1ab4c31
AC
4000
4001 /* For class S, we are done. */
c6bd4220 4002 if (iklass == 1)
a1ab4c31
AC
4003 break;
4004
4005 /* Test that we really have a SB descriptor, like DEC Ada. */
c6bd4220
EB
4006 t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
4007 u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
1139f2e8 4008 u = build_binary_op (EQ_EXPR, boolean_type_node, t, u);
a1ab4c31
AC
4009 /* If so, there is already a template in the descriptor and
4010 it is located right after the POINTER field. */
7d76717d 4011 t = DECL_CHAIN (pointer);
c6bd4220
EB
4012 template_tree
4013 = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
a1ab4c31
AC
4014 /* Otherwise use the {1, LENGTH} template we build above. */
4015 template_addr = build3 (COND_EXPR, p_bounds_type, u,
4016 build_unary_op (ADDR_EXPR, p_bounds_type,
6bf68a93 4017 template_tree),
a1ab4c31
AC
4018 template_addr);
4019 break;
4020
4021 case 4: /* Class A */
4022 /* The AFLAGS field is the 7th field in the descriptor. */
910ad8de 4023 t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer)));
a1ab4c31
AC
4024 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
4025 /* The DIMCT field is the 8th field in the descriptor. */
7d76717d 4026 t = DECL_CHAIN (t);
a1ab4c31
AC
4027 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
4028 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
4029 or FL_COEFF or FL_BOUNDS not set. */
4030 u = build_int_cst (TREE_TYPE (aflags), 192);
1139f2e8
EB
4031 u = build_binary_op (TRUTH_OR_EXPR, boolean_type_node,
4032 build_binary_op (NE_EXPR, boolean_type_node,
a1ab4c31
AC
4033 dimct,
4034 convert (TREE_TYPE (dimct),
4035 size_one_node)),
1139f2e8 4036 build_binary_op (NE_EXPR, boolean_type_node,
a1ab4c31
AC
4037 build2 (BIT_AND_EXPR,
4038 TREE_TYPE (aflags),
4039 aflags, u),
4040 u));
a1ab4c31
AC
4041 /* There is already a template in the descriptor and it is
4042 located at the start of block 3 (12th field). */
910ad8de 4043 t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (t))));
c6bd4220
EB
4044 template_tree
4045 = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
f76d6e6f 4046 template_tree = build3 (COND_EXPR, TREE_TYPE (t), u,
d628c015
DR
4047 build_call_raise (CE_Length_Check_Failed, Empty,
4048 N_Raise_Constraint_Error),
6bf68a93 4049 template_tree);
c6bd4220
EB
4050 template_addr
4051 = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
a1ab4c31
AC
4052 break;
4053
4054 case 10: /* Class NCA */
4055 default:
4056 post_error ("unsupported descriptor type for &", gnat_subprog);
4057 template_addr = integer_zero_node;
4058 break;
4059 }
4060
4061 /* Build the fat pointer in the form of a constructor. */
9771b263 4062 vec_alloc (v, 2);
0e228dd9 4063 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (gnu_type), gnu_expr32);
910ad8de 4064 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (gnu_type)),
0e228dd9 4065 template_addr);
d628c015 4066
0e228dd9 4067 return gnat_build_constructor (gnu_type, v);
a1ab4c31
AC
4068 }
4069
4070 else
4071 gcc_unreachable ();
4072}
4073
a981c964
EB
4074/* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
4075 pointer or fat pointer type. GNU_EXPR_ALT_TYPE is the alternate (32-bit)
0c700259
EB
4076 pointer type of GNU_EXPR. BY_REF is true if the result is to be used by
4077 reference. GNAT_SUBPROG is the subprogram to which the VMS descriptor is
4078 passed. */
d628c015 4079
f3d34576 4080tree
a981c964 4081convert_vms_descriptor (tree gnu_type, tree gnu_expr, tree gnu_expr_alt_type,
0c700259 4082 bool by_ref, Entity_Id gnat_subprog)
d628c015
DR
4083{
4084 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
4085 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
4086 tree mbo = TYPE_FIELDS (desc_type);
4087 const char *mbostr = IDENTIFIER_POINTER (DECL_NAME (mbo));
910ad8de 4088 tree mbmo = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (mbo)));
0c700259
EB
4089 tree real_type, is64bit, gnu_expr32, gnu_expr64;
4090
4091 if (by_ref)
4092 real_type = TREE_TYPE (gnu_type);
4093 else
4094 real_type = gnu_type;
d628c015 4095
a981c964
EB
4096 /* If the field name is not MBO, it must be 32-bit and no alternate.
4097 Otherwise primary must be 64-bit and alternate 32-bit. */
d628c015 4098 if (strcmp (mbostr, "MBO") != 0)
0c700259
EB
4099 {
4100 tree ret = convert_vms_descriptor32 (real_type, gnu_expr, gnat_subprog);
4101 if (by_ref)
4102 ret = build_unary_op (ADDR_EXPR, gnu_type, ret);
4103 return ret;
4104 }
d628c015 4105
a981c964 4106 /* Build the test for 64-bit descriptor. */
d628c015
DR
4107 mbo = build3 (COMPONENT_REF, TREE_TYPE (mbo), desc, mbo, NULL_TREE);
4108 mbmo = build3 (COMPONENT_REF, TREE_TYPE (mbmo), desc, mbmo, NULL_TREE);
a981c964 4109 is64bit
1139f2e8
EB
4110 = build_binary_op (TRUTH_ANDIF_EXPR, boolean_type_node,
4111 build_binary_op (EQ_EXPR, boolean_type_node,
a981c964
EB
4112 convert (integer_type_node, mbo),
4113 integer_one_node),
1139f2e8 4114 build_binary_op (EQ_EXPR, boolean_type_node,
a981c964
EB
4115 convert (integer_type_node, mbmo),
4116 integer_minus_one_node));
4117
4118 /* Build the 2 possible end results. */
0c700259
EB
4119 gnu_expr64 = convert_vms_descriptor64 (real_type, gnu_expr, gnat_subprog);
4120 if (by_ref)
4121 gnu_expr64 = build_unary_op (ADDR_EXPR, gnu_type, gnu_expr64);
a981c964 4122 gnu_expr = fold_convert (gnu_expr_alt_type, gnu_expr);
0c700259
EB
4123 gnu_expr32 = convert_vms_descriptor32 (real_type, gnu_expr, gnat_subprog);
4124 if (by_ref)
4125 gnu_expr32 = build_unary_op (ADDR_EXPR, gnu_type, gnu_expr32);
a981c964
EB
4126
4127 return build3 (COND_EXPR, gnu_type, is64bit, gnu_expr64, gnu_expr32);
d628c015 4128}
a1ab4c31 4129\f
928dfa4b
EB
4130/* Build a type to be used to represent an aliased object whose nominal type
4131 is an unconstrained array. This consists of a RECORD_TYPE containing a
4132 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
4133 If ARRAY_TYPE is that of an unconstrained array, this is used to represent
4134 an arbitrary unconstrained object. Use NAME as the name of the record.
4135 DEBUG_INFO_P is true if we need to write debug information for the type. */
a1ab4c31
AC
4136
4137tree
928dfa4b
EB
4138build_unc_object_type (tree template_type, tree object_type, tree name,
4139 bool debug_info_p)
a1ab4c31
AC
4140{
4141 tree type = make_node (RECORD_TYPE);
da01bfee
EB
4142 tree template_field
4143 = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
4144 NULL_TREE, NULL_TREE, 0, 1);
4145 tree array_field
4146 = create_field_decl (get_identifier ("ARRAY"), object_type, type,
4147 NULL_TREE, NULL_TREE, 0, 1);
a1ab4c31
AC
4148
4149 TYPE_NAME (type) = name;
4150 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
910ad8de 4151 DECL_CHAIN (template_field) = array_field;
928dfa4b
EB
4152 finish_record_type (type, template_field, 0, true);
4153
4154 /* Declare it now since it will never be declared otherwise. This is
4155 necessary to ensure that its subtrees are properly marked. */
4156 create_type_decl (name, type, NULL, true, debug_info_p, Empty);
a1ab4c31
AC
4157
4158 return type;
4159}
4160
4161/* Same, taking a thin or fat pointer type instead of a template type. */
4162
4163tree
4164build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
928dfa4b 4165 tree name, bool debug_info_p)
a1ab4c31
AC
4166{
4167 tree template_type;
4168
315cff15 4169 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
a1ab4c31
AC
4170
4171 template_type
315cff15 4172 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
910ad8de 4173 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
a1ab4c31 4174 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
928dfa4b
EB
4175
4176 return
4177 build_unc_object_type (template_type, object_type, name, debug_info_p);
a1ab4c31 4178}
a1ab4c31 4179\f
229077b0
EB
4180/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
4181 In the normal case this is just two adjustments, but we have more to
4182 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
a1ab4c31
AC
4183
4184void
4185update_pointer_to (tree old_type, tree new_type)
4186{
4187 tree ptr = TYPE_POINTER_TO (old_type);
4188 tree ref = TYPE_REFERENCE_TO (old_type);
aeecf17c 4189 tree t;
a1ab4c31
AC
4190
4191 /* If this is the main variant, process all the other variants first. */
4192 if (TYPE_MAIN_VARIANT (old_type) == old_type)
aeecf17c
EB
4193 for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
4194 update_pointer_to (t, new_type);
a1ab4c31 4195
229077b0 4196 /* If no pointers and no references, we are done. */
a1ab4c31
AC
4197 if (!ptr && !ref)
4198 return;
4199
4200 /* Merge the old type qualifiers in the new type.
4201
4202 Each old variant has qualifiers for specific reasons, and the new
229077b0 4203 designated type as well. Each set of qualifiers represents useful
a1ab4c31
AC
4204 information grabbed at some point, and merging the two simply unifies
4205 these inputs into the final type description.
4206
4207 Consider for instance a volatile type frozen after an access to constant
229077b0
EB
4208 type designating it; after the designated type's freeze, we get here with
4209 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
4210 when the access type was processed. We will make a volatile and readonly
a1ab4c31
AC
4211 designated type, because that's what it really is.
4212
229077b0
EB
4213 We might also get here for a non-dummy OLD_TYPE variant with different
4214 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
a1ab4c31 4215 to private record type elaboration (see the comments around the call to
229077b0
EB
4216 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
4217 the qualifiers in those cases too, to avoid accidentally discarding the
4218 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
4219 new_type
4220 = build_qualified_type (new_type,
4221 TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
4222
4223 /* If old type and new type are identical, there is nothing to do. */
a1ab4c31
AC
4224 if (old_type == new_type)
4225 return;
4226
4227 /* Otherwise, first handle the simple case. */
4228 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
4229 {
aeecf17c
EB
4230 tree new_ptr, new_ref;
4231
4232 /* If pointer or reference already points to new type, nothing to do.
4233 This can happen as update_pointer_to can be invoked multiple times
4234 on the same couple of types because of the type variants. */
4235 if ((ptr && TREE_TYPE (ptr) == new_type)
4236 || (ref && TREE_TYPE (ref) == new_type))
4237 return;
4238
4239 /* Chain PTR and its variants at the end. */
4240 new_ptr = TYPE_POINTER_TO (new_type);
4241 if (new_ptr)
4242 {
4243 while (TYPE_NEXT_PTR_TO (new_ptr))
4244 new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
4245 TYPE_NEXT_PTR_TO (new_ptr) = ptr;
4246 }
4247 else
4248 TYPE_POINTER_TO (new_type) = ptr;
a1ab4c31 4249
aeecf17c 4250 /* Now adjust them. */
a1ab4c31 4251 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
aeecf17c 4252 for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
50179d58
EB
4253 {
4254 TREE_TYPE (t) = new_type;
4255 if (TYPE_NULL_BOUNDS (t))
4256 TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
4257 }
de9528f0 4258
aeecf17c
EB
4259 /* Chain REF and its variants at the end. */
4260 new_ref = TYPE_REFERENCE_TO (new_type);
4261 if (new_ref)
4262 {
4263 while (TYPE_NEXT_REF_TO (new_ref))
4264 new_ref = TYPE_NEXT_REF_TO (new_ref);
4265 TYPE_NEXT_REF_TO (new_ref) = ref;
4266 }
4267 else
4268 TYPE_REFERENCE_TO (new_type) = ref;
4269
4270 /* Now adjust them. */
a1ab4c31 4271 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
aeecf17c
EB
4272 for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
4273 TREE_TYPE (t) = new_type;
de9528f0
EB
4274
4275 TYPE_POINTER_TO (old_type) = NULL_TREE;
3bd6ca3f 4276 TYPE_REFERENCE_TO (old_type) = NULL_TREE;
a1ab4c31
AC
4277 }
4278
aeecf17c
EB
4279 /* Now deal with the unconstrained array case. In this case the pointer
4280 is actually a record where both fields are pointers to dummy nodes.
e3edbd56
EB
4281 Turn them into pointers to the correct types using update_pointer_to.
4282 Likewise for the pointer to the object record (thin pointer). */
a1ab4c31
AC
4283 else
4284 {
e3edbd56 4285 tree new_ptr = TYPE_POINTER_TO (new_type);
aeecf17c
EB
4286
4287 gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));
4288
e3edbd56 4289 /* If PTR already points to NEW_TYPE, nothing to do. This can happen
aeecf17c
EB
4290 since update_pointer_to can be invoked multiple times on the same
4291 couple of types because of the type variants. */
4292 if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
4293 return;
4294
a1ab4c31 4295 update_pointer_to
e3edbd56
EB
4296 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
4297 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));
a1ab4c31 4298
a1ab4c31 4299 update_pointer_to
e3edbd56
EB
4300 (TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
4301 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));
aeecf17c 4302
e3edbd56
EB
4303 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
4304 TYPE_OBJECT_RECORD_TYPE (new_type));
a1ab4c31 4305
e3edbd56 4306 TYPE_POINTER_TO (old_type) = NULL_TREE;
a1ab4c31
AC
4307 }
4308}
4309\f
8df2e902
EB
4310/* Convert EXPR, a pointer to a constrained array, into a pointer to an
4311 unconstrained one. This involves making or finding a template. */
a1ab4c31
AC
4312
4313static tree
4314convert_to_fat_pointer (tree type, tree expr)
4315{
910ad8de 4316 tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
8df2e902 4317 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
a1ab4c31 4318 tree etype = TREE_TYPE (expr);
6bf68a93 4319 tree template_tree;
9771b263
DN
4320 vec<constructor_elt, va_gc> *v;
4321 vec_alloc (v, 2);
a1ab4c31 4322
50179d58
EB
4323 /* If EXPR is null, make a fat pointer that contains a null pointer to the
4324 array (compare_fat_pointers ensures that this is the full discriminant)
4325 and a valid pointer to the bounds. This latter property is necessary
4326 since the compiler can hoist the load of the bounds done through it. */
a1ab4c31 4327 if (integer_zerop (expr))
0e228dd9 4328 {
50179d58
EB
4329 tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4330 tree null_bounds, t;
4331
4332 if (TYPE_NULL_BOUNDS (ptr_template_type))
4333 null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
4334 else
4335 {
4336 /* The template type can still be dummy at this point so we build an
4337 empty constructor. The middle-end will fill it in with zeros. */
9771b263
DN
4338 t = build_constructor (template_type,
4339 NULL);
50179d58
EB
4340 TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
4341 null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
4342 SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
4343 }
4344
0e228dd9 4345 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
50179d58
EB
4346 fold_convert (p_array_type, null_pointer_node));
4347 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
4348 t = build_constructor (type, v);
4349 /* Do not set TREE_CONSTANT so as to force T to static memory. */
4350 TREE_CONSTANT (t) = 0;
4351 TREE_STATIC (t) = 1;
4352
4353 return t;
0e228dd9 4354 }
a1ab4c31 4355
0d7de0e1
EB
4356 /* If EXPR is a thin pointer, make template and data from the record. */
4357 if (TYPE_IS_THIN_POINTER_P (etype))
a1ab4c31 4358 {
0d7de0e1 4359 tree field = TYPE_FIELDS (TREE_TYPE (etype));
a1ab4c31 4360
7d7a1fe8 4361 expr = gnat_protect_expr (expr);
a1ab4c31
AC
4362 if (TREE_CODE (expr) == ADDR_EXPR)
4363 expr = TREE_OPERAND (expr, 0);
4364 else
2b45154d
EB
4365 {
4366 /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
4367 the thin pointer value has been shifted so we first need to shift
4368 it back to get the template address. */
4369 if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)))
4370 expr
4371 = build_binary_op (POINTER_PLUS_EXPR, etype, expr,
4372 fold_build1 (NEGATE_EXPR, sizetype,
4373 byte_position
4374 (DECL_CHAIN (field))));
4375 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
4376 }
a1ab4c31 4377
0d7de0e1 4378 template_tree = build_component_ref (expr, NULL_TREE, field, false);
a1ab4c31
AC
4379 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
4380 build_component_ref (expr, NULL_TREE,
0d7de0e1 4381 DECL_CHAIN (field), false));
a1ab4c31 4382 }
8df2e902
EB
4383
4384 /* Otherwise, build the constructor for the template. */
a1ab4c31 4385 else
6bf68a93 4386 template_tree = build_template (template_type, TREE_TYPE (etype), expr);
a1ab4c31 4387
8df2e902 4388 /* The final result is a constructor for the fat pointer.
a1ab4c31 4389
8df2e902
EB
4390 If EXPR is an argument of a foreign convention subprogram, the type it
4391 points to is directly the component type. In this case, the expression
a1ab4c31 4392 type may not match the corresponding FIELD_DECL type at this point, so we
8df2e902 4393 call "convert" here to fix that up if necessary. This type consistency is
a1ab4c31 4394 required, for instance because it ensures that possible later folding of
8df2e902 4395 COMPONENT_REFs against this constructor always yields something of the
a1ab4c31
AC
4396 same type as the initial reference.
4397
8df2e902
EB
4398 Note that the call to "build_template" above is still fine because it
4399 will only refer to the provided TEMPLATE_TYPE in this case. */
0e228dd9
NF
4400 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4401 convert (p_array_type, expr));
910ad8de 4402 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
0e228dd9
NF
4403 build_unary_op (ADDR_EXPR, NULL_TREE,
4404 template_tree));
4405 return gnat_build_constructor (type, v);
a1ab4c31
AC
4406}
4407\f
a1ab4c31
AC
4408/* Create an expression whose value is that of EXPR,
4409 converted to type TYPE. The TREE_TYPE of the value
4410 is always TYPE. This function implements all reasonable
4411 conversions; callers should filter out those that are
4412 not permitted by the language being compiled. */
4413
4414tree
4415convert (tree type, tree expr)
4416{
a1ab4c31
AC
4417 tree etype = TREE_TYPE (expr);
4418 enum tree_code ecode = TREE_CODE (etype);
c34f3839 4419 enum tree_code code = TREE_CODE (type);
a1ab4c31 4420
c34f3839
EB
4421 /* If the expression is already of the right type, we are done. */
4422 if (etype == type)
a1ab4c31
AC
4423 return expr;
4424
4425 /* If both input and output have padding and are of variable size, do this
4426 as an unchecked conversion. Likewise if one is a mere variant of the
4427 other, so we avoid a pointless unpad/repad sequence. */
4428 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
315cff15 4429 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
a1ab4c31
AC
4430 && (!TREE_CONSTANT (TYPE_SIZE (type))
4431 || !TREE_CONSTANT (TYPE_SIZE (etype))
842d4ee2 4432 || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
a1ab4c31
AC
4433 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
4434 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
4435 ;
4436
f88facfe
EB
4437 /* If the output type has padding, convert to the inner type and make a
4438 constructor to build the record, unless a variable size is involved. */
315cff15 4439 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
a1ab4c31 4440 {
9771b263 4441 vec<constructor_elt, va_gc> *v;
0e228dd9 4442
a1ab4c31
AC
4443 /* If we previously converted from another type and our type is
4444 of variable size, remove the conversion to avoid the need for
f88facfe 4445 variable-sized temporaries. Likewise for a conversion between
a1ab4c31
AC
4446 original and packable version. */
4447 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4448 && (!TREE_CONSTANT (TYPE_SIZE (type))
4449 || (ecode == RECORD_TYPE
4450 && TYPE_NAME (etype)
4451 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
4452 expr = TREE_OPERAND (expr, 0);
4453
4454 /* If we are just removing the padding from expr, convert the original
4455 object if we have variable size in order to avoid the need for some
f88facfe 4456 variable-sized temporaries. Likewise if the padding is a variant
a1ab4c31
AC
4457 of the other, so we avoid a pointless unpad/repad sequence. */
4458 if (TREE_CODE (expr) == COMPONENT_REF
a1ab4c31
AC
4459 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
4460 && (!TREE_CONSTANT (TYPE_SIZE (type))
842d4ee2
EB
4461 || TYPE_MAIN_VARIANT (type)
4462 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))
a1ab4c31
AC
4463 || (ecode == RECORD_TYPE
4464 && TYPE_NAME (etype)
4465 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
4466 return convert (type, TREE_OPERAND (expr, 0));
4467
431cfac1
EB
4468 /* If the inner type is of self-referential size and the expression type
4469 is a record, do this as an unchecked conversion. But first pad the
4470 expression if possible to have the same size on both sides. */
c34f3839 4471 if (ecode == RECORD_TYPE
f88facfe 4472 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
431cfac1 4473 {
980a0501 4474 if (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST)
431cfac1 4475 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
980a0501
EB
4476 false, false, false, true),
4477 expr);
431cfac1
EB
4478 return unchecked_convert (type, expr, false);
4479 }
a1ab4c31 4480
f88facfe
EB
4481 /* If we are converting between array types with variable size, do the
4482 final conversion as an unchecked conversion, again to avoid the need
4483 for some variable-sized temporaries. If valid, this conversion is
4484 very likely purely technical and without real effects. */
c34f3839 4485 if (ecode == ARRAY_TYPE
f88facfe
EB
4486 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
4487 && !TREE_CONSTANT (TYPE_SIZE (etype))
4488 && !TREE_CONSTANT (TYPE_SIZE (type)))
4489 return unchecked_convert (type,
4490 convert (TREE_TYPE (TYPE_FIELDS (type)),
4491 expr),
4492 false);
4493
9771b263 4494 vec_alloc (v, 1);
0e228dd9
NF
4495 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4496 convert (TREE_TYPE (TYPE_FIELDS (type)), expr));
4497 return gnat_build_constructor (type, v);
a1ab4c31
AC
4498 }
4499
4500 /* If the input type has padding, remove it and convert to the output type.
4501 The conditions ordering is arranged to ensure that the output type is not
4502 a padding type here, as it is not clear whether the conversion would
4503 always be correct if this was to happen. */
315cff15 4504 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
a1ab4c31
AC
4505 {
4506 tree unpadded;
4507
4508 /* If we have just converted to this padded type, just get the
4509 inner expression. */
4510 if (TREE_CODE (expr) == CONSTRUCTOR
9771b263
DN
4511 && !vec_safe_is_empty (CONSTRUCTOR_ELTS (expr))
4512 && (*CONSTRUCTOR_ELTS (expr))[0].index == TYPE_FIELDS (etype))
4513 unpadded = (*CONSTRUCTOR_ELTS (expr))[0].value;
a1ab4c31
AC
4514
4515 /* Otherwise, build an explicit component reference. */
4516 else
4517 unpadded
4518 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
4519
4520 return convert (type, unpadded);
4521 }
4522
4523 /* If the input is a biased type, adjust first. */
4524 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4525 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
4526 fold_convert (TREE_TYPE (etype),
4527 expr),
4528 TYPE_MIN_VALUE (etype)));
4529
4530 /* If the input is a justified modular type, we need to extract the actual
4531 object before converting it to any other type with the exceptions of an
4532 unconstrained array or of a mere type variant. It is useful to avoid the
4533 extraction and conversion in the type variant case because it could end
4534 up replacing a VAR_DECL expr by a constructor and we might be about the
4535 take the address of the result. */
4536 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
4537 && code != UNCONSTRAINED_ARRAY_TYPE
4538 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
4539 return convert (type, build_component_ref (expr, NULL_TREE,
4540 TYPE_FIELDS (etype), false));
4541
4542 /* If converting to a type that contains a template, convert to the data
4543 type and then build the template. */
4544 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
4545 {
910ad8de 4546 tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
9771b263
DN
4547 vec<constructor_elt, va_gc> *v;
4548 vec_alloc (v, 2);
a1ab4c31
AC
4549
4550 /* If the source already has a template, get a reference to the
4551 associated array only, as we are going to rebuild a template
4552 for the target type anyway. */
4553 expr = maybe_unconstrained_array (expr);
4554
0e228dd9
NF
4555 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4556 build_template (TREE_TYPE (TYPE_FIELDS (type)),
4557 obj_type, NULL_TREE));
910ad8de 4558 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
0e228dd9
NF
4559 convert (obj_type, expr));
4560 return gnat_build_constructor (type, v);
a1ab4c31
AC
4561 }
4562
a1c7d797 4563 /* There are some cases of expressions that we process specially. */
a1ab4c31
AC
4564 switch (TREE_CODE (expr))
4565 {
4566 case ERROR_MARK:
4567 return expr;
4568
4569 case NULL_EXPR:
4570 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4571 conversion in gnat_expand_expr. NULL_EXPR does not represent
4572 and actual value, so no conversion is needed. */
4573 expr = copy_node (expr);
4574 TREE_TYPE (expr) = type;
4575 return expr;
4576
4577 case STRING_CST:
4578 /* If we are converting a STRING_CST to another constrained array type,
4579 just make a new one in the proper type. */
4580 if (code == ecode && AGGREGATE_TYPE_P (etype)
4581 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
4582 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
4583 {
4584 expr = copy_node (expr);
4585 TREE_TYPE (expr) = type;
4586 return expr;
4587 }
4588 break;
4589
7948ae37
OH
4590 case VECTOR_CST:
4591 /* If we are converting a VECTOR_CST to a mere variant type, just make
4592 a new one in the proper type. */
4593 if (code == ecode && gnat_types_compatible_p (type, etype))
4594 {
4595 expr = copy_node (expr);
4596 TREE_TYPE (expr) = type;
4597 return expr;
4598 }
4599
a1ab4c31
AC
4600 case CONSTRUCTOR:
4601 /* If we are converting a CONSTRUCTOR to a mere variant type, just make
4602 a new one in the proper type. */
4603 if (code == ecode && gnat_types_compatible_p (type, etype))
4604 {
4605 expr = copy_node (expr);
4606 TREE_TYPE (expr) = type;
9771b263 4607 CONSTRUCTOR_ELTS (expr) = vec_safe_copy (CONSTRUCTOR_ELTS (expr));
a1ab4c31
AC
4608 return expr;
4609 }
4610
cb3d597d
EB
4611 /* Likewise for a conversion between original and packable version, or
4612 conversion between types of the same size and with the same list of
4613 fields, but we have to work harder to preserve type consistency. */
a1ab4c31
AC
4614 if (code == ecode
4615 && code == RECORD_TYPE
cb3d597d
EB
4616 && (TYPE_NAME (type) == TYPE_NAME (etype)
4617 || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
4618
a1ab4c31 4619 {
9771b263
DN
4620 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4621 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4622 vec<constructor_elt, va_gc> *v;
4623 vec_alloc (v, len);
a1ab4c31
AC
4624 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4625 unsigned HOST_WIDE_INT idx;
4626 tree index, value;
4627
db868e1e
OH
4628 /* Whether we need to clear TREE_CONSTANT et al. on the output
4629 constructor when we convert in place. */
4630 bool clear_constant = false;
4631
a1ab4c31
AC
4632 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
4633 {
cb3d597d
EB
4634 /* We expect only simple constructors. */
4635 if (!SAME_FIELD_P (index, efield))
4636 break;
4637 /* The field must be the same. */
4638 if (!SAME_FIELD_P (efield, field))
a1ab4c31 4639 break;
f32682ca 4640 constructor_elt elt = {field, convert (TREE_TYPE (field), value)};
9771b263 4641 v->quick_push (elt);
db868e1e
OH
4642
4643 /* If packing has made this field a bitfield and the input
4644 value couldn't be emitted statically any more, we need to
4645 clear TREE_CONSTANT on our output. */
ced57283
EB
4646 if (!clear_constant
4647 && TREE_CONSTANT (expr)
db868e1e
OH
4648 && !CONSTRUCTOR_BITFIELD_P (efield)
4649 && CONSTRUCTOR_BITFIELD_P (field)
4650 && !initializer_constant_valid_for_bitfield_p (value))
4651 clear_constant = true;
4652
910ad8de
NF
4653 efield = DECL_CHAIN (efield);
4654 field = DECL_CHAIN (field);
a1ab4c31
AC
4655 }
4656
db868e1e
OH
4657 /* If we have been able to match and convert all the input fields
4658 to their output type, convert in place now. We'll fallback to a
4659 view conversion downstream otherwise. */
a1ab4c31
AC
4660 if (idx == len)
4661 {
4662 expr = copy_node (expr);
4663 TREE_TYPE (expr) = type;
4664 CONSTRUCTOR_ELTS (expr) = v;
db868e1e 4665 if (clear_constant)
ced57283 4666 TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
a1ab4c31
AC
4667 return expr;
4668 }
4669 }
7948ae37
OH
4670
4671 /* Likewise for a conversion between array type and vector type with a
4672 compatible representative array. */
4673 else if (code == VECTOR_TYPE
4674 && ecode == ARRAY_TYPE
4675 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4676 etype))
4677 {
9771b263
DN
4678 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4679 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4680 vec<constructor_elt, va_gc> *v;
7948ae37
OH
4681 unsigned HOST_WIDE_INT ix;
4682 tree value;
4683
4684 /* Build a VECTOR_CST from a *constant* array constructor. */
4685 if (TREE_CONSTANT (expr))
4686 {
4687 bool constant_p = true;
4688
4689 /* Iterate through elements and check if all constructor
4690 elements are *_CSTs. */
4691 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4692 if (!CONSTANT_CLASS_P (value))
4693 {
4694 constant_p = false;
4695 break;
4696 }
4697
4698 if (constant_p)
4699 return build_vector_from_ctor (type,
4700 CONSTRUCTOR_ELTS (expr));
4701 }
4702
4703 /* Otherwise, build a regular vector constructor. */
9771b263 4704 vec_alloc (v, len);
7948ae37
OH
4705 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4706 {
f32682ca 4707 constructor_elt elt = {NULL_TREE, value};
9771b263 4708 v->quick_push (elt);
7948ae37
OH
4709 }
4710 expr = copy_node (expr);
4711 TREE_TYPE (expr) = type;
4712 CONSTRUCTOR_ELTS (expr) = v;
4713 return expr;
4714 }
a1ab4c31
AC
4715 break;
4716
4717 case UNCONSTRAINED_ARRAY_REF:
a1c7d797
EB
4718 /* First retrieve the underlying array. */
4719 expr = maybe_unconstrained_array (expr);
4720 etype = TREE_TYPE (expr);
4721 ecode = TREE_CODE (etype);
4722 break;
a1ab4c31
AC
4723
4724 case VIEW_CONVERT_EXPR:
4725 {
4726 /* GCC 4.x is very sensitive to type consistency overall, and view
4727 conversions thus are very frequent. Even though just "convert"ing
4728 the inner operand to the output type is fine in most cases, it
4729 might expose unexpected input/output type mismatches in special
4730 circumstances so we avoid such recursive calls when we can. */
4731 tree op0 = TREE_OPERAND (expr, 0);
4732
4733 /* If we are converting back to the original type, we can just
4734 lift the input conversion. This is a common occurrence with
4735 switches back-and-forth amongst type variants. */
4736 if (type == TREE_TYPE (op0))
4737 return op0;
4738
7948ae37
OH
4739 /* Otherwise, if we're converting between two aggregate or vector
4740 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4741 target type in place or to just convert the inner expression. */
4742 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4743 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
a1ab4c31
AC
4744 {
4745 /* If we are converting between mere variants, we can just
4746 substitute the VIEW_CONVERT_EXPR in place. */
4747 if (gnat_types_compatible_p (type, etype))
4748 return build1 (VIEW_CONVERT_EXPR, type, op0);
4749
4750 /* Otherwise, we may just bypass the input view conversion unless
4751 one of the types is a fat pointer, which is handled by
4752 specialized code below which relies on exact type matching. */
315cff15
EB
4753 else if (!TYPE_IS_FAT_POINTER_P (type)
4754 && !TYPE_IS_FAT_POINTER_P (etype))
a1ab4c31
AC
4755 return convert (type, op0);
4756 }
ad1d36ba
EB
4757
4758 break;
a1ab4c31 4759 }
a1ab4c31 4760
a1ab4c31
AC
4761 default:
4762 break;
4763 }
4764
4765 /* Check for converting to a pointer to an unconstrained array. */
315cff15 4766 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
a1ab4c31
AC
4767 return convert_to_fat_pointer (type, expr);
4768
7948ae37
OH
4769 /* If we are converting between two aggregate or vector types that are mere
4770 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4771 to a vector type from its representative array type. */
4772 else if ((code == ecode
4773 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4774 && gnat_types_compatible_p (type, etype))
4775 || (code == VECTOR_TYPE
4776 && ecode == ARRAY_TYPE
4777 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4778 etype)))
a1ab4c31
AC
4779 return build1 (VIEW_CONVERT_EXPR, type, expr);
4780
76af763d
EB
4781 /* If we are converting between tagged types, try to upcast properly. */
4782 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4783 && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type))
4784 {
4785 tree child_etype = etype;
4786 do {
4787 tree field = TYPE_FIELDS (child_etype);
4788 if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
4789 return build_component_ref (expr, NULL_TREE, field, false);
4790 child_etype = TREE_TYPE (field);
4791 } while (TREE_CODE (child_etype) == RECORD_TYPE);
4792 }
4793
bb1f7929
EB
4794 /* If we are converting from a smaller form of record type back to it, just
4795 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4796 size on both sides. */
4797 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4798 && smaller_form_type_p (etype, type))
4799 {
4800 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4801 false, false, false, true),
4802 expr);
4803 return build1 (VIEW_CONVERT_EXPR, type, expr);
4804 }
4805
a1ab4c31 4806 /* In all other cases of related types, make a NOP_EXPR. */
86060344 4807 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
a1ab4c31
AC
4808 return fold_convert (type, expr);
4809
4810 switch (code)
4811 {
4812 case VOID_TYPE:
4813 return fold_build1 (CONVERT_EXPR, type, expr);
4814
a1ab4c31
AC
4815 case INTEGER_TYPE:
4816 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4817 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4818 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4819 return unchecked_convert (type, expr, false);
4820 else if (TYPE_BIASED_REPRESENTATION_P (type))
4821 return fold_convert (type,
4822 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4823 convert (TREE_TYPE (type), expr),
4824 TYPE_MIN_VALUE (type)));
4825
4826 /* ... fall through ... */
4827
4828 case ENUMERAL_TYPE:
01ddebf2 4829 case BOOLEAN_TYPE:
a1ab4c31
AC
4830 /* If we are converting an additive expression to an integer type
4831 with lower precision, be wary of the optimization that can be
4832 applied by convert_to_integer. There are 2 problematic cases:
4833 - if the first operand was originally of a biased type,
4834 because we could be recursively called to convert it
4835 to an intermediate type and thus rematerialize the
4836 additive operator endlessly,
4837 - if the expression contains a placeholder, because an
4838 intermediate conversion that changes the sign could
4839 be inserted and thus introduce an artificial overflow
4840 at compile time when the placeholder is substituted. */
4841 if (code == INTEGER_TYPE
4842 && ecode == INTEGER_TYPE
4843 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4844 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4845 {
4846 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4847
4848 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4849 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4850 || CONTAINS_PLACEHOLDER_P (expr))
4851 return build1 (NOP_EXPR, type, expr);
4852 }
4853
4854 return fold (convert_to_integer (type, expr));
4855
4856 case POINTER_TYPE:
4857 case REFERENCE_TYPE:
0d7de0e1 4858 /* If converting between two thin pointers, adjust if needed to account
2b45154d
EB
4859 for differing offsets from the base pointer, depending on whether
4860 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type. */
315cff15 4861 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
a1ab4c31 4862 {
2b45154d
EB
4863 tree etype_pos
4864 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)) != NULL_TREE
4865 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype))))
4866 : size_zero_node;
4867 tree type_pos
4868 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != NULL_TREE
4869 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type))))
4870 : size_zero_node;
4871 tree byte_diff = size_diffop (type_pos, etype_pos);
0d7de0e1 4872
a1ab4c31 4873 expr = build1 (NOP_EXPR, type, expr);
a1ab4c31
AC
4874 if (integer_zerop (byte_diff))
4875 return expr;
4876
4877 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
0d7de0e1 4878 fold_convert (sizetype, byte_diff));
a1ab4c31
AC
4879 }
4880
0d7de0e1
EB
4881 /* If converting fat pointer to normal or thin pointer, get the pointer
4882 to the array and then convert it. */
4883 if (TYPE_IS_FAT_POINTER_P (etype))
86060344
EB
4884 expr
4885 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
a1ab4c31
AC
4886
4887 return fold (convert_to_pointer (type, expr));
4888
4889 case REAL_TYPE:
4890 return fold (convert_to_real (type, expr));
4891
4892 case RECORD_TYPE:
4893 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
0e228dd9 4894 {
9771b263
DN
4895 vec<constructor_elt, va_gc> *v;
4896 vec_alloc (v, 1);
0e228dd9
NF
4897
4898 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4899 convert (TREE_TYPE (TYPE_FIELDS (type)),
4900 expr));
4901 return gnat_build_constructor (type, v);
4902 }
a1ab4c31
AC
4903
4904 /* ... fall through ... */
4905
4906 case ARRAY_TYPE:
4907 /* In these cases, assume the front-end has validated the conversion.
4908 If the conversion is valid, it will be a bit-wise conversion, so
4909 it can be viewed as an unchecked conversion. */
4910 return unchecked_convert (type, expr, false);
4911
4912 case UNION_TYPE:
4913 /* This is a either a conversion between a tagged type and some
4914 subtype, which we have to mark as a UNION_TYPE because of
4915 overlapping fields or a conversion of an Unchecked_Union. */
4916 return unchecked_convert (type, expr, false);
4917
4918 case UNCONSTRAINED_ARRAY_TYPE:
7948ae37
OH
4919 /* If the input is a VECTOR_TYPE, convert to the representative
4920 array type first. */
4921 if (ecode == VECTOR_TYPE)
4922 {
4923 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
4924 etype = TREE_TYPE (expr);
4925 ecode = TREE_CODE (etype);
4926 }
4927
a1ab4c31
AC
4928 /* If EXPR is a constrained array, take its address, convert it to a
4929 fat pointer, and then dereference it. Likewise if EXPR is a
4930 record containing both a template and a constrained array.
4931 Note that a record representing a justified modular type
4932 always represents a packed constrained array. */
4933 if (ecode == ARRAY_TYPE
4934 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4935 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4936 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4937 return
4938 build_unary_op
4939 (INDIRECT_REF, NULL_TREE,
4940 convert_to_fat_pointer (TREE_TYPE (type),
4941 build_unary_op (ADDR_EXPR,
4942 NULL_TREE, expr)));
4943
4944 /* Do something very similar for converting one unconstrained
4945 array to another. */
4946 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4947 return
4948 build_unary_op (INDIRECT_REF, NULL_TREE,
4949 convert (TREE_TYPE (type),
4950 build_unary_op (ADDR_EXPR,
4951 NULL_TREE, expr)));
4952 else
4953 gcc_unreachable ();
4954
4955 case COMPLEX_TYPE:
4956 return fold (convert_to_complex (type, expr));
4957
4958 default:
4959 gcc_unreachable ();
4960 }
4961}
15bf7d19
EB
4962
4963/* Create an expression whose value is that of EXPR converted to the common
4964 index type, which is sizetype. EXPR is supposed to be in the base type
4965 of the GNAT index type. Calling it is equivalent to doing
4966
4967 convert (sizetype, expr)
4968
4969 but we try to distribute the type conversion with the knowledge that EXPR
4970 cannot overflow in its type. This is a best-effort approach and we fall
4971 back to the above expression as soon as difficulties are encountered.
4972
4973 This is necessary to overcome issues that arise when the GNAT base index
4974 type and the GCC common index type (sizetype) don't have the same size,
4975 which is quite frequent on 64-bit architectures. In this case, and if
4976 the GNAT base index type is signed but the iteration type of the loop has
4977 been forced to unsigned, the loop scalar evolution engine cannot compute
4978 a simple evolution for the general induction variables associated with the
4979 array indices, because it will preserve the wrap-around semantics in the
4980 unsigned type of their "inner" part. As a result, many loop optimizations
4981 are blocked.
4982
4983 The solution is to use a special (basic) induction variable that is at
4984 least as large as sizetype, and to express the aforementioned general
4985 induction variables in terms of this induction variable, eliminating
4986 the problematic intermediate truncation to the GNAT base index type.
4987 This is possible as long as the original expression doesn't overflow
4988 and if the middle-end hasn't introduced artificial overflows in the
4989 course of the various simplification it can make to the expression. */
4990
4991tree
4992convert_to_index_type (tree expr)
4993{
4994 enum tree_code code = TREE_CODE (expr);
4995 tree type = TREE_TYPE (expr);
4996
4997 /* If the type is unsigned, overflow is allowed so we cannot be sure that
4998 EXPR doesn't overflow. Keep it simple if optimization is disabled. */
4999 if (TYPE_UNSIGNED (type) || !optimize)
5000 return convert (sizetype, expr);
5001
5002 switch (code)
5003 {
5004 case VAR_DECL:
5005 /* The main effect of the function: replace a loop parameter with its
5006 associated special induction variable. */
5007 if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
5008 expr = DECL_INDUCTION_VAR (expr);
5009 break;
5010
5011 CASE_CONVERT:
5012 {
5013 tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
5014 /* Bail out as soon as we suspect some sort of type frobbing. */
5015 if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
5016 || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
5017 break;
5018 }
5019
5020 /* ... fall through ... */
5021
5022 case NON_LVALUE_EXPR:
5023 return fold_build1 (code, sizetype,
5024 convert_to_index_type (TREE_OPERAND (expr, 0)));
5025
5026 case PLUS_EXPR:
5027 case MINUS_EXPR:
5028 case MULT_EXPR:
5029 return fold_build2 (code, sizetype,
5030 convert_to_index_type (TREE_OPERAND (expr, 0)),
5031 convert_to_index_type (TREE_OPERAND (expr, 1)));
5032
5033 case COMPOUND_EXPR:
5034 return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
5035 convert_to_index_type (TREE_OPERAND (expr, 1)));
5036
5037 case COND_EXPR:
5038 return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
5039 convert_to_index_type (TREE_OPERAND (expr, 1)),
5040 convert_to_index_type (TREE_OPERAND (expr, 2)));
5041
5042 default:
5043 break;
5044 }
5045
5046 return convert (sizetype, expr);
5047}
a1ab4c31
AC
5048\f
5049/* Remove all conversions that are done in EXP. This includes converting
5050 from a padded type or to a justified modular type. If TRUE_ADDRESS
5051 is true, always return the address of the containing object even if
5052 the address is not bit-aligned. */
5053
5054tree
5055remove_conversions (tree exp, bool true_address)
5056{
5057 switch (TREE_CODE (exp))
5058 {
5059 case CONSTRUCTOR:
5060 if (true_address
5061 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
5062 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
5063 return
9771b263 5064 remove_conversions ((*CONSTRUCTOR_ELTS (exp))[0].value, true);
a1ab4c31
AC
5065 break;
5066
5067 case COMPONENT_REF:
315cff15 5068 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
a1ab4c31
AC
5069 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
5070 break;
5071
a1ab4c31 5072 CASE_CONVERT:
722356ce
EB
5073 case VIEW_CONVERT_EXPR:
5074 case NON_LVALUE_EXPR:
a1ab4c31
AC
5075 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
5076
5077 default:
5078 break;
5079 }
5080
5081 return exp;
5082}
5083\f
5084/* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
86060344 5085 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
a1ab4c31
AC
5086 likewise return an expression pointing to the underlying array. */
5087
5088tree
5089maybe_unconstrained_array (tree exp)
5090{
5091 enum tree_code code = TREE_CODE (exp);
1aa291f7 5092 tree type = TREE_TYPE (exp);
a1ab4c31 5093
1aa291f7 5094 switch (TREE_CODE (type))
a1ab4c31
AC
5095 {
5096 case UNCONSTRAINED_ARRAY_TYPE:
5097 if (code == UNCONSTRAINED_ARRAY_REF)
5098 {
7e169899 5099 const bool read_only = TREE_READONLY (exp);
a1c7d797
EB
5100 const bool no_trap = TREE_THIS_NOTRAP (exp);
5101
7e169899 5102 exp = TREE_OPERAND (exp, 0);
1aa291f7
EB
5103 type = TREE_TYPE (exp);
5104
7e169899
EB
5105 if (TREE_CODE (exp) == COND_EXPR)
5106 {
5107 tree op1
5108 = build_unary_op (INDIRECT_REF, NULL_TREE,
5109 build_component_ref (TREE_OPERAND (exp, 1),
5110 NULL_TREE,
1aa291f7 5111 TYPE_FIELDS (type),
7e169899
EB
5112 false));
5113 tree op2
5114 = build_unary_op (INDIRECT_REF, NULL_TREE,
5115 build_component_ref (TREE_OPERAND (exp, 2),
5116 NULL_TREE,
1aa291f7 5117 TYPE_FIELDS (type),
7e169899
EB
5118 false));
5119
5120 exp = build3 (COND_EXPR,
1aa291f7 5121 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
7e169899
EB
5122 TREE_OPERAND (exp, 0), op1, op2);
5123 }
5124 else
a1c7d797
EB
5125 {
5126 exp = build_unary_op (INDIRECT_REF, NULL_TREE,
5127 build_component_ref (exp, NULL_TREE,
1aa291f7 5128 TYPE_FIELDS (type),
a1c7d797
EB
5129 false));
5130 TREE_READONLY (exp) = read_only;
5131 TREE_THIS_NOTRAP (exp) = no_trap;
5132 }
a1ab4c31
AC
5133 }
5134
5135 else if (code == NULL_EXPR)
1aa291f7
EB
5136 exp = build1 (NULL_EXPR,
5137 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
5138 TREE_OPERAND (exp, 0));
5139 break;
a1ab4c31
AC
5140
5141 case RECORD_TYPE:
1aa291f7
EB
5142 /* If this is a padded type and it contains a template, convert to the
5143 unpadded type first. */
5144 if (TYPE_PADDING_P (type)
5145 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
5146 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
a1ab4c31 5147 {
1aa291f7
EB
5148 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
5149 type = TREE_TYPE (exp);
5150 }
5151
5152 if (TYPE_CONTAINS_TEMPLATE_P (type))
5153 {
5154 exp = build_component_ref (exp, NULL_TREE,
5155 DECL_CHAIN (TYPE_FIELDS (type)),
5156 false);
5157 type = TREE_TYPE (exp);
5158
5159 /* If the array type is padded, convert to the unpadded type. */
5160 if (TYPE_IS_PADDING_P (type))
5161 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
a1ab4c31 5162 }
a1ab4c31
AC
5163 break;
5164
5165 default:
5166 break;
5167 }
5168
5169 return exp;
5170}
5171\f
afcea859 5172/* Return true if EXPR is an expression that can be folded as an operand
84fb43a1 5173 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
afcea859
EB
5174
5175static bool
5176can_fold_for_view_convert_p (tree expr)
5177{
5178 tree t1, t2;
5179
5180 /* The folder will fold NOP_EXPRs between integral types with the same
5181 precision (in the middle-end's sense). We cannot allow it if the
5182 types don't have the same precision in the Ada sense as well. */
5183 if (TREE_CODE (expr) != NOP_EXPR)
5184 return true;
5185
5186 t1 = TREE_TYPE (expr);
5187 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
5188
5189 /* Defer to the folder for non-integral conversions. */
5190 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
5191 return true;
5192
5193 /* Only fold conversions that preserve both precisions. */
5194 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
5195 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
5196 return true;
5197
5198 return false;
5199}
5200
a1ab4c31 5201/* Return an expression that does an unchecked conversion of EXPR to TYPE.
afcea859
EB
5202 If NOTRUNC_P is true, truncation operations should be suppressed.
5203
5204 Special care is required with (source or target) integral types whose
5205 precision is not equal to their size, to make sure we fetch or assign
5206 the value bits whose location might depend on the endianness, e.g.
5207
5208 Rmsize : constant := 8;
5209 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
5210
5211 type Bit_Array is array (1 .. Rmsize) of Boolean;
5212 pragma Pack (Bit_Array);
5213
5214 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
5215
5216 Value : Int := 2#1000_0001#;
5217 Vbits : Bit_Array := To_Bit_Array (Value);
5218
5219 we expect the 8 bits at Vbits'Address to always contain Value, while
5220 their original location depends on the endianness, at Value'Address
84fb43a1 5221 on a little-endian architecture but not on a big-endian one. */
a1ab4c31
AC
5222
5223tree
5224unchecked_convert (tree type, tree expr, bool notrunc_p)
5225{
5226 tree etype = TREE_TYPE (expr);
c34f3839
EB
5227 enum tree_code ecode = TREE_CODE (etype);
5228 enum tree_code code = TREE_CODE (type);
980a0501 5229 int c;
a1ab4c31 5230
c34f3839 5231 /* If the expression is already of the right type, we are done. */
a1ab4c31
AC
5232 if (etype == type)
5233 return expr;
5234
5235 /* If both types types are integral just do a normal conversion.
5236 Likewise for a conversion to an unconstrained array. */
5237 if ((((INTEGRAL_TYPE_P (type)
c34f3839 5238 && !(code == INTEGER_TYPE && TYPE_VAX_FLOATING_POINT_P (type)))
0d7de0e1 5239 || (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
c34f3839 5240 || (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
a1ab4c31 5241 && ((INTEGRAL_TYPE_P (etype)
c34f3839 5242 && !(ecode == INTEGER_TYPE && TYPE_VAX_FLOATING_POINT_P (etype)))
315cff15 5243 || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
c34f3839
EB
5244 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
5245 || code == UNCONSTRAINED_ARRAY_TYPE)
a1ab4c31 5246 {
c34f3839 5247 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
a1ab4c31
AC
5248 {
5249 tree ntype = copy_type (etype);
a1ab4c31
AC
5250 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
5251 TYPE_MAIN_VARIANT (ntype) = ntype;
5252 expr = build1 (NOP_EXPR, ntype, expr);
5253 }
5254
c34f3839 5255 if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
a1ab4c31 5256 {
afcea859 5257 tree rtype = copy_type (type);
a1ab4c31
AC
5258 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
5259 TYPE_MAIN_VARIANT (rtype) = rtype;
afcea859
EB
5260 expr = convert (rtype, expr);
5261 expr = build1 (NOP_EXPR, type, expr);
a1ab4c31 5262 }
afcea859
EB
5263 else
5264 expr = convert (type, expr);
a1ab4c31
AC
5265 }
5266
afcea859 5267 /* If we are converting to an integral type whose precision is not equal
416de7d5
EB
5268 to its size, first unchecked convert to a record type that contains an
5269 field of the given precision. Then extract the field. */
980a0501
EB
5270 else if (INTEGRAL_TYPE_P (type)
5271 && TYPE_RM_SIZE (type)
a1ab4c31
AC
5272 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
5273 GET_MODE_BITSIZE (TYPE_MODE (type))))
5274 {
5275 tree rec_type = make_node (RECORD_TYPE);
416de7d5
EB
5276 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
5277 tree field_type, field;
5278
5279 if (TYPE_UNSIGNED (type))
5280 field_type = make_unsigned_type (prec);
5281 else
5282 field_type = make_signed_type (prec);
5283 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));
5284
5285 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
5286 NULL_TREE, NULL_TREE, 1, 0);
a1ab4c31
AC
5287
5288 TYPE_FIELDS (rec_type) = field;
5289 layout_type (rec_type);
5290
5291 expr = unchecked_convert (rec_type, expr, notrunc_p);
3cd64bab 5292 expr = build_component_ref (expr, NULL_TREE, field, false);
416de7d5 5293 expr = fold_build1 (NOP_EXPR, type, expr);
a1ab4c31
AC
5294 }
5295
416de7d5
EB
5296 /* Similarly if we are converting from an integral type whose precision is
5297 not equal to its size, first copy into a field of the given precision
5298 and unchecked convert the record type. */
980a0501
EB
5299 else if (INTEGRAL_TYPE_P (etype)
5300 && TYPE_RM_SIZE (etype)
5301 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
5302 GET_MODE_BITSIZE (TYPE_MODE (etype))))
a1ab4c31
AC
5303 {
5304 tree rec_type = make_node (RECORD_TYPE);
416de7d5 5305 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
9771b263
DN
5306 vec<constructor_elt, va_gc> *v;
5307 vec_alloc (v, 1);
416de7d5
EB
5308 tree field_type, field;
5309
5310 if (TYPE_UNSIGNED (etype))
5311 field_type = make_unsigned_type (prec);
5312 else
5313 field_type = make_signed_type (prec);
5314 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));
5315
5316 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
5317 NULL_TREE, NULL_TREE, 1, 0);
a1ab4c31
AC
5318
5319 TYPE_FIELDS (rec_type) = field;
5320 layout_type (rec_type);
5321
416de7d5 5322 expr = fold_build1 (NOP_EXPR, field_type, expr);
0e228dd9
NF
5323 CONSTRUCTOR_APPEND_ELT (v, field, expr);
5324 expr = gnat_build_constructor (rec_type, v);
a1ab4c31
AC
5325 expr = unchecked_convert (type, expr, notrunc_p);
5326 }
5327
980a0501
EB
5328 /* If we are converting from a scalar type to a type with a different size,
5329 we need to pad to have the same size on both sides.
5330
5331 ??? We cannot do it unconditionally because unchecked conversions are
5332 used liberally by the front-end to implement polymorphism, e.g. in:
5333
5334 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
5335 return p___size__4 (p__object!(S191s.all));
5336
5337 so we skip all expressions that are references. */
5338 else if (!REFERENCE_CLASS_P (expr)
5339 && !AGGREGATE_TYPE_P (etype)
5340 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5341 && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
5342 {
5343 if (c < 0)
5344 {
5345 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
5346 false, false, false, true),
5347 expr);
5348 expr = unchecked_convert (type, expr, notrunc_p);
5349 }
5350 else
5351 {
5352 tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
5353 false, false, false, true);
5354 expr = unchecked_convert (rec_type, expr, notrunc_p);
5355 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (rec_type),
5356 false);
5357 }
5358 }
5359
7948ae37
OH
5360 /* We have a special case when we are converting between two unconstrained
5361 array types. In that case, take the address, convert the fat pointer
5362 types, and dereference. */
c34f3839 5363 else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
a1ab4c31
AC
5364 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
5365 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
5366 build_unary_op (ADDR_EXPR, NULL_TREE,
5367 expr)));
7948ae37
OH
5368
5369 /* Another special case is when we are converting to a vector type from its
5370 representative array type; this a regular conversion. */
c34f3839
EB
5371 else if (code == VECTOR_TYPE
5372 && ecode == ARRAY_TYPE
7948ae37
OH
5373 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
5374 etype))
5375 expr = convert (type, expr);
5376
a1ab4c31
AC
5377 else
5378 {
5379 expr = maybe_unconstrained_array (expr);
5380 etype = TREE_TYPE (expr);
c34f3839 5381 ecode = TREE_CODE (etype);
afcea859
EB
5382 if (can_fold_for_view_convert_p (expr))
5383 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
5384 else
5385 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
a1ab4c31
AC
5386 }
5387
afcea859
EB
5388 /* If the result is an integral type whose precision is not equal to its
5389 size, sign- or zero-extend the result. We need not do this if the input
5390 is an integral type of the same precision and signedness or if the output
a1ab4c31
AC
5391 is a biased type or if both the input and output are unsigned. */
5392 if (!notrunc_p
5393 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
c34f3839 5394 && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
a1ab4c31
AC
5395 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
5396 GET_MODE_BITSIZE (TYPE_MODE (type)))
5397 && !(INTEGRAL_TYPE_P (etype)
5398 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
5399 && operand_equal_p (TYPE_RM_SIZE (type),
5400 (TYPE_RM_SIZE (etype) != 0
5401 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
5402 0))
5403 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
5404 {
c34f3839
EB
5405 tree base_type
5406 = gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
a1ab4c31
AC
5407 tree shift_expr
5408 = convert (base_type,
5409 size_binop (MINUS_EXPR,
5410 bitsize_int
5411 (GET_MODE_BITSIZE (TYPE_MODE (type))),
5412 TYPE_RM_SIZE (type)));
5413 expr
5414 = convert (type,
5415 build_binary_op (RSHIFT_EXPR, base_type,
5416 build_binary_op (LSHIFT_EXPR, base_type,
5417 convert (base_type, expr),
5418 shift_expr),
5419 shift_expr));
5420 }
5421
5422 /* An unchecked conversion should never raise Constraint_Error. The code
5423 below assumes that GCC's conversion routines overflow the same way that
5424 the underlying hardware does. This is probably true. In the rare case
5425 when it is false, we can rely on the fact that such conversions are
5426 erroneous anyway. */
5427 if (TREE_CODE (expr) == INTEGER_CST)
5428 TREE_OVERFLOW (expr) = 0;
5429
5430 /* If the sizes of the types differ and this is an VIEW_CONVERT_EXPR,
5431 show no longer constant. */
5432 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
5433 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
5434 OEP_ONLY_CONST))
5435 TREE_CONSTANT (expr) = 0;
5436
5437 return expr;
5438}
5439\f
feec4372 5440/* Return the appropriate GCC tree code for the specified GNAT_TYPE,
a1ab4c31
AC
5441 the latter being a record type as predicated by Is_Record_Type. */
5442
5443enum tree_code
5444tree_code_for_record_type (Entity_Id gnat_type)
5445{
b1a785fb 5446 Node_Id component_list, component;
a1ab4c31 5447
b1a785fb
EB
5448 /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5449 fields are all in the variant part. Otherwise, return RECORD_TYPE. */
a1ab4c31
AC
5450 if (!Is_Unchecked_Union (gnat_type))
5451 return RECORD_TYPE;
5452
b1a785fb
EB
5453 gnat_type = Implementation_Base_Type (gnat_type);
5454 component_list
5455 = Component_List (Type_Definition (Declaration_Node (gnat_type)));
5456
a1ab4c31
AC
5457 for (component = First_Non_Pragma (Component_Items (component_list));
5458 Present (component);
5459 component = Next_Non_Pragma (component))
5460 if (Ekind (Defining_Entity (component)) == E_Component)
5461 return RECORD_TYPE;
5462
5463 return UNION_TYPE;
5464}
5465
caa9d12a
EB
5466/* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5467 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5468 according to the presence of an alignment clause on the type or, if it
5469 is an array, on the component type. */
5470
5471bool
5472is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
5473{
5474 gnat_type = Underlying_Type (gnat_type);
5475
5476 *align_clause = Present (Alignment_Clause (gnat_type));
5477
5478 if (Is_Array_Type (gnat_type))
5479 {
5480 gnat_type = Underlying_Type (Component_Type (gnat_type));
5481 if (Present (Alignment_Clause (gnat_type)))
5482 *align_clause = true;
5483 }
5484
5485 if (!Is_Floating_Point_Type (gnat_type))
5486 return false;
5487
5488 if (UI_To_Int (Esize (gnat_type)) != 64)
5489 return false;
5490
5491 return true;
5492}
5493
5494/* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5495 size is greater or equal to 64 bits, or an array of such a type. Set
5496 ALIGN_CLAUSE according to the presence of an alignment clause on the
5497 type or, if it is an array, on the component type. */
5498
5499bool
5500is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
5501{
5502 gnat_type = Underlying_Type (gnat_type);
5503
5504 *align_clause = Present (Alignment_Clause (gnat_type));
5505
5506 if (Is_Array_Type (gnat_type))
5507 {
5508 gnat_type = Underlying_Type (Component_Type (gnat_type));
5509 if (Present (Alignment_Clause (gnat_type)))
5510 *align_clause = true;
5511 }
5512
5513 if (!Is_Scalar_Type (gnat_type))
5514 return false;
5515
5516 if (UI_To_Int (Esize (gnat_type)) < 64)
5517 return false;
5518
5519 return true;
5520}
5521
a1ab4c31
AC
5522/* Return true if GNU_TYPE is suitable as the type of a non-aliased
5523 component of an aggregate type. */
5524
5525bool
5526type_for_nonaliased_component_p (tree gnu_type)
5527{
5528 /* If the type is passed by reference, we may have pointers to the
5529 component so it cannot be made non-aliased. */
5530 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
5531 return false;
5532
5533 /* We used to say that any component of aggregate type is aliased
5534 because the front-end may take 'Reference of it. The front-end
5535 has been enhanced in the meantime so as to use a renaming instead
5536 in most cases, but the back-end can probably take the address of
5537 such a component too so we go for the conservative stance.
5538
5539 For instance, we might need the address of any array type, even
5540 if normally passed by copy, to construct a fat pointer if the
5541 component is used as an actual for an unconstrained formal.
5542
5543 Likewise for record types: even if a specific record subtype is
5544 passed by copy, the parent type might be passed by ref (e.g. if
5545 it's of variable size) and we might take the address of a child
5546 component to pass to a parent formal. We have no way to check
5547 for such conditions here. */
5548 if (AGGREGATE_TYPE_P (gnu_type))
5549 return false;
5550
5551 return true;
5552}
5553
bb1f7929
EB
5554/* Return true if TYPE is a smaller form of ORIG_TYPE. */
5555
5556bool
5557smaller_form_type_p (tree type, tree orig_type)
5558{
5559 tree size, osize;
5560
5561 /* We're not interested in variants here. */
5562 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
5563 return false;
5564
5565 /* Like a variant, a packable version keeps the original TYPE_NAME. */
5566 if (TYPE_NAME (type) != TYPE_NAME (orig_type))
5567 return false;
5568
5569 size = TYPE_SIZE (type);
5570 osize = TYPE_SIZE (orig_type);
5571
5572 if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
5573 return false;
5574
5575 return tree_int_cst_lt (size, osize) != 0;
5576}
5577
a1ab4c31
AC
5578/* Perform final processing on global variables. */
5579
65444786
EB
5580static GTY (()) tree dummy_global;
5581
a1ab4c31
AC
5582void
5583gnat_write_global_declarations (void)
5584{
10e4d056
EB
5585 unsigned int i;
5586 tree iter;
5587
65444786
EB
5588 /* If we have declared types as used at the global level, insert them in
5589 the global hash table. We use a dummy variable for this purpose. */
9771b263 5590 if (types_used_by_cur_var_decl && !types_used_by_cur_var_decl->is_empty ())
65444786 5591 {
35e8bcf4 5592 struct varpool_node *node;
d3c268ab
EB
5593 char *label;
5594
5595 ASM_FORMAT_PRIVATE_NAME (label, first_global_object_name, 0);
65444786 5596 dummy_global
d3c268ab
EB
5597 = build_decl (BUILTINS_LOCATION, VAR_DECL, get_identifier (label),
5598 void_type_node);
65444786
EB
5599 TREE_STATIC (dummy_global) = 1;
5600 TREE_ASM_WRITTEN (dummy_global) = 1;
5d59b5e1 5601 node = varpool_node_for_decl (dummy_global);
ead84f73 5602 node->symbol.force_output = 1;
65444786 5603
9771b263 5604 while (!types_used_by_cur_var_decl->is_empty ())
65444786 5605 {
9771b263 5606 tree t = types_used_by_cur_var_decl->pop ();
65444786
EB
5607 types_used_by_var_decl_insert (t, dummy_global);
5608 }
5609 }
5610
10e4d056
EB
5611 /* Output debug information for all global type declarations first. This
5612 ensures that global types whose compilation hasn't been finalized yet,
5613 for example pointers to Taft amendment types, have their compilation
5614 finalized in the right context. */
9771b263 5615 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
10e4d056
EB
5616 if (TREE_CODE (iter) == TYPE_DECL)
5617 debug_hooks->global_decl (iter);
5618
65d630d4
JH
5619 /* Proceed to optimize and emit assembly. */
5620 finalize_compilation_unit ();
a1ab4c31 5621
10e4d056
EB
5622 /* After cgraph has had a chance to emit everything that's going to
5623 be emitted, output debug information for the rest of globals. */
5624 if (!seen_error ())
5625 {
5626 timevar_push (TV_SYMOUT);
9771b263 5627 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
10e4d056
EB
5628 if (TREE_CODE (iter) != TYPE_DECL)
5629 debug_hooks->global_decl (iter);
5630 timevar_pop (TV_SYMOUT);
5631 }
a1ab4c31
AC
5632}
5633
5634/* ************************************************************************
5635 * * GCC builtins support *
5636 * ************************************************************************ */
5637
5638/* The general scheme is fairly simple:
5639
5640 For each builtin function/type to be declared, gnat_install_builtins calls
5641 internal facilities which eventually get to gnat_push_decl, which in turn
5642 tracks the so declared builtin function decls in the 'builtin_decls' global
5643 datastructure. When an Intrinsic subprogram declaration is processed, we
5644 search this global datastructure to retrieve the associated BUILT_IN DECL
5645 node. */
5646
5647/* Search the chain of currently available builtin declarations for a node
5648 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
5649 found, if any, or NULL_TREE otherwise. */
5650tree
5651builtin_decl_for (tree name)
5652{
5653 unsigned i;
5654 tree decl;
5655
9771b263 5656 FOR_EACH_VEC_SAFE_ELT (builtin_decls, i, decl)
a1ab4c31
AC
5657 if (DECL_NAME (decl) == name)
5658 return decl;
5659
5660 return NULL_TREE;
5661}
5662
5663/* The code below eventually exposes gnat_install_builtins, which declares
5664 the builtin types and functions we might need, either internally or as
5665 user accessible facilities.
5666
5667 ??? This is a first implementation shot, still in rough shape. It is
5668 heavily inspired from the "C" family implementation, with chunks copied
5669 verbatim from there.
5670
5671 Two obvious TODO candidates are
5672 o Use a more efficient name/decl mapping scheme
5673 o Devise a middle-end infrastructure to avoid having to copy
5674 pieces between front-ends. */
5675
5676/* ----------------------------------------------------------------------- *
5677 * BUILTIN ELEMENTARY TYPES *
5678 * ----------------------------------------------------------------------- */
5679
5680/* Standard data types to be used in builtin argument declarations. */
5681
5682enum c_tree_index
5683{
5684 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
5685 CTI_STRING_TYPE,
5686 CTI_CONST_STRING_TYPE,
5687
5688 CTI_MAX
5689};
5690
5691static tree c_global_trees[CTI_MAX];
5692
5693#define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
5694#define string_type_node c_global_trees[CTI_STRING_TYPE]
5695#define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
5696
5697/* ??? In addition some attribute handlers, we currently don't support a
5698 (small) number of builtin-types, which in turns inhibits support for a
5699 number of builtin functions. */
5700#define wint_type_node void_type_node
5701#define intmax_type_node void_type_node
5702#define uintmax_type_node void_type_node
5703
5704/* Build the void_list_node (void_type_node having been created). */
5705
5706static tree
5707build_void_list_node (void)
5708{
5709 tree t = build_tree_list (NULL_TREE, void_type_node);
5710 return t;
5711}
5712
5713/* Used to help initialize the builtin-types.def table. When a type of
5714 the correct size doesn't exist, use error_mark_node instead of NULL.
5715 The later results in segfaults even when a decl using the type doesn't
5716 get invoked. */
5717
5718static tree
5719builtin_type_for_size (int size, bool unsignedp)
5720{
ced57283 5721 tree type = gnat_type_for_size (size, unsignedp);
a1ab4c31
AC
5722 return type ? type : error_mark_node;
5723}
5724
5725/* Build/push the elementary type decls that builtin functions/types
5726 will need. */
5727
5728static void
5729install_builtin_elementary_types (void)
5730{
728936bb 5731 signed_size_type_node = gnat_signed_type (size_type_node);
a1ab4c31
AC
5732 pid_type_node = integer_type_node;
5733 void_list_node = build_void_list_node ();
5734
5735 string_type_node = build_pointer_type (char_type_node);
5736 const_string_type_node
5737 = build_pointer_type (build_qualified_type
5738 (char_type_node, TYPE_QUAL_CONST));
5739}
5740
5741/* ----------------------------------------------------------------------- *
5742 * BUILTIN FUNCTION TYPES *
5743 * ----------------------------------------------------------------------- */
5744
5745/* Now, builtin function types per se. */
5746
5747enum c_builtin_type
5748{
5749#define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
5750#define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
5751#define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
5752#define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
5753#define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5754#define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5755#define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
5756#define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
5757#define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
5758#define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
5759#define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
5760#define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
5761#define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5762#define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5763#define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG6) \
5764 NAME,
5765#define DEF_POINTER_TYPE(NAME, TYPE) NAME,
5766#include "builtin-types.def"
5767#undef DEF_PRIMITIVE_TYPE
5768#undef DEF_FUNCTION_TYPE_0
5769#undef DEF_FUNCTION_TYPE_1
5770#undef DEF_FUNCTION_TYPE_2
5771#undef DEF_FUNCTION_TYPE_3
5772#undef DEF_FUNCTION_TYPE_4
5773#undef DEF_FUNCTION_TYPE_5
5774#undef DEF_FUNCTION_TYPE_6
5775#undef DEF_FUNCTION_TYPE_7
5776#undef DEF_FUNCTION_TYPE_VAR_0
5777#undef DEF_FUNCTION_TYPE_VAR_1
5778#undef DEF_FUNCTION_TYPE_VAR_2
5779#undef DEF_FUNCTION_TYPE_VAR_3
5780#undef DEF_FUNCTION_TYPE_VAR_4
5781#undef DEF_FUNCTION_TYPE_VAR_5
5782#undef DEF_POINTER_TYPE
5783 BT_LAST
5784};
5785
5786typedef enum c_builtin_type builtin_type;
5787
5788/* A temporary array used in communication with def_fn_type. */
5789static GTY(()) tree builtin_types[(int) BT_LAST + 1];
5790
5791/* A helper function for install_builtin_types. Build function type
5792 for DEF with return type RET and N arguments. If VAR is true, then the
5793 function should be variadic after those N arguments.
5794
5795 Takes special care not to ICE if any of the types involved are
5796 error_mark_node, which indicates that said type is not in fact available
5797 (see builtin_type_for_size). In which case the function type as a whole
5798 should be error_mark_node. */
5799
5800static void
5801def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
5802{
e5b00edf
NF
5803 tree t;
5804 tree *args = XALLOCAVEC (tree, n);
a1ab4c31
AC
5805 va_list list;
5806 int i;
5807
5808 va_start (list, n);
5809 for (i = 0; i < n; ++i)
5810 {
c6bd4220 5811 builtin_type a = (builtin_type) va_arg (list, int);
a1ab4c31
AC
5812 t = builtin_types[a];
5813 if (t == error_mark_node)
5814 goto egress;
e5b00edf 5815 args[i] = t;
a1ab4c31 5816 }
a1ab4c31 5817
a1ab4c31
AC
5818 t = builtin_types[ret];
5819 if (t == error_mark_node)
5820 goto egress;
e5b00edf
NF
5821 if (var)
5822 t = build_varargs_function_type_array (t, n, args);
5823 else
5824 t = build_function_type_array (t, n, args);
a1ab4c31
AC
5825
5826 egress:
5827 builtin_types[def] = t;
0edf1bb2 5828 va_end (list);
a1ab4c31
AC
5829}
5830
5831/* Build the builtin function types and install them in the builtin_types
5832 array for later use in builtin function decls. */
5833
5834static void
5835install_builtin_function_types (void)
5836{
5837 tree va_list_ref_type_node;
5838 tree va_list_arg_type_node;
5839
5840 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
5841 {
5842 va_list_arg_type_node = va_list_ref_type_node =
5843 build_pointer_type (TREE_TYPE (va_list_type_node));
5844 }
5845 else
5846 {
5847 va_list_arg_type_node = va_list_type_node;
5848 va_list_ref_type_node = build_reference_type (va_list_type_node);
5849 }
5850
5851#define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
5852 builtin_types[ENUM] = VALUE;
5853#define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
5854 def_fn_type (ENUM, RETURN, 0, 0);
5855#define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
5856 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
5857#define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
5858 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
5859#define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5860 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
5861#define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5862 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
5863#define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5864 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5865#define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5866 ARG6) \
5867 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5868#define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5869 ARG6, ARG7) \
5870 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5871#define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
5872 def_fn_type (ENUM, RETURN, 1, 0);
5873#define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
5874 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
5875#define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
5876 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
5877#define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5878 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
5879#define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5880 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
5881#define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5882 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5883#define DEF_POINTER_TYPE(ENUM, TYPE) \
5884 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
5885
5886#include "builtin-types.def"
5887
5888#undef DEF_PRIMITIVE_TYPE
5889#undef DEF_FUNCTION_TYPE_1
5890#undef DEF_FUNCTION_TYPE_2
5891#undef DEF_FUNCTION_TYPE_3
5892#undef DEF_FUNCTION_TYPE_4
5893#undef DEF_FUNCTION_TYPE_5
5894#undef DEF_FUNCTION_TYPE_6
5895#undef DEF_FUNCTION_TYPE_VAR_0
5896#undef DEF_FUNCTION_TYPE_VAR_1
5897#undef DEF_FUNCTION_TYPE_VAR_2
5898#undef DEF_FUNCTION_TYPE_VAR_3
5899#undef DEF_FUNCTION_TYPE_VAR_4
5900#undef DEF_FUNCTION_TYPE_VAR_5
5901#undef DEF_POINTER_TYPE
5902 builtin_types[(int) BT_LAST] = NULL_TREE;
5903}
5904
5905/* ----------------------------------------------------------------------- *
5906 * BUILTIN ATTRIBUTES *
5907 * ----------------------------------------------------------------------- */
5908
5909enum built_in_attribute
5910{
5911#define DEF_ATTR_NULL_TREE(ENUM) ENUM,
5912#define DEF_ATTR_INT(ENUM, VALUE) ENUM,
e384e6b5 5913#define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
a1ab4c31
AC
5914#define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
5915#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
5916#include "builtin-attrs.def"
5917#undef DEF_ATTR_NULL_TREE
5918#undef DEF_ATTR_INT
e384e6b5 5919#undef DEF_ATTR_STRING
a1ab4c31
AC
5920#undef DEF_ATTR_IDENT
5921#undef DEF_ATTR_TREE_LIST
5922 ATTR_LAST
5923};
5924
5925static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
5926
5927static void
5928install_builtin_attributes (void)
5929{
5930 /* Fill in the built_in_attributes array. */
5931#define DEF_ATTR_NULL_TREE(ENUM) \
5932 built_in_attributes[(int) ENUM] = NULL_TREE;
5933#define DEF_ATTR_INT(ENUM, VALUE) \
5934 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
e384e6b5
BS
5935#define DEF_ATTR_STRING(ENUM, VALUE) \
5936 built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
a1ab4c31
AC
5937#define DEF_ATTR_IDENT(ENUM, STRING) \
5938 built_in_attributes[(int) ENUM] = get_identifier (STRING);
5939#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
5940 built_in_attributes[(int) ENUM] \
5941 = tree_cons (built_in_attributes[(int) PURPOSE], \
5942 built_in_attributes[(int) VALUE], \
5943 built_in_attributes[(int) CHAIN]);
5944#include "builtin-attrs.def"
5945#undef DEF_ATTR_NULL_TREE
5946#undef DEF_ATTR_INT
e384e6b5 5947#undef DEF_ATTR_STRING
a1ab4c31
AC
5948#undef DEF_ATTR_IDENT
5949#undef DEF_ATTR_TREE_LIST
5950}
5951
5952/* Handle a "const" attribute; arguments as in
5953 struct attribute_spec.handler. */
5954
5955static tree
5956handle_const_attribute (tree *node, tree ARG_UNUSED (name),
5957 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5958 bool *no_add_attrs)
5959{
5960 if (TREE_CODE (*node) == FUNCTION_DECL)
5961 TREE_READONLY (*node) = 1;
5962 else
5963 *no_add_attrs = true;
5964
5965 return NULL_TREE;
5966}
5967
5968/* Handle a "nothrow" attribute; arguments as in
5969 struct attribute_spec.handler. */
5970
5971static tree
5972handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
5973 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5974 bool *no_add_attrs)
5975{
5976 if (TREE_CODE (*node) == FUNCTION_DECL)
5977 TREE_NOTHROW (*node) = 1;
5978 else
5979 *no_add_attrs = true;
5980
5981 return NULL_TREE;
5982}
5983
5984/* Handle a "pure" attribute; arguments as in
5985 struct attribute_spec.handler. */
5986
5987static tree
5988handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5989 int ARG_UNUSED (flags), bool *no_add_attrs)
5990{
5991 if (TREE_CODE (*node) == FUNCTION_DECL)
5992 DECL_PURE_P (*node) = 1;
5993 /* ??? TODO: Support types. */
5994 else
5995 {
7948ae37
OH
5996 warning (OPT_Wattributes, "%qs attribute ignored",
5997 IDENTIFIER_POINTER (name));
a1ab4c31
AC
5998 *no_add_attrs = true;
5999 }
6000
6001 return NULL_TREE;
6002}
6003
6004/* Handle a "no vops" attribute; arguments as in
6005 struct attribute_spec.handler. */
6006
6007static tree
6008handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
6009 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6010 bool *ARG_UNUSED (no_add_attrs))
6011{
6012 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
6013 DECL_IS_NOVOPS (*node) = 1;
6014 return NULL_TREE;
6015}
6016
6017/* Helper for nonnull attribute handling; fetch the operand number
6018 from the attribute argument list. */
6019
6020static bool
6021get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
6022{
6023 /* Verify the arg number is a constant. */
6024 if (TREE_CODE (arg_num_expr) != INTEGER_CST
6025 || TREE_INT_CST_HIGH (arg_num_expr) != 0)
6026 return false;
6027
6028 *valp = TREE_INT_CST_LOW (arg_num_expr);
6029 return true;
6030}
6031
6032/* Handle the "nonnull" attribute. */
6033static tree
6034handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
6035 tree args, int ARG_UNUSED (flags),
6036 bool *no_add_attrs)
6037{
6038 tree type = *node;
6039 unsigned HOST_WIDE_INT attr_arg_num;
6040
6041 /* If no arguments are specified, all pointer arguments should be
6042 non-null. Verify a full prototype is given so that the arguments
6043 will have the correct types when we actually check them later. */
6044 if (!args)
6045 {
f4da8dce 6046 if (!prototype_p (type))
a1ab4c31
AC
6047 {
6048 error ("nonnull attribute without arguments on a non-prototype");
6049 *no_add_attrs = true;
6050 }
6051 return NULL_TREE;
6052 }
6053
6054 /* Argument list specified. Verify that each argument number references
6055 a pointer argument. */
6056 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
6057 {
a1ab4c31
AC
6058 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
6059
6060 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
6061 {
6062 error ("nonnull argument has invalid operand number (argument %lu)",
6063 (unsigned long) attr_arg_num);
6064 *no_add_attrs = true;
6065 return NULL_TREE;
6066 }
6067
d7d058c5 6068 if (prototype_p (type))
a1ab4c31 6069 {
d7d058c5
NF
6070 function_args_iterator iter;
6071 tree argument;
6072
6073 function_args_iter_init (&iter, type);
6074 for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
a1ab4c31 6075 {
d7d058c5 6076 argument = function_args_iter_cond (&iter);
a1ab4c31
AC
6077 if (!argument || ck_num == arg_num)
6078 break;
a1ab4c31
AC
6079 }
6080
6081 if (!argument
d7d058c5 6082 || TREE_CODE (argument) == VOID_TYPE)
a1ab4c31 6083 {
58c8f770
EB
6084 error ("nonnull argument with out-of-range operand number "
6085 "(argument %lu, operand %lu)",
a1ab4c31
AC
6086 (unsigned long) attr_arg_num, (unsigned long) arg_num);
6087 *no_add_attrs = true;
6088 return NULL_TREE;
6089 }
6090
d7d058c5 6091 if (TREE_CODE (argument) != POINTER_TYPE)
a1ab4c31 6092 {
58c8f770
EB
6093 error ("nonnull argument references non-pointer operand "
6094 "(argument %lu, operand %lu)",
a1ab4c31
AC
6095 (unsigned long) attr_arg_num, (unsigned long) arg_num);
6096 *no_add_attrs = true;
6097 return NULL_TREE;
6098 }
6099 }
6100 }
6101
6102 return NULL_TREE;
6103}
6104
6105/* Handle a "sentinel" attribute. */
6106
6107static tree
6108handle_sentinel_attribute (tree *node, tree name, tree args,
6109 int ARG_UNUSED (flags), bool *no_add_attrs)
6110{
f4da8dce 6111 if (!prototype_p (*node))
a1ab4c31
AC
6112 {
6113 warning (OPT_Wattributes,
7948ae37
OH
6114 "%qs attribute requires prototypes with named arguments",
6115 IDENTIFIER_POINTER (name));
a1ab4c31
AC
6116 *no_add_attrs = true;
6117 }
6118 else
6119 {
dcf0c47e 6120 if (!stdarg_p (*node))
a1ab4c31
AC
6121 {
6122 warning (OPT_Wattributes,
7948ae37
OH
6123 "%qs attribute only applies to variadic functions",
6124 IDENTIFIER_POINTER (name));
a1ab4c31
AC
6125 *no_add_attrs = true;
6126 }
6127 }
6128
6129 if (args)
6130 {
6131 tree position = TREE_VALUE (args);
6132
6133 if (TREE_CODE (position) != INTEGER_CST)
6134 {
6135 warning (0, "requested position is not an integer constant");
6136 *no_add_attrs = true;
6137 }
6138 else
6139 {
6140 if (tree_int_cst_lt (position, integer_zero_node))
6141 {
6142 warning (0, "requested position is less than zero");
6143 *no_add_attrs = true;
6144 }
6145 }
6146 }
6147
6148 return NULL_TREE;
6149}
6150
6151/* Handle a "noreturn" attribute; arguments as in
6152 struct attribute_spec.handler. */
6153
6154static tree
6155handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6156 int ARG_UNUSED (flags), bool *no_add_attrs)
6157{
6158 tree type = TREE_TYPE (*node);
6159
6160 /* See FIXME comment in c_common_attribute_table. */
6161 if (TREE_CODE (*node) == FUNCTION_DECL)
6162 TREE_THIS_VOLATILE (*node) = 1;
6163 else if (TREE_CODE (type) == POINTER_TYPE
6164 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
6165 TREE_TYPE (*node)
6166 = build_pointer_type
6167 (build_type_variant (TREE_TYPE (type),
6168 TYPE_READONLY (TREE_TYPE (type)), 1));
6169 else
6170 {
7948ae37
OH
6171 warning (OPT_Wattributes, "%qs attribute ignored",
6172 IDENTIFIER_POINTER (name));
a1ab4c31
AC
6173 *no_add_attrs = true;
6174 }
6175
6176 return NULL_TREE;
6177}
6178
0d6e14fd
JH
6179/* Handle a "leaf" attribute; arguments as in
6180 struct attribute_spec.handler. */
6181
6182static tree
6183handle_leaf_attribute (tree *node, tree name,
6184 tree ARG_UNUSED (args),
6185 int ARG_UNUSED (flags), bool *no_add_attrs)
6186{
6187 if (TREE_CODE (*node) != FUNCTION_DECL)
6188 {
6189 warning (OPT_Wattributes, "%qE attribute ignored", name);
6190 *no_add_attrs = true;
6191 }
6192 if (!TREE_PUBLIC (*node))
6193 {
32a5388a 6194 warning (OPT_Wattributes, "%qE attribute has no effect", name);
0d6e14fd
JH
6195 *no_add_attrs = true;
6196 }
6197
6198 return NULL_TREE;
6199}
6200
a1ab4c31
AC
6201/* Handle a "malloc" attribute; arguments as in
6202 struct attribute_spec.handler. */
6203
6204static tree
6205handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6206 int ARG_UNUSED (flags), bool *no_add_attrs)
6207{
6208 if (TREE_CODE (*node) == FUNCTION_DECL
6209 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
6210 DECL_IS_MALLOC (*node) = 1;
6211 else
6212 {
7948ae37
OH
6213 warning (OPT_Wattributes, "%qs attribute ignored",
6214 IDENTIFIER_POINTER (name));
a1ab4c31
AC
6215 *no_add_attrs = true;
6216 }
6217
6218 return NULL_TREE;
6219}
6220
6221/* Fake handler for attributes we don't properly support. */
6222
6223tree
6224fake_attribute_handler (tree * ARG_UNUSED (node),
6225 tree ARG_UNUSED (name),
6226 tree ARG_UNUSED (args),
6227 int ARG_UNUSED (flags),
6228 bool * ARG_UNUSED (no_add_attrs))
6229{
6230 return NULL_TREE;
6231}
6232
6233/* Handle a "type_generic" attribute. */
6234
6235static tree
6236handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
6237 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6238 bool * ARG_UNUSED (no_add_attrs))
6239{
a1ab4c31
AC
6240 /* Ensure we have a function type. */
6241 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
b4680ca1 6242
a1ab4c31 6243 /* Ensure we have a variadic function. */
dcf0c47e 6244 gcc_assert (!prototype_p (*node) || stdarg_p (*node));
a1ab4c31
AC
6245
6246 return NULL_TREE;
6247}
6248
2724e58f
OH
6249/* Handle a "vector_size" attribute; arguments as in
6250 struct attribute_spec.handler. */
6251
6252static tree
6253handle_vector_size_attribute (tree *node, tree name, tree args,
6254 int ARG_UNUSED (flags),
6255 bool *no_add_attrs)
6256{
6257 unsigned HOST_WIDE_INT vecsize, nunits;
6258 enum machine_mode orig_mode;
6259 tree type = *node, new_type, size;
6260
6261 *no_add_attrs = true;
6262
6263 size = TREE_VALUE (args);
6264
6265 if (!host_integerp (size, 1))
6266 {
7948ae37
OH
6267 warning (OPT_Wattributes, "%qs attribute ignored",
6268 IDENTIFIER_POINTER (name));
2724e58f
OH
6269 return NULL_TREE;
6270 }
6271
6272 /* Get the vector size (in bytes). */
6273 vecsize = tree_low_cst (size, 1);
6274
6275 /* We need to provide for vector pointers, vector arrays, and
6276 functions returning vectors. For example:
6277
6278 __attribute__((vector_size(16))) short *foo;
6279
6280 In this case, the mode is SI, but the type being modified is
6281 HI, so we need to look further. */
6282
6283 while (POINTER_TYPE_P (type)
6284 || TREE_CODE (type) == FUNCTION_TYPE
132a5459 6285 || TREE_CODE (type) == ARRAY_TYPE)
2724e58f
OH
6286 type = TREE_TYPE (type);
6287
6288 /* Get the mode of the type being modified. */
6289 orig_mode = TYPE_MODE (type);
6290
6291 if ((!INTEGRAL_TYPE_P (type)
6292 && !SCALAR_FLOAT_TYPE_P (type)
6293 && !FIXED_POINT_TYPE_P (type))
6294 || (!SCALAR_FLOAT_MODE_P (orig_mode)
6295 && GET_MODE_CLASS (orig_mode) != MODE_INT
6296 && !ALL_SCALAR_FIXED_POINT_MODE_P (orig_mode))
6297 || !host_integerp (TYPE_SIZE_UNIT (type), 1)
6298 || TREE_CODE (type) == BOOLEAN_TYPE)
6299 {
7948ae37
OH
6300 error ("invalid vector type for attribute %qs",
6301 IDENTIFIER_POINTER (name));
2724e58f
OH
6302 return NULL_TREE;
6303 }
6304
6305 if (vecsize % tree_low_cst (TYPE_SIZE_UNIT (type), 1))
6306 {
6307 error ("vector size not an integral multiple of component size");
6308 return NULL;
6309 }
6310
6311 if (vecsize == 0)
6312 {
6313 error ("zero vector size");
6314 return NULL;
6315 }
6316
6317 /* Calculate how many units fit in the vector. */
6318 nunits = vecsize / tree_low_cst (TYPE_SIZE_UNIT (type), 1);
6319 if (nunits & (nunits - 1))
6320 {
6321 error ("number of components of the vector not a power of two");
6322 return NULL_TREE;
6323 }
6324
6325 new_type = build_vector_type (type, nunits);
6326
6327 /* Build back pointers if needed. */
9b469089 6328 *node = reconstruct_complex_type (*node, new_type);
2724e58f
OH
6329
6330 return NULL_TREE;
6331}
6332
7948ae37
OH
6333/* Handle a "vector_type" attribute; arguments as in
6334 struct attribute_spec.handler. */
6335
6336static tree
6337handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6338 int ARG_UNUSED (flags),
6339 bool *no_add_attrs)
6340{
6341 /* Vector representative type and size. */
6342 tree rep_type = *node;
6343 tree rep_size = TYPE_SIZE_UNIT (rep_type);
6344 tree rep_name;
6345
6346 /* Vector size in bytes and number of units. */
6347 unsigned HOST_WIDE_INT vec_bytes, vec_units;
6348
6349 /* Vector element type and mode. */
6350 tree elem_type;
6351 enum machine_mode elem_mode;
6352
6353 *no_add_attrs = true;
6354
6355 /* Get the representative array type, possibly nested within a
6356 padding record e.g. for alignment purposes. */
6357
315cff15 6358 if (TYPE_IS_PADDING_P (rep_type))
7948ae37
OH
6359 rep_type = TREE_TYPE (TYPE_FIELDS (rep_type));
6360
6361 if (TREE_CODE (rep_type) != ARRAY_TYPE)
6362 {
6363 error ("attribute %qs applies to array types only",
6364 IDENTIFIER_POINTER (name));
6365 return NULL_TREE;
6366 }
6367
6368 /* Silently punt on variable sizes. We can't make vector types for them,
6369 need to ignore them on front-end generated subtypes of unconstrained
6370 bases, and this attribute is for binding implementors, not end-users, so
6371 we should never get there from legitimate explicit uses. */
6372
6373 if (!host_integerp (rep_size, 1))
6374 return NULL_TREE;
6375
6376 /* Get the element type/mode and check this is something we know
6377 how to make vectors of. */
6378
6379 elem_type = TREE_TYPE (rep_type);
6380 elem_mode = TYPE_MODE (elem_type);
6381
6382 if ((!INTEGRAL_TYPE_P (elem_type)
6383 && !SCALAR_FLOAT_TYPE_P (elem_type)
6384 && !FIXED_POINT_TYPE_P (elem_type))
6385 || (!SCALAR_FLOAT_MODE_P (elem_mode)
6386 && GET_MODE_CLASS (elem_mode) != MODE_INT
6387 && !ALL_SCALAR_FIXED_POINT_MODE_P (elem_mode))
6388 || !host_integerp (TYPE_SIZE_UNIT (elem_type), 1))
6389 {
6390 error ("invalid element type for attribute %qs",
6391 IDENTIFIER_POINTER (name));
6392 return NULL_TREE;
6393 }
6394
6395 /* Sanity check the vector size and element type consistency. */
6396
6397 vec_bytes = tree_low_cst (rep_size, 1);
6398
6399 if (vec_bytes % tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1))
6400 {
6401 error ("vector size not an integral multiple of component size");
6402 return NULL;
6403 }
6404
6405 if (vec_bytes == 0)
6406 {
6407 error ("zero vector size");
6408 return NULL;
6409 }
6410
6411 vec_units = vec_bytes / tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1);
6412 if (vec_units & (vec_units - 1))
6413 {
6414 error ("number of components of the vector not a power of two");
6415 return NULL_TREE;
6416 }
6417
6418 /* Build the vector type and replace. */
6419
6420 *node = build_vector_type (elem_type, vec_units);
6421 rep_name = TYPE_NAME (rep_type);
6422 if (TREE_CODE (rep_name) == TYPE_DECL)
6423 rep_name = DECL_NAME (rep_name);
6424 TYPE_NAME (*node) = rep_name;
6425 TYPE_REPRESENTATIVE_ARRAY (*node) = rep_type;
6426
6427 return NULL_TREE;
6428}
6429
a1ab4c31
AC
6430/* ----------------------------------------------------------------------- *
6431 * BUILTIN FUNCTIONS *
6432 * ----------------------------------------------------------------------- */
6433
6434/* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
6435 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
6436 if nonansi_p and flag_no_nonansi_builtin. */
6437
6438static void
6439def_builtin_1 (enum built_in_function fncode,
6440 const char *name,
6441 enum built_in_class fnclass,
6442 tree fntype, tree libtype,
6443 bool both_p, bool fallback_p,
6444 bool nonansi_p ATTRIBUTE_UNUSED,
6445 tree fnattrs, bool implicit_p)
6446{
6447 tree decl;
6448 const char *libname;
6449
6450 /* Preserve an already installed decl. It most likely was setup in advance
6451 (e.g. as part of the internal builtins) for specific reasons. */
e79983f4 6452 if (builtin_decl_explicit (fncode) != NULL_TREE)
a1ab4c31
AC
6453 return;
6454
6455 gcc_assert ((!both_p && !fallback_p)
6456 || !strncmp (name, "__builtin_",
6457 strlen ("__builtin_")));
6458
6459 libname = name + strlen ("__builtin_");
6460 decl = add_builtin_function (name, fntype, fncode, fnclass,
6461 (fallback_p ? libname : NULL),
6462 fnattrs);
6463 if (both_p)
6464 /* ??? This is normally further controlled by command-line options
6465 like -fno-builtin, but we don't have them for Ada. */
6466 add_builtin_function (libname, libtype, fncode, fnclass,
6467 NULL, fnattrs);
6468
e79983f4 6469 set_builtin_decl (fncode, decl, implicit_p);
a1ab4c31
AC
6470}
6471
6472static int flag_isoc94 = 0;
6473static int flag_isoc99 = 0;
6474
6475/* Install what the common builtins.def offers. */
6476
6477static void
6478install_builtin_functions (void)
6479{
6480#define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
6481 NONANSI_P, ATTRS, IMPLICIT, COND) \
6482 if (NAME && COND) \
6483 def_builtin_1 (ENUM, NAME, CLASS, \
6484 builtin_types[(int) TYPE], \
6485 builtin_types[(int) LIBTYPE], \
6486 BOTH_P, FALLBACK_P, NONANSI_P, \
6487 built_in_attributes[(int) ATTRS], IMPLICIT);
6488#include "builtins.def"
6489#undef DEF_BUILTIN
6490}
6491
6492/* ----------------------------------------------------------------------- *
6493 * BUILTIN FUNCTIONS *
6494 * ----------------------------------------------------------------------- */
6495
6496/* Install the builtin functions we might need. */
6497
6498void
6499gnat_install_builtins (void)
6500{
6501 install_builtin_elementary_types ();
6502 install_builtin_function_types ();
6503 install_builtin_attributes ();
6504
6505 /* Install builtins used by generic middle-end pieces first. Some of these
6506 know about internal specificities and control attributes accordingly, for
6507 instance __builtin_alloca vs no-throw and -fstack-check. We will ignore
6508 the generic definition from builtins.def. */
384c400a 6509 build_common_builtin_nodes ();
a1ab4c31
AC
6510
6511 /* Now, install the target specific builtins, such as the AltiVec family on
6512 ppc, and the common set as exposed by builtins.def. */
6513 targetm.init_builtins ();
6514 install_builtin_functions ();
6515}
6516
6517#include "gt-ada-utils.h"
6518#include "gtype-ada.h"
This page took 2.299386 seconds and 5 git commands to generate.