gcc/ada/gcc-interface/utils.c
1/****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
 9 * Copyright (C) 1992-2013, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
25
26#include "config.h"
27#include "system.h"
28#include "coretypes.h"
29#include "tm.h"
30#include "tree.h"
31#include "flags.h"
 32#include "toplev.h"
 33#include "diagnostic-core.h"
34#include "output.h"
35#include "ggc.h"
36#include "debug.h"
37#include "convert.h"
38#include "target.h"
 39#include "common/common-target.h"
 40#include "langhooks.h"
 41#include "cgraph.h"
 42#include "diagnostic.h"
 43#include "timevar.h"
 44#include "tree-dump.h"
45#include "tree-inline.h"
46#include "tree-iterator.h"
47
48#include "ada.h"
49#include "types.h"
50#include "atree.h"
51#include "elists.h"
52#include "namet.h"
53#include "nlists.h"
54#include "stringt.h"
55#include "uintp.h"
56#include "fe.h"
57#include "sinfo.h"
58#include "einfo.h"
59#include "ada-tree.h"
60#include "gigi.h"
61
62/* If nonzero, pretend we are allocating at global level. */
63int force_global;
64
65/* The default alignment of "double" floating-point types, i.e. floating
66 point types whose size is equal to 64 bits, or 0 if this alignment is
67 not specifically capped. */
68int double_float_alignment;
69
70/* The default alignment of "double" or larger scalar types, i.e. scalar
71 types whose size is greater or equal to 64 bits, or 0 if this alignment
72 is not specifically capped. */
73int double_scalar_alignment;
74
75/* Tree nodes for the various types and decls we create. */
76tree gnat_std_decls[(int) ADT_LAST];
77
78/* Functions to call for each of the possible raise reasons. */
79tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
80
 81/* Likewise, but with extra info for each of the possible raise reasons. */
82tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];
83
84/* Forward declarations for handlers of attributes. */
85static tree handle_const_attribute (tree *, tree, tree, int, bool *);
86static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
87static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
88static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
89static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
90static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
91static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
 92static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
93static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
94static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
 95static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
 96static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);
97
98/* Fake handler for attributes we don't properly support, typically because
99 they'd require dragging a lot of the common-c front-end circuitry. */
100static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
101
102/* Table of machine-independent internal attributes for Ada. We support
103 this minimal set of attributes to accommodate the needs of builtins. */
104const struct attribute_spec gnat_internal_attribute_table[] =
105{
106 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
107 affects_type_identity } */
108 { "const", 0, 0, true, false, false, handle_const_attribute,
109 false },
110 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute,
111 false },
112 { "pure", 0, 0, true, false, false, handle_pure_attribute,
113 false },
114 { "no vops", 0, 0, true, false, false, handle_novops_attribute,
115 false },
116 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute,
117 false },
118 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute,
119 false },
120 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute,
121 false },
122 { "leaf", 0, 0, true, false, false, handle_leaf_attribute,
123 false },
124 { "malloc", 0, 0, true, false, false, handle_malloc_attribute,
125 false },
126 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute,
127 false },
128
129 { "vector_size", 1, 1, false, true, false, handle_vector_size_attribute,
130 false },
131 { "vector_type", 0, 0, false, true, false, handle_vector_type_attribute,
132 false },
133 { "may_alias", 0, 0, false, true, false, NULL, false },
134
135 /* ??? format and format_arg are heavy and not supported, which actually
136 prevents support for stdio builtins, which we however declare as part
137 of the common builtins.def contents. */
138 { "format", 3, 3, false, true, true, fake_attribute_handler, false },
139 { "format_arg", 1, 1, false, true, true, fake_attribute_handler, false },
 140
 141 { NULL, 0, 0, false, false, false, NULL, false }
142};
143
144/* Associates a GNAT tree node to a GCC tree node. It is used in
145 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
146 of `save_gnu_tree' for more info. */
147static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
148
149#define GET_GNU_TREE(GNAT_ENTITY) \
150 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
151
152#define SET_GNU_TREE(GNAT_ENTITY,VAL) \
153 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
154
155#define PRESENT_GNU_TREE(GNAT_ENTITY) \
156 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
157
158/* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
159static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
160
161#define GET_DUMMY_NODE(GNAT_ENTITY) \
162 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
163
164#define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
165 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
166
167#define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
168 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
169
170/* This variable keeps a table for types for each precision so that we only
171 allocate each of them once. Signed and unsigned types are kept separate.
172
173 Note that these types are only used when fold-const requests something
174 special. Perhaps we should NOT share these types; we'll see how it
175 goes later. */
176static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
177
178/* Likewise for float types, but record these by mode. */
179static GTY(()) tree float_types[NUM_MACHINE_MODES];
180
181/* For each binding contour we allocate a binding_level structure to indicate
182 the binding depth. */
183
 184struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
185 /* The binding level containing this one (the enclosing binding level). */
186 struct gnat_binding_level *chain;
187 /* The BLOCK node for this level. */
188 tree block;
189 /* If nonzero, the setjmp buffer that needs to be updated for any
190 variable-sized definition within this context. */
191 tree jmpbuf_decl;
192};
193
194/* The binding level currently in effect. */
195static GTY(()) struct gnat_binding_level *current_binding_level;
196
197/* A chain of gnat_binding_level structures awaiting reuse. */
198static GTY((deletable)) struct gnat_binding_level *free_binding_level;
199
200/* The context to be used for global declarations. */
201static GTY(()) tree global_context;
202
 203/* An array of global declarations. */
 204static GTY(()) vec<tree, va_gc> *global_decls;
205
206/* An array of builtin function declarations. */
 207static GTY(()) vec<tree, va_gc> *builtin_decls;
208
209/* An array of global renaming pointers. */
 210static GTY(()) vec<tree, va_gc> *global_renaming_pointers;
211
212/* A chain of unused BLOCK nodes. */
213static GTY((deletable)) tree free_block_chain;
214
215static int pad_type_hash_marked_p (const void *p);
216static hashval_t pad_type_hash_hash (const void *p);
217static int pad_type_hash_eq (const void *p1, const void *p2);
218
219/* A hash table of padded types. It is modelled on the generic type
220 hash table in tree.c, which must thus be used as a reference. */
221struct GTY(()) pad_type_hash {
222 unsigned long hash;
223 tree type;
224};
225
226static GTY ((if_marked ("pad_type_hash_marked_p"),
227 param_is (struct pad_type_hash)))
228 htab_t pad_type_hash_table;
229
230static tree merge_sizes (tree, tree, tree, bool, bool);
231static tree compute_related_constant (tree, tree);
232static tree split_plus (tree, tree *);
233static tree float_type_for_precision (int, enum machine_mode);
234static tree convert_to_fat_pointer (tree, tree);
235static bool potential_alignment_gap (tree, tree, tree);
236\f
 237/* Initialize data structures of the utils.c module. */
238
239void
 240init_gnat_utils (void)
 241{
 242 /* Initialize the association of GNAT nodes to GCC trees. */
 243 associate_gnat_to_gnu = ggc_alloc_cleared_vec_tree (max_gnat_nodes);
244
245 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
246 dummy_node_table = ggc_alloc_cleared_vec_tree (max_gnat_nodes);
247
248 /* Initialize the hash table of padded types. */
249 pad_type_hash_table = htab_create_ggc (512, pad_type_hash_hash,
250 pad_type_hash_eq, 0);
251}
252
 253/* Destroy data structures of the utils.c module. */
254
255void
 256destroy_gnat_utils (void)
 257{
 258 /* Destroy the association of GNAT nodes to GCC trees. */
259 ggc_free (associate_gnat_to_gnu);
260 associate_gnat_to_gnu = NULL;
 261
262 /* Destroy the association of GNAT nodes to GCC trees as dummies. */
263 ggc_free (dummy_node_table);
264 dummy_node_table = NULL;
265
266 /* Destroy the hash table of padded types. */
267 htab_delete (pad_type_hash_table);
268 pad_type_hash_table = NULL;
269
270 /* Invalidate the global renaming pointers. */
271 invalidate_global_renaming_pointers ();
272}
273\f
274/* GNAT_ENTITY is a GNAT tree node for an entity. Associate GNU_DECL, a GCC
275 tree node, with GNAT_ENTITY. If GNU_DECL is not a ..._DECL node, abort.
276 If NO_CHECK is true, the latter check is suppressed.
 277
 278 If GNU_DECL is zero, reset a previous association. */
279
280void
281save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
282{
283 /* Check that GNAT_ENTITY is not already defined and that it is being set
 284 to something which is a decl. If that is not the case, this usually
285 means GNAT_ENTITY is defined twice, but occasionally is due to some
286 Gigi problem. */
287 gcc_assert (!(gnu_decl
288 && (PRESENT_GNU_TREE (gnat_entity)
289 || (!no_check && !DECL_P (gnu_decl)))));
290
291 SET_GNU_TREE (gnat_entity, gnu_decl);
292}
293
294/* GNAT_ENTITY is a GNAT tree node for an entity. Return the GCC tree node
295 that was associated with it. If there is no such tree node, abort.
296
297 In some cases, such as delayed elaboration or expressions that need to
298 be elaborated only once, GNAT_ENTITY is really not an entity. */
299
300tree
301get_gnu_tree (Entity_Id gnat_entity)
302{
303 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
304 return GET_GNU_TREE (gnat_entity);
305}
306
307/* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
308
309bool
310present_gnu_tree (Entity_Id gnat_entity)
311{
312 return PRESENT_GNU_TREE (gnat_entity);
313}
314\f
315/* Make a dummy type corresponding to GNAT_TYPE. */
316
317tree
318make_dummy_type (Entity_Id gnat_type)
319{
320 Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
321 tree gnu_type;
322
323 /* If there is an equivalent type, get its underlying type. */
324 if (Present (gnat_underlying))
 325 gnat_underlying = Gigi_Equivalent_Type (Underlying_Type (gnat_underlying));
326
327 /* If there was no equivalent type (can only happen when just annotating
328 types) or underlying type, go back to the original type. */
329 if (No (gnat_underlying))
330 gnat_underlying = gnat_type;
331
 332 /* If there is already a dummy type, use that one. Else make one. */
333 if (PRESENT_DUMMY_NODE (gnat_underlying))
334 return GET_DUMMY_NODE (gnat_underlying);
335
336 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
337 an ENUMERAL_TYPE. */
338 gnu_type = make_node (Is_Record_Type (gnat_underlying)
339 ? tree_code_for_record_type (gnat_underlying)
340 : ENUMERAL_TYPE);
341 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
342 TYPE_DUMMY_P (gnu_type) = 1;
343 TYPE_STUB_DECL (gnu_type)
344 = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
345 if (Is_By_Reference_Type (gnat_underlying))
346 TYPE_BY_REFERENCE_P (gnu_type) = 1;
347
348 SET_DUMMY_NODE (gnat_underlying, gnu_type);
349
350 return gnu_type;
351}
352
353/* Return the dummy type that was made for GNAT_TYPE, if any. */
354
355tree
356get_dummy_type (Entity_Id gnat_type)
357{
358 return GET_DUMMY_NODE (gnat_type);
359}
360
361/* Build dummy fat and thin pointer types whose designated type is specified
362 by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter. */
363
364void
365build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
366{
367 tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
368 tree gnu_fat_type, fields, gnu_object_type;
369
370 gnu_template_type = make_node (RECORD_TYPE);
371 TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
372 TYPE_DUMMY_P (gnu_template_type) = 1;
373 gnu_ptr_template = build_pointer_type (gnu_template_type);
374
375 gnu_array_type = make_node (ENUMERAL_TYPE);
376 TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
377 TYPE_DUMMY_P (gnu_array_type) = 1;
378 gnu_ptr_array = build_pointer_type (gnu_array_type);
379
380 gnu_fat_type = make_node (RECORD_TYPE);
381 /* Build a stub DECL to trigger the special processing for fat pointer types
382 in gnat_pushdecl. */
383 TYPE_NAME (gnu_fat_type)
384 = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
385 gnu_fat_type);
386 fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
387 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
388 DECL_CHAIN (fields)
389 = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
390 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
391 finish_fat_pointer_type (gnu_fat_type, fields);
392 SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
393 /* Suppress debug info until after the type is completed. */
394 TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;
395
396 gnu_object_type = make_node (RECORD_TYPE);
397 TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
398 TYPE_DUMMY_P (gnu_object_type) = 1;
399
400 TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
401 TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
402}
 403\f
 404/* Return true if we are in the global binding level. */
 405
 406bool
407global_bindings_p (void)
408{
 409 return force_global || current_function_decl == NULL_TREE;
410}
411
 412/* Enter a new binding level. */
413
414void
 415gnat_pushlevel (void)
416{
417 struct gnat_binding_level *newlevel = NULL;
418
419 /* Reuse a struct for this binding level, if there is one. */
420 if (free_binding_level)
421 {
422 newlevel = free_binding_level;
423 free_binding_level = free_binding_level->chain;
424 }
425 else
 426 newlevel = ggc_alloc_gnat_binding_level ();
427
428 /* Use a free BLOCK, if any; otherwise, allocate one. */
429 if (free_block_chain)
430 {
431 newlevel->block = free_block_chain;
432 free_block_chain = BLOCK_CHAIN (free_block_chain);
433 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
434 }
435 else
436 newlevel->block = make_node (BLOCK);
437
438 /* Point the BLOCK we just made to its parent. */
439 if (current_binding_level)
440 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
441
442 BLOCK_VARS (newlevel->block) = NULL_TREE;
443 BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
444 TREE_USED (newlevel->block) = 1;
445
 446 /* Add this level to the front of the chain (stack) of active levels. */
447 newlevel->chain = current_binding_level;
448 newlevel->jmpbuf_decl = NULL_TREE;
449 current_binding_level = newlevel;
450}
451
452/* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
453 and point FNDECL to this BLOCK. */
454
455void
456set_current_block_context (tree fndecl)
457{
458 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
459 DECL_INITIAL (fndecl) = current_binding_level->block;
 460 set_block_for_group (current_binding_level->block);
461}
462
463/* Set the jmpbuf_decl for the current binding level to DECL. */
464
465void
466set_block_jmpbuf_decl (tree decl)
467{
468 current_binding_level->jmpbuf_decl = decl;
469}
470
471/* Get the jmpbuf_decl, if any, for the current binding level. */
472
473tree
 474get_block_jmpbuf_decl (void)
475{
476 return current_binding_level->jmpbuf_decl;
477}
478
 479/* Exit a binding level. Set any BLOCK into the current code group. */
480
481void
 482gnat_poplevel (void)
483{
484 struct gnat_binding_level *level = current_binding_level;
485 tree block = level->block;
486
487 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
 488 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
489
490 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
491 are no variables free the block and merge its subblocks into those of its
 492 parent block. Otherwise, add it to the list of its parent. */
493 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
494 ;
495 else if (BLOCK_VARS (block) == NULL_TREE)
496 {
497 BLOCK_SUBBLOCKS (level->chain->block)
498 = block_chainon (BLOCK_SUBBLOCKS (block),
499 BLOCK_SUBBLOCKS (level->chain->block));
500 BLOCK_CHAIN (block) = free_block_chain;
501 free_block_chain = block;
502 }
503 else
504 {
505 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
506 BLOCK_SUBBLOCKS (level->chain->block) = block;
507 TREE_USED (block) = 1;
508 set_block_for_group (block);
509 }
510
511 /* Free this binding structure. */
512 current_binding_level = level->chain;
513 level->chain = free_binding_level;
514 free_binding_level = level;
515}
516
517/* Exit a binding level and discard the associated BLOCK. */
518
519void
520gnat_zaplevel (void)
521{
522 struct gnat_binding_level *level = current_binding_level;
523 tree block = level->block;
524
525 BLOCK_CHAIN (block) = free_block_chain;
526 free_block_chain = block;
527
528 /* Free this binding structure. */
529 current_binding_level = level->chain;
530 level->chain = free_binding_level;
531 free_binding_level = level;
532}
 533\f
534/* Record DECL as belonging to the current lexical scope and use GNAT_NODE
535 for location information and flag propagation. */
536
537void
538gnat_pushdecl (tree decl, Node_Id gnat_node)
539{
 540 /* If DECL is public external or at top level, it has global context. */
 541 if ((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || global_bindings_p ())
542 {
543 if (!global_context)
544 global_context = build_translation_unit_decl (NULL_TREE);
545 DECL_CONTEXT (decl) = global_context;
546 }
547 else
548 {
549 DECL_CONTEXT (decl) = current_function_decl;
550
551 /* Functions imported in another function are not really nested.
552 For really nested functions mark them initially as needing
553 a static chain for uses of that flag before unnesting;
554 lower_nested_functions will then recompute it. */
555 if (TREE_CODE (decl) == FUNCTION_DECL && !TREE_PUBLIC (decl))
556 DECL_STATIC_CHAIN (decl) = 1;
557 }
558
 559 TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));
560
561 /* Set the location of DECL and emit a declaration for it. */
562 if (Present (gnat_node))
563 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
 564
565 add_decl_expr (decl, gnat_node);
566
567 /* Put the declaration on the list. The list of declarations is in reverse
568 order. The list will be reversed later. Put global declarations in the
569 globals list and local ones in the current block. But skip TYPE_DECLs
570 for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
571 with the debugger and aren't needed anyway. */
572 if (!(TREE_CODE (decl) == TYPE_DECL
573 && TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
 574 {
 575 if (DECL_EXTERNAL (decl))
 576 {
 577 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
 578 vec_safe_push (builtin_decls, decl);
 579 }
 580 else if (global_bindings_p ())
 581 vec_safe_push (global_decls, decl);
 582 else
 583 {
584 DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
585 BLOCK_VARS (current_binding_level->block) = decl;
586 }
587 }
588
589 /* For the declaration of a type, set its name if it either is not already
 590 set or if the previous type name was not derived from a source name.
591 We'd rather have the type named with a real name and all the pointer
592 types to the same object have the same POINTER_TYPE node. Code in the
593 equivalent function of c-decl.c makes a copy of the type node here, but
594 that may cause us trouble with incomplete types. We make an exception
595 for fat pointer types because the compiler automatically builds them
596 for unconstrained array types and the debugger uses them to represent
597 both these and pointers to these. */
598 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
599 {
600 tree t = TREE_TYPE (decl);
601
 602 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
 603 {
 604 /* Array and pointer types aren't "tagged" types so we force the
 605 type to be associated with its typedef in the DWARF back-end,
 606 in order to make sure that the latter is always preserved. */
 607 if (!DECL_ARTIFICIAL (decl)
 608 && (TREE_CODE (t) == ARRAY_TYPE
 609 || TREE_CODE (t) == POINTER_TYPE))
 610 {
 611 tree tt = build_distinct_type_copy (t);
 612 if (TREE_CODE (t) == POINTER_TYPE)
 613 TYPE_NEXT_PTR_TO (t) = tt;
 614 TYPE_NAME (tt) = DECL_NAME (decl);
 615 TYPE_CONTEXT (tt) = DECL_CONTEXT (decl);
 616 TYPE_STUB_DECL (tt) = TYPE_STUB_DECL (t);
 617 DECL_ORIGINAL_TYPE (decl) = tt;
 618 }
 619 }
 620 else if (TYPE_IS_FAT_POINTER_P (t))
 621 {
 622 /* We need a variant for the placeholder machinery to work. */
 623 tree tt = build_variant_type_copy (t);
 624 TYPE_NAME (tt) = decl;
 625 TYPE_CONTEXT (tt) = DECL_CONTEXT (decl);
 626 TREE_USED (tt) = TREE_USED (t);
 627 TREE_TYPE (decl) = tt;
 628 if (DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
 629 DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
 630 else
 631 DECL_ORIGINAL_TYPE (decl) = t;
 632 DECL_ARTIFICIAL (decl) = 0;
 633 t = NULL_TREE;
 634 }
 635 else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
 636 ;
 637 else
 638 t = NULL_TREE;
 639
 640 /* Propagate the name to all the anonymous variants. This is needed
 641 for the type qualifiers machinery to work properly. */
 642 if (t)
 643 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
 644 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
 645 {
 646 TYPE_NAME (t) = decl;
 647 TYPE_CONTEXT (t) = DECL_CONTEXT (decl);
 648 }
 649 }
650}
651\f
652/* Create a record type that contains a SIZE bytes long field of TYPE with a
653 starting bit position so that it is aligned to ALIGN bits, and leaving at
654 least ROOM bytes free before the field. BASE_ALIGN is the alignment the
655 record is guaranteed to get. GNAT_NODE is used for the position of the
656 associated TYPE_DECL. */
657
658tree
659make_aligning_type (tree type, unsigned int align, tree size,
 660 unsigned int base_align, int room, Node_Id gnat_node)
661{
662 /* We will be crafting a record type with one field at a position set to be
663 the next multiple of ALIGN past record'address + room bytes. We use a
664 record placeholder to express record'address. */
665 tree record_type = make_node (RECORD_TYPE);
666 tree record = build0 (PLACEHOLDER_EXPR, record_type);
667
668 tree record_addr_st
669 = convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));
670
671 /* The diagram below summarizes the shape of what we manipulate:
672
673 <--------- pos ---------->
674 { +------------+-------------+-----------------+
675 record =>{ |############| ... | field (type) |
676 { +------------+-------------+-----------------+
677 |<-- room -->|<- voffset ->|<---- size ----->|
678 o o
679 | |
680 record_addr vblock_addr
681
682 Every length is in sizetype bytes there, except "pos" which has to be
683 set as a bit position in the GCC tree for the record. */
684 tree room_st = size_int (room);
685 tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
686 tree voffset_st, pos, field;
687
688 tree name = TYPE_NAME (type);
689
690 if (TREE_CODE (name) == TYPE_DECL)
691 name = DECL_NAME (name);
692 name = concat_name (name, "ALIGN");
693 TYPE_NAME (record_type) = name;
694
695 /* Compute VOFFSET and then POS. The next byte position multiple of some
696 alignment after some address is obtained by "and"ing the alignment minus
697 1 with the two's complement of the address. */
698 voffset_st = size_binop (BIT_AND_EXPR,
699 fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
700 size_int ((align / BITS_PER_UNIT) - 1));
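 /* For instance, with ALIGN == 32 bits (4 bytes) and a VBLOCK address of
    0x1006, the two's complement is 0x...FFEFFA; "and"ing it with 3 yields
    2, i.e. the 2 bytes needed to reach 0x1008, the next 4-byte boundary. */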
701
702 /* POS = (ROOM + VOFFSET) * BIT_PER_UNIT, in bitsizetype. */
703 pos = size_binop (MULT_EXPR,
704 convert (bitsizetype,
705 size_binop (PLUS_EXPR, room_st, voffset_st)),
706 bitsize_unit_node);
707
708 /* Craft the GCC record representation. We exceptionally do everything
709 manually here because 1) our generic circuitry is not quite ready to
710 handle the complex position/size expressions we are setting up, 2) we
711 have a strong simplifying factor at hand: we know the maximum possible
712 value of voffset, and 3) we have to set/reset at least the sizes in
713 accordance with this maximum value anyway, as we need them to convey
714 what should be "alloc"ated for this type.
715
716 Use -1 as the 'addressable' indication for the field to prevent the
717 creation of a bitfield. We don't need one, it would have damaging
718 consequences on the alignment computation, and create_field_decl would
719 make one without this special argument, for instance because of the
720 complex position expression. */
721 field = create_field_decl (get_identifier ("F"), type, record_type, size,
722 pos, 1, -1);
723 TYPE_FIELDS (record_type) = field;
724
725 TYPE_ALIGN (record_type) = base_align;
726 TYPE_USER_ALIGN (record_type) = 1;
727
728 TYPE_SIZE (record_type)
729 = size_binop (PLUS_EXPR,
730 size_binop (MULT_EXPR, convert (bitsizetype, size),
731 bitsize_unit_node),
732 bitsize_int (align + room * BITS_PER_UNIT));
733 TYPE_SIZE_UNIT (record_type)
734 = size_binop (PLUS_EXPR, size,
735 size_int (room + align / BITS_PER_UNIT));
736
737 SET_TYPE_MODE (record_type, BLKmode);
738 relate_alias_sets (record_type, type, ALIAS_SET_COPY);
739
740 /* Declare it now since it will never be declared otherwise. This is
741 necessary to ensure that its subtrees are properly marked. */
 742 create_type_decl (name, record_type, true, false, gnat_node);
743
744 return record_type;
745}
746
747/* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
748 as the field type of a packed record if IN_RECORD is true, or as the
749 component type of a packed array if IN_RECORD is false. See if we can
750 rewrite it either as a type that has a non-BLKmode, which we can pack
751 tighter in the packed record case, or as a smaller type. If so, return
752 the new type. If not, return the original type. */
753
754tree
755make_packable_type (tree type, bool in_record)
756{
757 unsigned HOST_WIDE_INT size = tree_low_cst (TYPE_SIZE (type), 1);
758 unsigned HOST_WIDE_INT new_size;
759 tree new_type, old_field, field_list = NULL_TREE;
760 unsigned int align;
761
762 /* No point in doing anything if the size is zero. */
763 if (size == 0)
764 return type;
765
766 new_type = make_node (TREE_CODE (type));
767
768 /* Copy the name and flags from the old type to that of the new.
769 Note that we rely on the pointer equality created here for
770 TYPE_NAME to look through conversions in various places. */
771 TYPE_NAME (new_type) = TYPE_NAME (type);
772 TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
773 TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
774 if (TREE_CODE (type) == RECORD_TYPE)
775 TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);
776
777 /* If we are in a record and have a small size, set the alignment to
778 try for an integral mode. Otherwise set it to try for a smaller
779 type with BLKmode. */
780 if (in_record && size <= MAX_FIXED_MODE_SIZE)
781 {
782 align = ceil_pow2 (size);
783 TYPE_ALIGN (new_type) = align;
784 new_size = (size + align - 1) & -align;
785 }
786 else
787 {
788 unsigned HOST_WIDE_INT align;
789
790 /* Do not try to shrink the size if the RM size is not constant. */
791 if (TYPE_CONTAINS_TEMPLATE_P (type)
792 || !host_integerp (TYPE_ADA_SIZE (type), 1))
793 return type;
794
795 /* Round the RM size up to a unit boundary to get the minimal size
796 for a BLKmode record. Give up if it's already the size. */
797 new_size = TREE_INT_CST_LOW (TYPE_ADA_SIZE (type));
798 new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
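 /* E.g. a 13-bit RM size rounds up to (13 + 7) & -8 == 16 bits here. */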
799 if (new_size == size)
800 return type;
801
802 align = new_size & -new_size;
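 /* E.g. 24 & -24 == 8, the largest power of 2 dividing a 24-bit size. */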
803 TYPE_ALIGN (new_type) = MIN (TYPE_ALIGN (type), align);
804 }
805
806 TYPE_USER_ALIGN (new_type) = 1;
807
808 /* Now copy the fields, keeping the position and size as we don't want
809 to change the layout by propagating the packedness downwards. */
810 for (old_field = TYPE_FIELDS (type); old_field;
811 old_field = DECL_CHAIN (old_field))
812 {
813 tree new_field_type = TREE_TYPE (old_field);
814 tree new_field, new_size;
815
816 if (RECORD_OR_UNION_TYPE_P (new_field_type)
817 && !TYPE_FAT_POINTER_P (new_field_type)
818 && host_integerp (TYPE_SIZE (new_field_type), 1))
819 new_field_type = make_packable_type (new_field_type, true);
820
821 /* However, for the last field in a not already packed record type
822 that is of an aggregate type, we need to use the RM size in the
823 packable version of the record type, see finish_record_type. */
824 if (!DECL_CHAIN (old_field)
825 && !TYPE_PACKED (type)
826 && RECORD_OR_UNION_TYPE_P (new_field_type)
827 && !TYPE_FAT_POINTER_P (new_field_type)
828 && !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
829 && TYPE_ADA_SIZE (new_field_type))
830 new_size = TYPE_ADA_SIZE (new_field_type);
831 else
832 new_size = DECL_SIZE (old_field);
833
834 new_field
835 = create_field_decl (DECL_NAME (old_field), new_field_type, new_type,
836 new_size, bit_position (old_field),
837 TYPE_PACKED (type),
838 !DECL_NONADDRESSABLE_P (old_field));
839
840 DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (old_field);
841 SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, old_field);
842 if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
843 DECL_QUALIFIER (new_field) = DECL_QUALIFIER (old_field);
844
845 DECL_CHAIN (new_field) = field_list;
846 field_list = new_field;
847 }
848
849 finish_record_type (new_type, nreverse (field_list), 2, false);
850 relate_alias_sets (new_type, type, ALIAS_SET_COPY);
851 SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
852 DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));
853
854 /* If this is a padding record, we never want to make the size smaller
855 than what was specified. For QUAL_UNION_TYPE, also copy the size. */
856 if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
857 {
858 TYPE_SIZE (new_type) = TYPE_SIZE (type);
859 TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
860 new_size = size;
861 }
862 else
863 {
864 TYPE_SIZE (new_type) = bitsize_int (new_size);
865 TYPE_SIZE_UNIT (new_type)
866 = size_int ((new_size + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
867 }
868
869 if (!TYPE_CONTAINS_TEMPLATE_P (type))
870 SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));
871
872 compute_record_mode (new_type);
873
874 /* Try harder to get a packable type if necessary, for example
875 in case the record itself contains a BLKmode field. */
876 if (in_record && TYPE_MODE (new_type) == BLKmode)
877 SET_TYPE_MODE (new_type,
878 mode_for_size_tree (TYPE_SIZE (new_type), MODE_INT, 1));
879
880 /* If neither the mode nor the size has shrunk, return the old type. */
881 if (TYPE_MODE (new_type) == BLKmode && new_size >= size)
882 return type;
883
884 return new_type;
885}
886
887/* Given a type TYPE, return a new type whose size is appropriate for SIZE.
888 If TYPE is the best type, return it. Otherwise, make a new type. We
889 only support new integral and pointer types. FOR_BIASED is true if
890 we are making a biased type. */
891
892tree
893make_type_from_size (tree type, tree size_tree, bool for_biased)
894{
895 unsigned HOST_WIDE_INT size;
896 bool biased_p;
897 tree new_type;
898
899 /* If size indicates an error, just return TYPE to avoid propagating
900 the error. Likewise if it's too large to represent. */
901 if (!size_tree || !host_integerp (size_tree, 1))
902 return type;
903
904 size = tree_low_cst (size_tree, 1);
905
906 switch (TREE_CODE (type))
907 {
908 case INTEGER_TYPE:
909 case ENUMERAL_TYPE:
910 case BOOLEAN_TYPE:
911 biased_p = (TREE_CODE (type) == INTEGER_TYPE
912 && TYPE_BIASED_REPRESENTATION_P (type));
913
914 /* Integer types with precision 0 are forbidden. */
915 if (size == 0)
916 size = 1;
917
918 /* Only do something if the type isn't a packed array type and doesn't
919 already have the proper size and the size isn't too large. */
920 if (TYPE_IS_PACKED_ARRAY_TYPE_P (type)
921 || (TYPE_PRECISION (type) == size && biased_p == for_biased)
922 || size > LONG_LONG_TYPE_SIZE)
923 break;
924
925 biased_p |= for_biased;
926 if (TYPE_UNSIGNED (type) || biased_p)
927 new_type = make_unsigned_type (size);
928 else
929 new_type = make_signed_type (size);
930 TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
931 SET_TYPE_RM_MIN_VALUE (new_type,
932 convert (TREE_TYPE (new_type),
933 TYPE_MIN_VALUE (type)));
934 SET_TYPE_RM_MAX_VALUE (new_type,
935 convert (TREE_TYPE (new_type),
936 TYPE_MAX_VALUE (type)));
937 /* Copy the name to show that it's essentially the same type and
938 not a subrange type. */
939 TYPE_NAME (new_type) = TYPE_NAME (type);
940 TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
941 SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
942 return new_type;
943
944 case RECORD_TYPE:
945 /* Do something if this is a fat pointer, in which case we
946 may need to return the thin pointer. */
947 if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
948 {
949 enum machine_mode p_mode = mode_for_size (size, MODE_INT, 0);
950 if (!targetm.valid_pointer_mode (p_mode))
951 p_mode = ptr_mode;
952 return
953 build_pointer_type_for_mode
954 (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
955 p_mode, 0);
956 }
957 break;
958
959 case POINTER_TYPE:
960 /* Only do something if this is a thin pointer, in which case we
961 may need to return the fat pointer. */
962 if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
963 return
964 build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
965 break;
966
967 default:
968 break;
969 }
970
971 return type;
972}
973
974/* See if the data pointed to by the hash table slot is marked. */
975
976static int
977pad_type_hash_marked_p (const void *p)
978{
979 const_tree const type = ((const struct pad_type_hash *) p)->type;
980
981 return ggc_marked_p (type);
982}
983
984/* Return the cached hash value. */
985
986static hashval_t
987pad_type_hash_hash (const void *p)
988{
989 return ((const struct pad_type_hash *) p)->hash;
990}
991
992/* Return 1 iff the padded types are equivalent. */
993
994static int
995pad_type_hash_eq (const void *p1, const void *p2)
996{
997 const struct pad_type_hash *const t1 = (const struct pad_type_hash *) p1;
998 const struct pad_type_hash *const t2 = (const struct pad_type_hash *) p2;
999 tree type1, type2;
1000
1001 if (t1->hash != t2->hash)
1002 return 0;
1003
1004 type1 = t1->type;
1005 type2 = t2->type;
1006
1007 /* We consider that the padded types are equivalent if they pad the same
1008 type and have the same size, alignment and RM size. Taking the mode
1009 into account is redundant since it is determined by the others. */
1010 return
1011 TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
1012 && TYPE_SIZE (type1) == TYPE_SIZE (type2)
1013 && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
1014 && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2);
1015}
1016
1017/* Ensure that TYPE has SIZE and ALIGN. Make and return a new padded type
1018 if needed. We have already verified that SIZE and TYPE are large enough.
1019 GNAT_ENTITY is used to name the resulting record and to issue a warning.
1020 IS_COMPONENT_TYPE is true if this is being done for the component type of
1021 an array. IS_USER_TYPE is true if the original type needs to be completed.
1022 DEFINITION is true if this type is being defined. SET_RM_SIZE is true if
1023 the RM size of the resulting type is to be set to SIZE too. */
1024
1025tree
1026maybe_pad_type (tree type, tree size, unsigned int align,
1027 Entity_Id gnat_entity, bool is_component_type,
1028 bool is_user_type, bool definition, bool set_rm_size)
1029{
1030 tree orig_size = TYPE_SIZE (type);
1031 tree record, field;
1032
1033 /* If TYPE is a padded type, see if it agrees with any size and alignment
1034 we were given. If so, return the original type. Otherwise, strip
1035 off the padding, since we will either be returning the inner type
1036 or repadding it. If no size or alignment is specified, use that of
1037 the original padded type. */
1038 if (TYPE_IS_PADDING_P (type))
1039 {
1040 if ((!size
1041 || operand_equal_p (round_up (size,
1042 MAX (align, TYPE_ALIGN (type))),
1043 round_up (TYPE_SIZE (type),
1044 MAX (align, TYPE_ALIGN (type))),
1045 0))
1046 && (align == 0 || align == TYPE_ALIGN (type)))
1047 return type;
1048
1049 if (!size)
1050 size = TYPE_SIZE (type);
1051 if (align == 0)
1052 align = TYPE_ALIGN (type);
1053
1054 type = TREE_TYPE (TYPE_FIELDS (type));
1055 orig_size = TYPE_SIZE (type);
1056 }
1057
1058 /* If the size is either not being changed or is being made smaller (which
1059 is not done here and is only valid for bitfields anyway), show the size
1060 isn't changing. Likewise, clear the alignment if it isn't being
1061 changed. Then return if we aren't doing anything. */
1062 if (size
1063 && (operand_equal_p (size, orig_size, 0)
1064 || (TREE_CODE (orig_size) == INTEGER_CST
1065 && tree_int_cst_lt (size, orig_size))))
1066 size = NULL_TREE;
1067
1068 if (align == TYPE_ALIGN (type))
1069 align = 0;
1070
1071 if (align == 0 && !size)
1072 return type;
1073
1074 /* If requested, complete the original type and give it a name. */
1075 if (is_user_type)
1076 create_type_decl (get_entity_name (gnat_entity), type,
 1077 !Comes_From_Source (gnat_entity),
1078 !(TYPE_NAME (type)
1079 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1080 && DECL_IGNORED_P (TYPE_NAME (type))),
1081 gnat_entity);
1082
1083 /* We used to modify the record in place in some cases, but that could
1084 generate incorrect debugging information. So make a new record
1085 type and name. */
1086 record = make_node (RECORD_TYPE);
1087 TYPE_PADDING_P (record) = 1;
1088
1089 if (Present (gnat_entity))
1090 TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD");
1091
1092 TYPE_ALIGN (record) = align;
1093 TYPE_SIZE (record) = size ? size : orig_size;
1094 TYPE_SIZE_UNIT (record)
1095 = convert (sizetype,
1096 size_binop (CEIL_DIV_EXPR, TYPE_SIZE (record),
1097 bitsize_unit_node));
1098
1099 /* If we are changing the alignment and the input type is a record with
1100 BLKmode and a small constant size, try to make a form that has an
1101 integral mode. This might allow the padding record to also have an
1102 integral mode, which will be much more efficient. There is no point
1103 in doing so if a size is specified unless it is also a small constant
1104 size and it is incorrect to do so if we cannot guarantee that the mode
1105 will be naturally aligned since the field must always be addressable.
1106
1107 ??? This might not always be a win when done for a stand-alone object:
1108 since the nominal and the effective type of the object will now have
1109 different modes, a VIEW_CONVERT_EXPR will be required for converting
1110 between them and it might be hard to overcome afterwards, including
1111 at the RTL level when the stand-alone object is accessed as a whole. */
1112 if (align != 0
1113 && RECORD_OR_UNION_TYPE_P (type)
1114 && TYPE_MODE (type) == BLKmode
1115 && !TYPE_BY_REFERENCE_P (type)
1116 && TREE_CODE (orig_size) == INTEGER_CST
1117 && !TREE_OVERFLOW (orig_size)
1118 && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
1119 && (!size
1120 || (TREE_CODE (size) == INTEGER_CST
1121 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
1122 {
1123 tree packable_type = make_packable_type (type, true);
1124 if (TYPE_MODE (packable_type) != BLKmode
1125 && align >= TYPE_ALIGN (packable_type))
1126 type = packable_type;
1127 }
1128
1129 /* Now create the field with the original size. */
1130 field = create_field_decl (get_identifier ("F"), type, record, orig_size,
1131 bitsize_zero_node, 0, 1);
1132 DECL_INTERNAL_P (field) = 1;
1133
1134 /* Do not emit debug info until after the auxiliary record is built. */
1135 finish_record_type (record, field, 1, false);
1136
1137 /* Set the RM size if requested. */
1138 if (set_rm_size)
1139 {
1140 SET_TYPE_ADA_SIZE (record, size ? size : orig_size);
1141
1142 /* If the padded type is complete and has constant size, we canonicalize
1143 it by means of the hash table. This is consistent with the language
1144 semantics and ensures that gigi and the middle-end have a common view
1145 of these padded types. */
1146 if (TREE_CONSTANT (TYPE_SIZE (record)))
1147 {
1148 hashval_t hashcode;
1149 struct pad_type_hash in, *h;
1150 void **loc;
1151
1152 hashcode = iterative_hash_object (TYPE_HASH (type), 0);
1153 hashcode = iterative_hash_expr (TYPE_SIZE (record), hashcode);
1154 hashcode = iterative_hash_hashval_t (TYPE_ALIGN (record), hashcode);
1155 hashcode = iterative_hash_expr (TYPE_ADA_SIZE (record), hashcode);
1156
1157 in.hash = hashcode;
1158 in.type = record;
1159 h = (struct pad_type_hash *)
1160 htab_find_with_hash (pad_type_hash_table, &in, hashcode);
1161 if (h)
1162 {
1163 record = h->type;
1164 goto built;
1165 }
1166
1167 h = ggc_alloc_pad_type_hash ();
1168 h->hash = hashcode;
1169 h->type = record;
1170 loc = htab_find_slot_with_hash (pad_type_hash_table, h, hashcode,
1171 INSERT);
1172 *loc = (void *)h;
1173 }
1174 }
1175
1176 /* Unless debugging information isn't being written for the input type,
1177 write a record that shows what we are a subtype of and also make a
1178 variable that indicates our size, if still variable. */
1179 if (TREE_CODE (orig_size) != INTEGER_CST
1180 && TYPE_NAME (record)
1181 && TYPE_NAME (type)
1182 && !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1183 && DECL_IGNORED_P (TYPE_NAME (type))))
1184 {
1185 tree marker = make_node (RECORD_TYPE);
1186 tree name = TYPE_NAME (record);
1187 tree orig_name = TYPE_NAME (type);
1188
1189 if (TREE_CODE (name) == TYPE_DECL)
1190 name = DECL_NAME (name);
1191
1192 if (TREE_CODE (orig_name) == TYPE_DECL)
1193 orig_name = DECL_NAME (orig_name);
1194
1195 TYPE_NAME (marker) = concat_name (name, "XVS");
1196 finish_record_type (marker,
1197 create_field_decl (orig_name,
1198 build_reference_type (type),
1199 marker, NULL_TREE, NULL_TREE,
1200 0, 0),
1201 0, true);
1202
1203 add_parallel_type (record, marker);
1204
1205 if (definition && size && TREE_CODE (size) != INTEGER_CST)
1206 TYPE_SIZE_UNIT (marker)
1207 = create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
1208 TYPE_SIZE_UNIT (record), false, false, false,
1209 false, NULL, gnat_entity);
1210 }
1211
1212 rest_of_record_type_compilation (record);
1213
1214built:
1215 /* If the size was widened explicitly, maybe give a warning. Take the
1216 original size as the maximum size of the input if there was an
1217 unconstrained record involved and round it up to the specified alignment,
1218 if one was specified. But don't do it if we are just annotating types
1219 and the type is tagged, since tagged types aren't fully laid out in this
1220 mode. */
1221 if (!size
1222 || TREE_CODE (size) == COND_EXPR
1223 || TREE_CODE (size) == MAX_EXPR
1224 || No (gnat_entity)
1225 || (type_annotate_only && Is_Tagged_Type (Etype (gnat_entity))))
1226 return record;
1227
1228 if (CONTAINS_PLACEHOLDER_P (orig_size))
1229 orig_size = max_size (orig_size, true);
1230
1231 if (align)
1232 orig_size = round_up (orig_size, align);
1233
1234 if (!operand_equal_p (size, orig_size, 0)
1235 && !(TREE_CODE (size) == INTEGER_CST
1236 && TREE_CODE (orig_size) == INTEGER_CST
1237 && (TREE_OVERFLOW (size)
1238 || TREE_OVERFLOW (orig_size)
1239 || tree_int_cst_lt (size, orig_size))))
1240 {
1241 Node_Id gnat_error_node = Empty;
1242
1243 if (Is_Packed_Array_Type (gnat_entity))
1244 gnat_entity = Original_Array_Type (gnat_entity);
1245
1246 if ((Ekind (gnat_entity) == E_Component
1247 || Ekind (gnat_entity) == E_Discriminant)
1248 && Present (Component_Clause (gnat_entity)))
1249 gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
1250 else if (Present (Size_Clause (gnat_entity)))
1251 gnat_error_node = Expression (Size_Clause (gnat_entity));
1252
1253 /* Generate message only for entities that come from source, since
1254 if we have an entity created by expansion, the message will be
1255 generated for some other corresponding source entity. */
1256 if (Comes_From_Source (gnat_entity))
1257 {
1258 if (Present (gnat_error_node))
1259 post_error_ne_tree ("{^ }bits of & unused?",
1260 gnat_error_node, gnat_entity,
1261 size_diffop (size, orig_size));
1262 else if (is_component_type)
1263 post_error_ne_tree ("component of& padded{ by ^ bits}?",
1264 gnat_entity, gnat_entity,
1265 size_diffop (size, orig_size));
1266 }
1267 }
1268
1269 return record;
1270}
1271\f
1272/* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
1273 If this is a multi-dimensional array type, do this recursively.
1274
1275 OP may be
1276 - ALIAS_SET_COPY: the new set is made a copy of the old one.
1277 - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
1278 - ALIAS_SET_SUBSET: the new set is made a subset of the old one. */
1279
1280void
1281relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
1282{
1283 /* Remove any padding from GNU_OLD_TYPE. It doesn't matter in the case
1284 of a one-dimensional array, since the padding has the same alias set
1285 as the field type, but if it's a multi-dimensional array, we need to
1286 see the inner types. */
1287 while (TREE_CODE (gnu_old_type) == RECORD_TYPE
1288 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
1289 || TYPE_PADDING_P (gnu_old_type)))
1290 gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));
1291
1292 /* Unconstrained array types are deemed incomplete and would thus be given
1293 alias set 0. Retrieve the underlying array type. */
1294 if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
1295 gnu_old_type
1296 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
1297 if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
1298 gnu_new_type
1299 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));
1300
1301 if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
1302 && TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
1303 && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
1304 relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);
1305
1306 switch (op)
1307 {
1308 case ALIAS_SET_COPY:
1309 /* The alias set shouldn't be copied between array types with different
1310 aliasing settings because this can break the aliasing relationship
1311 between the array type and its element type. */
1312#ifndef ENABLE_CHECKING
1313 if (flag_strict_aliasing)
1314#endif
1315 gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
1316 && TREE_CODE (gnu_old_type) == ARRAY_TYPE
1317 && TYPE_NONALIASED_COMPONENT (gnu_new_type)
1318 != TYPE_NONALIASED_COMPONENT (gnu_old_type)));
1319
1320 TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
1321 break;
1322
1323 case ALIAS_SET_SUBSET:
1324 case ALIAS_SET_SUPERSET:
1325 {
1326 alias_set_type old_set = get_alias_set (gnu_old_type);
1327 alias_set_type new_set = get_alias_set (gnu_new_type);
1328
1329 /* Do nothing if the alias sets conflict. This ensures that we
1330 never call record_alias_subset several times for the same pair
1331 or at all for alias set 0. */
1332 if (!alias_sets_conflict_p (old_set, new_set))
1333 {
1334 if (op == ALIAS_SET_SUBSET)
1335 record_alias_subset (old_set, new_set);
1336 else
1337 record_alias_subset (new_set, old_set);
1338 }
1339 }
1340 break;
1341
1342 default:
1343 gcc_unreachable ();
1344 }
1345
1346 record_component_aliases (gnu_new_type);
1347}
1348\f
1349/* Record TYPE as a builtin type for Ada. NAME is the name of the type.
1350 ARTIFICIAL_P is true if it's a type that was generated by the compiler. */
1351
1352void
 1353record_builtin_type (const char *name, tree type, bool artificial_p)
 1354{
1355 tree type_decl = build_decl (input_location,
1356 TYPE_DECL, get_identifier (name), type);
 1357 DECL_ARTIFICIAL (type_decl) = artificial_p;
 1358 TYPE_ARTIFICIAL (type) = artificial_p;
 1359 gnat_pushdecl (type_decl, Empty);
 1360
1361 if (debug_hooks->type_decl)
1362 debug_hooks->type_decl (type_decl, false);
1363}
1364\f
1365/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1366 finish constructing the record type as a fat pointer type. */
1367
1368void
1369finish_fat_pointer_type (tree record_type, tree field_list)
1370{
1371 /* Make sure we can put it into a register. */
1372 if (STRICT_ALIGNMENT)
1373 TYPE_ALIGN (record_type) = MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE);
1374
1375 /* Show what it really is. */
1376 TYPE_FAT_POINTER_P (record_type) = 1;
1377
1378 /* Do not emit debug info for it since the types of its fields may still be
1379 incomplete at this point. */
1380 finish_record_type (record_type, field_list, 0, false);
1381
1382 /* Force type_contains_placeholder_p to return true on it. Although the
1383 PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
1384 type but the representation of the unconstrained array. */
1385 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
1386}
1387
 1388/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1389 finish constructing the record or union type. If REP_LEVEL is zero, this
1390 record has no representation clause and so will be entirely laid out here.
1391 If REP_LEVEL is one, this record has a representation clause and has been
1392 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
1393 this record is derived from a parent record and thus inherits its layout;
1394 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
1395 we need to write debug information about this type. */
1396
1397void
1398finish_record_type (tree record_type, tree field_list, int rep_level,
1399 bool debug_info_p)
1400{
1401 enum tree_code code = TREE_CODE (record_type);
1402 tree name = TYPE_NAME (record_type);
1403 tree ada_size = bitsize_zero_node;
1404 tree size = bitsize_zero_node;
1405 bool had_size = TYPE_SIZE (record_type) != 0;
1406 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
1407 bool had_align = TYPE_ALIGN (record_type) != 0;
1408 tree field;
1409
 1410 TYPE_FIELDS (record_type) = field_list;
 1411
1412 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
1413 generate debug info and have a parallel type. */
1414 if (name && TREE_CODE (name) == TYPE_DECL)
1415 name = DECL_NAME (name);
1416 TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);
1417
1418 /* Globally initialize the record first. If this is a rep'ed record,
1419 that just means some initializations; otherwise, layout the record. */
1420 if (rep_level > 0)
1421 {
1422 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
1423
1424 if (!had_size_unit)
1425 TYPE_SIZE_UNIT (record_type) = size_zero_node;
 1426
1427 if (!had_size)
1428 TYPE_SIZE (record_type) = bitsize_zero_node;
1429
1430 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
1431 out just like a UNION_TYPE, since the size will be fixed. */
1432 else if (code == QUAL_UNION_TYPE)
1433 code = UNION_TYPE;
1434 }
1435 else
1436 {
1437 /* Ensure there isn't a size already set. There can be in an error
1438 case where there is a rep clause but all fields have errors and
1439 no longer have a position. */
1440 TYPE_SIZE (record_type) = 0;
1441
1442 /* Ensure we use the traditional GCC layout for bitfields when we need
1443 to pack the record type or have a representation clause. The other
1444 possible layout (Microsoft C compiler), if available, would prevent
1445 efficient packing in almost all cases. */
1446#ifdef TARGET_MS_BITFIELD_LAYOUT
1447 if (TARGET_MS_BITFIELD_LAYOUT && TYPE_PACKED (record_type))
1448 decl_attributes (&record_type,
1449 tree_cons (get_identifier ("gcc_struct"),
1450 NULL_TREE, NULL_TREE),
1451 ATTR_FLAG_TYPE_IN_PLACE);
1452#endif
1453
1454 layout_type (record_type);
1455 }
1456
1457 /* At this point, the position and size of each field is known. It was
1458 either set before entry by a rep clause, or by laying out the type above.
1459
1460 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
1461 to compute the Ada size; the GCC size and alignment (for rep'ed records
1462 that are not padding types); and the mode (for rep'ed records). We also
1463 clear the DECL_BIT_FIELD indication for the cases we know have not been
1464 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
1465
1466 if (code == QUAL_UNION_TYPE)
032d1b71 1467 field_list = nreverse (field_list);
a1ab4c31 1468
910ad8de 1469 for (field = field_list; field; field = DECL_CHAIN (field))
a1ab4c31
AC
1470 {
1471 tree type = TREE_TYPE (field);
1472 tree pos = bit_position (field);
1473 tree this_size = DECL_SIZE (field);
1474 tree this_ada_size;
1475
e1e5852c 1476 if (RECORD_OR_UNION_TYPE_P (type)
315cff15 1477 && !TYPE_FAT_POINTER_P (type)
a1ab4c31
AC
1478 && !TYPE_CONTAINS_TEMPLATE_P (type)
1479 && TYPE_ADA_SIZE (type))
1480 this_ada_size = TYPE_ADA_SIZE (type);
1481 else
1482 this_ada_size = this_size;
1483
1484 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
1485 if (DECL_BIT_FIELD (field)
1486 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
1487 {
1488 unsigned int align = TYPE_ALIGN (type);
1489
1490 /* In the general case, type alignment is required. */
1491 if (value_factor_p (pos, align))
1492 {
1493 /* The enclosing record type must be sufficiently aligned.
1494 Otherwise, if no alignment was specified for it and it
1495 has been laid out already, bump its alignment to the
1496 desired one if this is compatible with its size. */
1497 if (TYPE_ALIGN (record_type) >= align)
1498 {
1499 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
1500 DECL_BIT_FIELD (field) = 0;
1501 }
1502 else if (!had_align
1503 && rep_level == 0
1504 && value_factor_p (TYPE_SIZE (record_type), align))
1505 {
1506 TYPE_ALIGN (record_type) = align;
1507 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
1508 DECL_BIT_FIELD (field) = 0;
1509 }
1510 }
1511
1512 /* In the non-strict alignment case, only byte alignment is required. */
1513 if (!STRICT_ALIGNMENT
1514 && DECL_BIT_FIELD (field)
1515 && value_factor_p (pos, BITS_PER_UNIT))
1516 DECL_BIT_FIELD (field) = 0;
1517 }
1518
c1abd261
EB
1519 /* If we still have DECL_BIT_FIELD set at this point, we know that the
1520 field is technically not addressable. Except that it can actually
1521 be addressed if it is BLKmode and happens to be properly aligned. */
1522 if (DECL_BIT_FIELD (field)
1523 && !(DECL_MODE (field) == BLKmode
1524 && value_factor_p (pos, BITS_PER_UNIT)))
1525 DECL_NONADDRESSABLE_P (field) = 1;
a1ab4c31
AC
1526
1527 /* A type must be as aligned as its most aligned field that is not
1528 a bit-field. But this is already enforced by layout_type. */
1529 if (rep_level > 0 && !DECL_BIT_FIELD (field))
1530 TYPE_ALIGN (record_type)
1531 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
1532
1533 switch (code)
1534 {
1535 case UNION_TYPE:
1536 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
1537 size = size_binop (MAX_EXPR, size, this_size);
1538 break;
1539
1540 case QUAL_UNION_TYPE:
1541 ada_size
1542 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1543 this_ada_size, ada_size);
1544 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1545 this_size, size);
1546 break;
1547
1548 case RECORD_TYPE:
1549 /* Since we know here that all fields are sorted in order of
1550 increasing bit position, the size of the record is one
1551 higher than the ending bit of the last field processed
1552 unless we have a rep clause, since in that case we might
1553 have a field outside a QUAL_UNION_TYPE that has a higher ending
1554 position. So use a MAX in that case. Also, if this field's type is a
1555 QUAL_UNION_TYPE, we need to take into account the previous size in
1556 the case of empty variants. */
1557 ada_size
1558 = merge_sizes (ada_size, pos, this_ada_size,
1559 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1560 size
1561 = merge_sizes (size, pos, this_size,
1562 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1563 break;
1564
1565 default:
1566 gcc_unreachable ();
1567 }
1568 }
1569
1570 if (code == QUAL_UNION_TYPE)
032d1b71 1571 nreverse (field_list);
a1ab4c31
AC
1572
1573 if (rep_level < 2)
1574 {
1575 /* If this is a padding record, we never want to make the size smaller
1576 than what was specified in it, if any. */
315cff15 1577 if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
a1ab4c31
AC
1578 size = TYPE_SIZE (record_type);
1579
1580 /* Now set any of the values we've just computed that apply. */
315cff15 1581 if (!TYPE_FAT_POINTER_P (record_type)
a1ab4c31
AC
1582 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
1583 SET_TYPE_ADA_SIZE (record_type, ada_size);
1584
1585 if (rep_level > 0)
1586 {
1587 tree size_unit = had_size_unit
1588 ? TYPE_SIZE_UNIT (record_type)
1589 : convert (sizetype,
1590 size_binop (CEIL_DIV_EXPR, size,
1591 bitsize_unit_node));
1592 unsigned int align = TYPE_ALIGN (record_type);
1593
1594 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
1595 TYPE_SIZE_UNIT (record_type)
1596 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
1597
1598 compute_record_mode (record_type);
1599 }
1600 }
1601
032d1b71 1602 if (debug_info_p)
a1ab4c31
AC
1603 rest_of_record_type_compilation (record_type);
1604}
1605
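/* Illustrative sketch, not part of the original source: a minimal use of
   create_field_decl and finish_record_type to build an unrepped record.
   The type and field names below are hypothetical.

     tree rec = make_node (RECORD_TYPE);
     tree f1, f2;
     TYPE_NAME (rec) = get_identifier ("example");
     f1 = create_field_decl (get_identifier ("c"), char_type_node, rec,
                             NULL_TREE, NULL_TREE, 0, 0);
     f2 = create_field_decl (get_identifier ("i"), integer_type_node, rec,
                             NULL_TREE, NULL_TREE, 0, 0);
     DECL_CHAIN (f1) = f2;
     finish_record_type (rec, f1, 0, false);

   With REP_LEVEL 0 the record is laid out here, so TYPE_SIZE, TYPE_ALIGN
   and TYPE_ADA_SIZE of REC are all set on return.  */
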
a5695aa2
EB
1606/* Append PARALLEL_TYPE to the chain of parallel types of TYPE. */
1607
1608void
1609add_parallel_type (tree type, tree parallel_type)
1610{
1611 tree decl = TYPE_STUB_DECL (type);
1612
1613 while (DECL_PARALLEL_TYPE (decl))
1614 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
1615
1616 SET_DECL_PARALLEL_TYPE (decl, parallel_type);
1617}
1618
1619/* Return true if TYPE has a parallel type. */
1620
1621static bool
1622has_parallel_type (tree type)
1623{
1624 tree decl = TYPE_STUB_DECL (type);
1625
1626 return DECL_PARALLEL_TYPE (decl) != NULL_TREE;
1627}
1628
032d1b71
EB
1629/* Wrap up compilation of RECORD_TYPE, i.e. output all the debug information
1630 associated with it. It need not be invoked directly in most cases since
1631 finish_record_type takes care of doing so, but this can be necessary if
1632 a parallel type is to be attached to the record type. */
a1ab4c31
AC
1633
1634void
1635rest_of_record_type_compilation (tree record_type)
1636{
a1ab4c31 1637 bool var_size = false;
fb88e1dd 1638 tree field;
a1ab4c31 1639
fb88e1dd
EB
1640 /* If this is a padded type, the bulk of the debug info has already been
1641 generated for the field's type. */
1642 if (TYPE_IS_PADDING_P (record_type))
1643 return;
1644
a5695aa2
EB
1645 /* If the type already has a parallel type (XVS type), then we're done. */
1646 if (has_parallel_type (record_type))
1647 return;
1648
fb88e1dd 1649 for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
a1ab4c31
AC
1650 {
1651 /* We need to make an XVE/XVU record if any field has variable size,
1652 whether or not the record does. For example, if we have a union,
1653 it may be that all fields, rounded up to the alignment, have the
1654 same size, in which case we'll use that size. But the debug
1655 output routines (except Dwarf2) won't be able to output the fields,
1656 so we need to make the special record. */
1657 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
1658 /* If a field has a non-constant qualifier, the record will have
1659 variable size too. */
fb88e1dd 1660 || (TREE_CODE (record_type) == QUAL_UNION_TYPE
a1ab4c31
AC
1661 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
1662 {
1663 var_size = true;
1664 break;
1665 }
1666 }
1667
fb88e1dd
EB
1668 /* If this record type is of variable size, make a parallel record type that
1669 will tell the debugger how the former is laid out (see exp_dbug.ads). */
1670 if (var_size)
a1ab4c31
AC
1671 {
1672 tree new_record_type
1673 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
1674 ? UNION_TYPE : TREE_CODE (record_type));
0fb2335d 1675 tree orig_name = TYPE_NAME (record_type), new_name;
a1ab4c31 1676 tree last_pos = bitsize_zero_node;
0fb2335d 1677 tree old_field, prev_old_field = NULL_TREE;
a1ab4c31 1678
0fb2335d
EB
1679 if (TREE_CODE (orig_name) == TYPE_DECL)
1680 orig_name = DECL_NAME (orig_name);
1681
1682 new_name
1683 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
1684 ? "XVU" : "XVE");
1685 TYPE_NAME (new_record_type) = new_name;
a1ab4c31
AC
1686 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
1687 TYPE_STUB_DECL (new_record_type)
0fb2335d 1688 = create_type_stub_decl (new_name, new_record_type);
a1ab4c31
AC
1689 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
1690 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
1691 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
1692 TYPE_SIZE_UNIT (new_record_type)
1693 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
1694
a1ab4c31
AC
1695 /* Now scan all the fields, replacing each field with a new
1696 field corresponding to the new encoding. */
1697 for (old_field = TYPE_FIELDS (record_type); old_field;
910ad8de 1698 old_field = DECL_CHAIN (old_field))
a1ab4c31
AC
1699 {
1700 tree field_type = TREE_TYPE (old_field);
1701 tree field_name = DECL_NAME (old_field);
1702 tree new_field;
1703 tree curpos = bit_position (old_field);
1704 bool var = false;
1705 unsigned int align = 0;
1706 tree pos;
1707
1708 /* See how the position was modified from the last position.
1709
1710 There are two basic cases we support: a value was added
1711 to the last position or the last position was rounded to
1712 a boundary and then something was added. Check for the
1713 first case first. If not, see if there is any evidence
1714 of rounding. If so, round the last position and try
1715 again.
1716
1717 If this is a union, the position can be taken as zero. */
1718
1719 /* Some computations depend on the shape of the position expression,
1720 so strip conversions to make sure it's exposed. */
1721 curpos = remove_conversions (curpos, true);
1722
1723 if (TREE_CODE (new_record_type) == UNION_TYPE)
1724 pos = bitsize_zero_node, align = 0;
1725 else
1726 pos = compute_related_constant (curpos, last_pos);
1727
1728 if (!pos && TREE_CODE (curpos) == MULT_EXPR
1729 && host_integerp (TREE_OPERAND (curpos, 1), 1))
1730 {
1731 tree offset = TREE_OPERAND (curpos, 0);
1732 align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);
1733
f58e663b
EB
1734 /* An offset which is a bitwise AND with a mask increases the
1735 alignment according to the number of trailing zeros. */
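	  /* For example, an initial alignment of 8 bits combined with a mask
	     whose three low-order bits are zero is bumped to 8 * 2 * 2 * 2
	     = 64 bits, one doubling per trailing zero bit.  (Illustrative
	     note, not part of the original source.)  */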
a1ab4c31
AC
1736 offset = remove_conversions (offset, true);
1737 if (TREE_CODE (offset) == BIT_AND_EXPR
f58e663b 1738 && TREE_CODE (TREE_OPERAND (offset, 1)) == INTEGER_CST)
a1ab4c31 1739 {
f58e663b
EB
1740 unsigned HOST_WIDE_INT mask
1741 = TREE_INT_CST_LOW (TREE_OPERAND (offset, 1));
1742 unsigned int i;
1743
1744 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
1745 {
1746 if (mask & 1)
1747 break;
1748 mask >>= 1;
1749 align *= 2;
1750 }
a1ab4c31
AC
1751 }
1752
1753 pos = compute_related_constant (curpos,
1754 round_up (last_pos, align));
1755 }
1756 else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
1757 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
1758 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
1759 && host_integerp (TREE_OPERAND
1760 (TREE_OPERAND (curpos, 0), 1),
1761 1))
1762 {
1763 align
1764 = tree_low_cst
1765 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
1766 pos = compute_related_constant (curpos,
1767 round_up (last_pos, align));
1768 }
1769 else if (potential_alignment_gap (prev_old_field, old_field,
1770 pos))
1771 {
1772 align = TYPE_ALIGN (field_type);
1773 pos = compute_related_constant (curpos,
1774 round_up (last_pos, align));
1775 }
1776
1777 /* If we can't compute a position, set it to zero.
1778
1779 ??? We really should abort here, but it's too much work
1780 to get this correct for all cases. */
1781
1782 if (!pos)
1783 pos = bitsize_zero_node;
1784
1785 /* See if this type is variable-sized and make a pointer type
1786 and indicate the indirection if so. Beware that the debug
1787 back-end may adjust the position computed above according
1788 to the alignment of the field type, i.e. the pointer type
1789 in this case, if we don't preventively counter that. */
1790 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
1791 {
1792 field_type = build_pointer_type (field_type);
1793 if (align != 0 && TYPE_ALIGN (field_type) > align)
1794 {
1795 field_type = copy_node (field_type);
1796 TYPE_ALIGN (field_type) = align;
1797 }
1798 var = true;
1799 }
1800
1801 /* Make a new field name, if necessary. */
1802 if (var || align != 0)
1803 {
1804 char suffix[16];
1805
1806 if (align != 0)
1807 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
1808 align / BITS_PER_UNIT);
1809 else
1810 strcpy (suffix, "XVL");
1811
0fb2335d 1812 field_name = concat_name (field_name, suffix);
a1ab4c31
AC
1813 }
1814
da01bfee
EB
1815 new_field
1816 = create_field_decl (field_name, field_type, new_record_type,
1817 DECL_SIZE (old_field), pos, 0, 0);
910ad8de 1818 DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
a1ab4c31
AC
1819 TYPE_FIELDS (new_record_type) = new_field;
1820
1821 /* If the type of old_field is a QUAL_UNION_TYPE, take its size as being
1822 zero. The only time it's not the last field of the record
1823 is when there are other components at fixed positions after
1824 it (meaning there was a rep clause for every field) and we
1825 want to be able to encode them. */
1826 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
1827 (TREE_CODE (TREE_TYPE (old_field))
1828 == QUAL_UNION_TYPE)
1829 ? bitsize_zero_node
1830 : DECL_SIZE (old_field));
1831 prev_old_field = old_field;
1832 }
1833
fb88e1dd 1834 TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));
a1ab4c31 1835
a5695aa2 1836 add_parallel_type (record_type, new_record_type);
a1ab4c31 1837 }
a1ab4c31
AC
1838}
1839
a1ab4c31 1840/* Utility function of above to merge LAST_SIZE, the previous size of a record
1e17ef87
EB
1841 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
1842 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
1843 replace a value of zero with the old size. If HAS_REP is true, we take the
1844 MAX of the end position of this field with LAST_SIZE. In all other cases,
1845 we use FIRST_BIT plus SIZE. Return an expression for the size. */
a1ab4c31
AC
1846
1847static tree
1848merge_sizes (tree last_size, tree first_bit, tree size, bool special,
1849 bool has_rep)
1850{
1851 tree type = TREE_TYPE (last_size);
c6bd4220 1852 tree new_size;
a1ab4c31
AC
1853
1854 if (!special || TREE_CODE (size) != COND_EXPR)
1855 {
c6bd4220 1856 new_size = size_binop (PLUS_EXPR, first_bit, size);
a1ab4c31 1857 if (has_rep)
c6bd4220 1858 new_size = size_binop (MAX_EXPR, last_size, new_size);
a1ab4c31
AC
1859 }
1860
1861 else
c6bd4220
EB
1862 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
1863 integer_zerop (TREE_OPERAND (size, 1))
1864 ? last_size : merge_sizes (last_size, first_bit,
1865 TREE_OPERAND (size, 1),
1866 1, has_rep),
1867 integer_zerop (TREE_OPERAND (size, 2))
1868 ? last_size : merge_sizes (last_size, first_bit,
1869 TREE_OPERAND (size, 2),
1870 1, has_rep));
a1ab4c31
AC
1871
1872 /* We don't need any NON_LVALUE_EXPRs, as they can confuse us (especially
1873 when fed through substitute_in_expr) into thinking that a constant
1874 size is not constant. */
c6bd4220
EB
1875 while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
1876 new_size = TREE_OPERAND (new_size, 0);
a1ab4c31 1877
c6bd4220 1878 return new_size;
a1ab4c31
AC
1879}
1880
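/* Illustrative note, not part of the original source: for a field with
   FIRST_BIT 32 and SIZE 16 and a previous LAST_SIZE of 32, merge_sizes
   returns 48 (FIRST_BIT + SIZE) when HAS_REP is false.  With HAS_REP true
   and a previous LAST_SIZE of 64, it returns MAX (64, 48) = 64, since a
   rep clause may already have placed an earlier field past this one.  */
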
1881/* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1882 related by the addition of a constant. Return that constant if so. */
1883
1884static tree
1885compute_related_constant (tree op0, tree op1)
1886{
1887 tree op0_var, op1_var;
1888 tree op0_con = split_plus (op0, &op0_var);
1889 tree op1_con = split_plus (op1, &op1_var);
1890 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1891
1892 if (operand_equal_p (op0_var, op1_var, 0))
1893 return result;
1894 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1895 return result;
1896 else
1897 return 0;
1898}
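
/* Illustrative note, not part of the original source: if OP0 is the
   bitsizetype tree for N + 24 and OP1 the tree for N + 8, both have the
   same variable part N, so compute_related_constant returns 16.  If the
   variable parts differ, e.g. N + 24 versus M + 8, it returns 0.  */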
1899
1900/* Utility function of above to split a tree OP which may be a sum, into a
1901 constant part, which is returned, and a variable part, which is stored
1902 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1903 bitsizetype. */
1904
1905static tree
1906split_plus (tree in, tree *pvar)
1907{
722356ce
EB
1908 /* Strip conversions in order to ease the tree traversal and maximize the
1909 potential for constant or plus/minus discovery. We need to be careful
a1ab4c31
AC
1910 to always return and set *pvar to bitsizetype trees, but it's worth
1911 the effort. */
722356ce 1912 in = remove_conversions (in, false);
a1ab4c31
AC
1913
1914 *pvar = convert (bitsizetype, in);
1915
1916 if (TREE_CODE (in) == INTEGER_CST)
1917 {
1918 *pvar = bitsize_zero_node;
1919 return convert (bitsizetype, in);
1920 }
1921 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1922 {
1923 tree lhs_var, rhs_var;
1924 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1925 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1926
1927 if (lhs_var == TREE_OPERAND (in, 0)
1928 && rhs_var == TREE_OPERAND (in, 1))
1929 return bitsize_zero_node;
1930
1931 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1932 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1933 }
1934 else
1935 return bitsize_zero_node;
1936}
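
/* Illustrative note, not part of the original source: for IN equal to the
   bitsizetype tree N + 8, split_plus stores the variable part N in *PVAR
   and returns 8; for a plain INTEGER_CST it stores bitsize_zero_node and
   returns the constant; for any other expression it returns
   bitsize_zero_node and leaves the whole expression in *PVAR.  */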
1937\f
d47d0a8d
EB
1938/* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1939 subprogram. If it is VOID_TYPE, then we are dealing with a procedure,
1940 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1941 PARM_DECL nodes that are the subprogram parameters. CICO_LIST is the
1942 copy-in/copy-out list to be stored into the TYPE_CICO_LIST field.
1943 RETURN_UNCONSTRAINED_P is true if the function returns an unconstrained
1944 object. RETURN_BY_DIRECT_REF_P is true if the function returns by direct
1945 reference. RETURN_BY_INVISI_REF_P is true if the function returns by
1946 invisible reference. */
a1ab4c31
AC
1947
1948tree
1949create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
d47d0a8d
EB
1950 bool return_unconstrained_p, bool return_by_direct_ref_p,
1951 bool return_by_invisi_ref_p)
a1ab4c31 1952{
e5b00edf
NF
1953 /* A list of the data type nodes of the subprogram formal parameters.
1954 This list is generated by traversing the input list of PARM_DECL
1955 nodes. */
9771b263 1956 vec<tree, va_gc> *param_type_list = NULL;
d47d0a8d 1957 tree t, type;
a1ab4c31 1958
910ad8de 1959 for (t = param_decl_list; t; t = DECL_CHAIN (t))
9771b263 1960 vec_safe_push (param_type_list, TREE_TYPE (t));
a1ab4c31 1961
e5b00edf 1962 type = build_function_type_vec (return_type, param_type_list);
a1ab4c31 1963
d47d0a8d
EB
1964 /* TYPE may have been shared since GCC hashes types. If it has a different
1965 CICO_LIST, make a copy. Likewise for the various flags. */
523e82a7
EB
1966 if (!fntype_same_flags_p (type, cico_list, return_unconstrained_p,
1967 return_by_direct_ref_p, return_by_invisi_ref_p))
d47d0a8d
EB
1968 {
1969 type = copy_type (type);
1970 TYPE_CI_CO_LIST (type) = cico_list;
1971 TYPE_RETURN_UNCONSTRAINED_P (type) = return_unconstrained_p;
1972 TYPE_RETURN_BY_DIRECT_REF_P (type) = return_by_direct_ref_p;
1973 TREE_ADDRESSABLE (type) = return_by_invisi_ref_p;
1974 }
a1ab4c31 1975
a1ab4c31
AC
1976 return type;
1977}
1978\f
1979/* Return a copy of TYPE but safe to modify in any way. */
1980
1981tree
1982copy_type (tree type)
1983{
c6bd4220 1984 tree new_type = copy_node (type);
a1ab4c31 1985
90dcfecb
EB
1986 /* Unshare the language-specific data. */
1987 if (TYPE_LANG_SPECIFIC (type))
1988 {
1989 TYPE_LANG_SPECIFIC (new_type) = NULL;
1990 SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
1991 }
1992
1993 /* And the contents of the language-specific slot if needed. */
1994 if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
1995 && TYPE_RM_VALUES (type))
1996 {
1997 TYPE_RM_VALUES (new_type) = NULL_TREE;
1998 SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
1999 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
2000 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
2001 }
2002
a1ab4c31
AC
2003 /* copy_node clears this field instead of copying it, because it is
2004 aliased with TREE_CHAIN. */
c6bd4220 2005 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
a1ab4c31 2006
c6bd4220
EB
2007 TYPE_POINTER_TO (new_type) = 0;
2008 TYPE_REFERENCE_TO (new_type) = 0;
2009 TYPE_MAIN_VARIANT (new_type) = new_type;
2010 TYPE_NEXT_VARIANT (new_type) = 0;
a1ab4c31 2011
c6bd4220 2012 return new_type;
a1ab4c31
AC
2013}
2014\f
c1abd261
EB
2015/* Return a subtype of sizetype with range MIN to MAX and whose
2016 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2017 of the associated TYPE_DECL. */
a1ab4c31
AC
2018
2019tree
2020create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
2021{
2022 /* First build a type for the desired range. */
523e82a7 2023 tree type = build_nonshared_range_type (sizetype, min, max);
a1ab4c31 2024
523e82a7 2025 /* Then set the index type. */
a1ab4c31 2026 SET_TYPE_INDEX_TYPE (type, index);
74746d49 2027 create_type_decl (NULL_TREE, type, true, false, gnat_node);
c1abd261 2028
a1ab4c31
AC
2029 return type;
2030}
84fb43a1
EB
2031
2032/* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2033 sizetype is used. */
2034
2035tree
2036create_range_type (tree type, tree min, tree max)
2037{
2038 tree range_type;
2039
2040 if (type == NULL_TREE)
2041 type = sizetype;
2042
2043 /* First build a type with the base range. */
523e82a7
EB
2044 range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
2045 TYPE_MAX_VALUE (type));
84fb43a1
EB
2046
2047 /* Then set the actual range. */
523e82a7
EB
2048 SET_TYPE_RM_MIN_VALUE (range_type, convert (type, min));
2049 SET_TYPE_RM_MAX_VALUE (range_type, convert (type, max));
84fb43a1
EB
2050
2051 return range_type;
2052}
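
/* Illustrative sketch, not part of the original source; the variable name
   below is hypothetical.

     tree idx = create_range_type (sizetype, size_int (1), size_int (10));

   IDX shares the base range of sizetype but carries 1 .. 10 as its RM
   (representation) bounds via TYPE_RM_MIN_VALUE and TYPE_RM_MAX_VALUE.  */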
a1ab4c31 2053\f
10069d53
EB
2054/* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
2055 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
2056 its data type. */
2057
2058tree
2059create_type_stub_decl (tree type_name, tree type)
2060{
2061 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
2062 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
2063 emitted in DWARF. */
c172df28
AH
2064 tree type_decl = build_decl (input_location,
2065 TYPE_DECL, type_name, type);
10069d53 2066 DECL_ARTIFICIAL (type_decl) = 1;
bc712852 2067 TYPE_ARTIFICIAL (type) = 1;
10069d53
EB
2068 return type_decl;
2069}
2070
2071/* Return a TYPE_DECL node. TYPE_NAME gives the name of the type and TYPE
2072 is a ..._TYPE node giving its data type. ARTIFICIAL_P is true if this
2073 is a declaration that was generated by the compiler. DEBUG_INFO_P is
2074 true if we need to write debug information about this type. GNAT_NODE
2075 is used for the position of the decl. */
a1ab4c31
AC
2076
2077tree
74746d49
EB
2078create_type_decl (tree type_name, tree type, bool artificial_p,
2079 bool debug_info_p, Node_Id gnat_node)
a1ab4c31 2080{
a1ab4c31 2081 enum tree_code code = TREE_CODE (type);
10069d53
EB
2082 bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
2083 tree type_decl;
a1ab4c31 2084
10069d53
EB
2085 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
2086 gcc_assert (!TYPE_IS_DUMMY_P (type));
a1ab4c31 2087
10069d53
EB
2088 /* If the type hasn't been named yet, we're naming it; preserve an existing
2089 TYPE_STUB_DECL that has been attached to it for some purpose. */
2090 if (!named && TYPE_STUB_DECL (type))
2091 {
2092 type_decl = TYPE_STUB_DECL (type);
2093 DECL_NAME (type_decl) = type_name;
2094 }
2095 else
74746d49 2096 type_decl = build_decl (input_location, TYPE_DECL, type_name, type);
a1ab4c31 2097
10069d53 2098 DECL_ARTIFICIAL (type_decl) = artificial_p;
bc712852 2099 TYPE_ARTIFICIAL (type) = artificial_p;
58c8f770
EB
2100
2101 /* Add this decl to the current binding level. */
10069d53 2102 gnat_pushdecl (type_decl, gnat_node);
58c8f770 2103
10069d53
EB
2104 /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
2105 This causes the name to be also viewed as a "tag" by the debug
2106 back-end, with the advantage that no DW_TAG_typedef is emitted
2107 for artificial "tagged" types in DWARF. */
2108 if (!named)
2109 TYPE_STUB_DECL (type) = type_decl;
2110
50741117
EB
2111 /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
2112 back-end doesn't support, and for others if we don't need to. */
a1ab4c31
AC
2113 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
2114 DECL_IGNORED_P (type_decl) = 1;
a1ab4c31
AC
2115
2116 return type_decl;
2117}
10069d53 2118\f
a1ab4c31
AC
2119/* Return a VAR_DECL or CONST_DECL node.
2120
2121 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
2122 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
2123 the GCC tree for an optional initial expression; NULL_TREE if none.
2124
2125 CONST_FLAG is true if this variable is constant, in which case we might
2126 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
2127
2128 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
2129 definition to be made visible outside of the current compilation unit, for
2130 instance variable definitions in a package specification.
2131
1e17ef87 2132 EXTERN_FLAG is true when processing an external variable declaration (as
a1ab4c31
AC
2133 opposed to a definition: no storage is to be allocated for the variable).
2134
2135 STATIC_FLAG is only relevant when not at top level. In that case
2136 it indicates whether to always allocate storage to the variable.
2137
2138 GNAT_NODE is used for the position of the decl. */
2139
2140tree
2141create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
2142 bool const_flag, bool public_flag, bool extern_flag,
2143 bool static_flag, bool const_decl_allowed_p,
2144 struct attrib *attr_list, Node_Id gnat_node)
2145{
50179d58
EB
2146 /* Whether the initializer is a constant initializer. At the global level
2147 or for an external object or an object to be allocated in static memory,
2148 we check that it is a valid constant expression for use in initializing
2149 a static variable; otherwise, we only check that it is constant. */
a1ab4c31
AC
2150 bool init_const
2151 = (var_init != 0
2152 && gnat_types_compatible_p (type, TREE_TYPE (var_init))
50179d58 2153 && (global_bindings_p () || extern_flag || static_flag
a1ab4c31
AC
2154 ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
2155 : TREE_CONSTANT (var_init)));
2156
2157 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
2158 case the initializer may be used in lieu of the DECL node (as done in
2159 Identifier_to_gnu). This is useful to prevent the need of elaboration
2160 code when an identifier for which such a decl is made is in turn used as
2161 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
2162 but extra constraints apply to this choice (see below) and are not
2163 relevant to the distinction we wish to make. */
2164 bool constant_p = const_flag && init_const;
2165
2166 /* The actual DECL node. CONST_DECL was initially intended for enumerals
2167 and may be used for scalars in general but not for aggregates. */
2168 tree var_decl
c172df28
AH
2169 = build_decl (input_location,
2170 (constant_p && const_decl_allowed_p
a1ab4c31
AC
2171 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
2172 var_name, type);
2173
2174 /* If this is external, throw away any initializations (they will be done
2175 elsewhere) unless this is a constant for which we would like to remain
2176 able to get the initializer. If we are defining a global here, leave a
2177 constant initialization and save any variable elaborations for the
2178 elaboration routine. If we are just annotating types, throw away the
2179 initialization if it isn't a constant. */
2180 if ((extern_flag && !constant_p)
2181 || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
2182 var_init = NULL_TREE;
2183
2184 /* At the global level, an initializer requiring code to be generated
2185 produces elaboration statements. Check that such statements are allowed,
2186 that is, not violating a No_Elaboration_Code restriction. */
3b9e8343 2187 if (global_bindings_p () && var_init != 0 && !init_const)
a1ab4c31 2188 Check_Elaboration_Code_Allowed (gnat_node);
3b9e8343 2189
8b7b0c36
JH
2190 DECL_INITIAL (var_decl) = var_init;
2191 TREE_READONLY (var_decl) = const_flag;
2192 DECL_EXTERNAL (var_decl) = extern_flag;
2193 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
2194 TREE_CONSTANT (var_decl) = constant_p;
2195 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
2196 = TYPE_VOLATILE (type);
a1ab4c31
AC
2197
2198 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
2199 try to fiddle with DECL_COMMON. However, on platforms that don't
2200 support global BSS sections, uninitialized global variables would
2201 go in DATA instead, thus increasing the size of the executable. */
2202 if (!flag_no_common
2203 && TREE_CODE (var_decl) == VAR_DECL
3b9e8343 2204 && TREE_PUBLIC (var_decl)
a1ab4c31
AC
2205 && !have_global_bss_p ())
2206 DECL_COMMON (var_decl) = 1;
a1ab4c31 2207
2231f17f
EB
2208 /* At the global binding level, we need to allocate static storage for the
2209 variable if it isn't external. Otherwise, we allocate automatic storage
2210 unless requested not to. */
a1ab4c31 2211 TREE_STATIC (var_decl)
2231f17f 2212 = !extern_flag && (static_flag || global_bindings_p ());
a1ab4c31 2213
5225a138
EB
2214 /* For an external constant whose initializer is not absolute, do not emit
2215 debug info. In DWARF this would mean a global relocation in a read-only
c01fe451 2216 section which runs afoul of the PE-COFF run-time relocation mechanism. */
5225a138
EB
2217 if (extern_flag
2218 && constant_p
50179d58 2219 && var_init
5225a138
EB
2220 && initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
2221 != null_pointer_node)
2222 DECL_IGNORED_P (var_decl) = 1;
2223
a1ab4c31
AC
2224 if (TREE_SIDE_EFFECTS (var_decl))
2225 TREE_ADDRESSABLE (var_decl) = 1;
2226
74746d49
EB
2227 /* ??? Some attributes cannot be applied to CONST_DECLs. */
2228 if (TREE_CODE (var_decl) == VAR_DECL)
2229 process_attributes (&var_decl, &attr_list, true, gnat_node);
2230
2231 /* Add this decl to the current binding level. */
2232 gnat_pushdecl (var_decl, gnat_node);
2233
58c8f770 2234 if (TREE_CODE (var_decl) == VAR_DECL)
a1ab4c31 2235 {
58c8f770
EB
2236 if (asm_name)
2237 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
74746d49 2238
a1ab4c31
AC
2239 if (global_bindings_p ())
2240 rest_of_decl_compilation (var_decl, true, 0);
2241 }
a1ab4c31
AC
2242
2243 return var_decl;
2244}
2245\f
2246/* Return true if TYPE, an aggregate type, contains (or is) an array. */
2247
2248static bool
2249aggregate_type_contains_array_p (tree type)
2250{
2251 switch (TREE_CODE (type))
2252 {
2253 case RECORD_TYPE:
2254 case UNION_TYPE:
2255 case QUAL_UNION_TYPE:
2256 {
2257 tree field;
910ad8de 2258 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
a1ab4c31
AC
2259 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
2260 && aggregate_type_contains_array_p (TREE_TYPE (field)))
2261 return true;
2262 return false;
2263 }
2264
2265 case ARRAY_TYPE:
2266 return true;
2267
2268 default:
2269 gcc_unreachable ();
2270 }
2271}
2272
62f9f3ce 2273/* Return a FIELD_DECL node. FIELD_NAME is the field's name, FIELD_TYPE is
da01bfee
EB
2274 its type and RECORD_TYPE is the type of the enclosing record. If SIZE is
2275 nonzero, it is the specified size of the field. If POS is nonzero, it is
2276 the bit position. PACKED is 1 if the enclosing record is packed, -1 if it
2277 has Component_Alignment of Storage_Unit. If ADDRESSABLE is nonzero, it
62f9f3ce
EB
2278 means we are allowed to take the address of the field; if it is negative,
2279 we should not make a bitfield, which is used by make_aligning_type. */
a1ab4c31
AC
2280
2281tree
2282create_field_decl (tree field_name, tree field_type, tree record_type,
da01bfee 2283 tree size, tree pos, int packed, int addressable)
a1ab4c31 2284{
c172df28
AH
2285 tree field_decl = build_decl (input_location,
2286 FIELD_DECL, field_name, field_type);
a1ab4c31
AC
2287
2288 DECL_CONTEXT (field_decl) = record_type;
2289 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
2290
2291 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
2292 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
2293 Likewise for an aggregate without specified position that contains an
2294 array, because in this case slices of variable length of this array
2295 must be handled by GCC and variable-sized objects need to be aligned
2296 to at least a byte boundary. */
2297 if (packed && (TYPE_MODE (field_type) == BLKmode
2298 || (!pos
2299 && AGGREGATE_TYPE_P (field_type)
2300 && aggregate_type_contains_array_p (field_type))))
2301 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
2302
2303 /* If a size is specified, use it. Otherwise, if the record type is packed
2304 compute a size to use, which may differ from the object's natural size.
2305 We always set a size in this case to trigger the checks for bitfield
2306 creation below, which is typically required when no position has been
2307 specified. */
2308 if (size)
2309 size = convert (bitsizetype, size);
2310 else if (packed == 1)
2311 {
2312 size = rm_size (field_type);
62f9f3ce
EB
2313 if (TYPE_MODE (field_type) == BLKmode)
2314 size = round_up (size, BITS_PER_UNIT);
a1ab4c31
AC
2315 }
2316
2317 /* If ADDRESSABLE allows it, we make a bitfield when a size is specified,
2318 for two reasons: first, if the size differs from the natural size;
2319 second, if the alignment is insufficient. There are a number of
2320 ways the latter can be true.
2321
2322 We never make a bitfield if the type of the field has a nonconstant size,
2323 because no such entity requiring bitfield operations should reach here.
2324
2325 We do *preventively* make a bitfield when there might be the need for it
2326 but we don't have all the necessary information to decide, as is the case
2327 of a field with no specified position in a packed record.
2328
2329 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
2330 in layout_decl or finish_record_type to clear the bit_field indication if
2331 it is in fact not needed. */
2332 if (addressable >= 0
2333 && size
2334 && TREE_CODE (size) == INTEGER_CST
2335 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
2336 && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
2337 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
2338 || packed
2339 || (TYPE_ALIGN (record_type) != 0
2340 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
2341 {
2342 DECL_BIT_FIELD (field_decl) = 1;
2343 DECL_SIZE (field_decl) = size;
2344 if (!packed && !pos)
feec4372
EB
2345 {
2346 if (TYPE_ALIGN (record_type) != 0
2347 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))
2348 DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
2349 else
2350 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
2351 }
a1ab4c31
AC
2352 }
2353
2354 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
2355
2356 /* Bump the alignment if need be, either for bitfield/packing purposes or
2357 to satisfy the type requirements if no such consideration applies. When
2358 we get the alignment from the type, indicate if this is from an explicit
2359 user request, which prevents stor-layout from lowering it later on. */
2360 {
d9223014 2361 unsigned int bit_align
a1ab4c31
AC
2362 = (DECL_BIT_FIELD (field_decl) ? 1
2363 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);
2364
2365 if (bit_align > DECL_ALIGN (field_decl))
2366 DECL_ALIGN (field_decl) = bit_align;
2367 else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
2368 {
2369 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
2370 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
2371 }
2372 }
2373
2374 if (pos)
2375 {
2376 /* We need to pass in the alignment the DECL is known to have.
2377 This is the lowest-order bit set in POS, but no more than
2378 the alignment of the record, if one is specified. Note
2379 that an alignment of 0 is taken as infinite. */
2380 unsigned int known_align;
2381
2382 if (host_integerp (pos, 1))
2383 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
2384 else
2385 known_align = BITS_PER_UNIT;
2386
2387 if (TYPE_ALIGN (record_type)
2388 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
2389 known_align = TYPE_ALIGN (record_type);
2390
2391 layout_decl (field_decl, known_align);
2392 SET_DECL_OFFSET_ALIGN (field_decl,
2393 host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
2394 : BITS_PER_UNIT);
2395 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
2396 &DECL_FIELD_BIT_OFFSET (field_decl),
2397 DECL_OFFSET_ALIGN (field_decl), pos);
a1ab4c31
AC
2398 }
2399
2400 /* In addition to what our caller says, claim the field is addressable if we
2401 know that its type is not suitable.
2402
2403 The field may also be "technically" nonaddressable, meaning that even if
2404 we attempt to take the field's address we will actually get the address
2405 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
2406 value we have at this point is not accurate enough, so we don't account
2407 for this here and let finish_record_type decide. */
4c5a0615 2408 if (!addressable && !type_for_nonaliased_component_p (field_type))
a1ab4c31
AC
2409 addressable = 1;
2410
2411 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
2412
2413 return field_decl;
2414}
2415\f
a8e05f92
EB
2416/* Return a PARM_DECL node. PARAM_NAME is the name of the parameter and
2417 PARAM_TYPE is its type. READONLY is true if the parameter is readonly
2418 (either an In parameter or an address of a pass-by-ref parameter). */
a1ab4c31
AC
2419
2420tree
2421create_param_decl (tree param_name, tree param_type, bool readonly)
2422{
c172df28
AH
2423 tree param_decl = build_decl (input_location,
2424 PARM_DECL, param_name, param_type);
a1ab4c31 2425
a8e05f92
EB
2426 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
2427 can lead to various ABI violations. */
2428 if (targetm.calls.promote_prototypes (NULL_TREE)
2429 && INTEGRAL_TYPE_P (param_type)
a1ab4c31
AC
2430 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
2431 {
2432 /* We have to be careful about biased types here. Make a subtype
2433 of integer_type_node with the proper biasing. */
2434 if (TREE_CODE (param_type) == INTEGER_TYPE
2435 && TYPE_BIASED_REPRESENTATION_P (param_type))
2436 {
84fb43a1
EB
2437 tree subtype
2438 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
c1abd261
EB
2439 TREE_TYPE (subtype) = integer_type_node;
2440 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
84fb43a1
EB
2441 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (param_type));
2442 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (param_type));
c1abd261 2443 param_type = subtype;
a1ab4c31
AC
2444 }
2445 else
2446 param_type = integer_type_node;
2447 }
2448
2449 DECL_ARG_TYPE (param_decl) = param_type;
2450 TREE_READONLY (param_decl) = readonly;
2451 return param_decl;
2452}
2453\f
74746d49
EB
2454/* Process the attributes in ATTR_LIST for NODE, which is either a DECL or
2455 a TYPE. If IN_PLACE is true, the tree pointed to by NODE should not be
2456 changed. GNAT_NODE is used for the position of error messages. */
a1ab4c31 2457
74746d49
EB
2458void
2459process_attributes (tree *node, struct attrib **attr_list, bool in_place,
2460 Node_Id gnat_node)
a1ab4c31 2461{
74746d49
EB
2462 struct attrib *attr;
2463
2464 for (attr = *attr_list; attr; attr = attr->next)
2465 switch (attr->type)
a1ab4c31
AC
2466 {
2467 case ATTR_MACHINE_ATTRIBUTE:
74746d49
EB
2468 Sloc_to_locus (Sloc (gnat_node), &input_location);
2469 decl_attributes (node, tree_cons (attr->name, attr->args, NULL_TREE),
2470 in_place ? ATTR_FLAG_TYPE_IN_PLACE : 0);
a1ab4c31
AC
2471 break;
2472
2473 case ATTR_LINK_ALIAS:
74746d49 2474 if (!DECL_EXTERNAL (*node))
a1ab4c31 2475 {
74746d49
EB
2476 TREE_STATIC (*node) = 1;
2477 assemble_alias (*node, attr->name);
a1ab4c31
AC
2478 }
2479 break;
2480
2481 case ATTR_WEAK_EXTERNAL:
2482 if (SUPPORTS_WEAK)
74746d49 2483 declare_weak (*node);
a1ab4c31
AC
2484 else
2485 post_error ("?weak declarations not supported on this target",
74746d49 2486 attr->error_point);
a1ab4c31
AC
2487 break;
2488
2489 case ATTR_LINK_SECTION:
677f3fa8 2490 if (targetm_common.have_named_sections)
a1ab4c31 2491 {
74746d49
EB
2492 DECL_SECTION_NAME (*node)
2493 = build_string (IDENTIFIER_LENGTH (attr->name),
2494 IDENTIFIER_POINTER (attr->name));
2495 DECL_COMMON (*node) = 0;
a1ab4c31
AC
2496 }
2497 else
2498 post_error ("?section attributes are not supported for this target",
74746d49 2499 attr->error_point);
a1ab4c31
AC
2500 break;
2501
2502 case ATTR_LINK_CONSTRUCTOR:
74746d49
EB
2503 DECL_STATIC_CONSTRUCTOR (*node) = 1;
2504 TREE_USED (*node) = 1;
a1ab4c31
AC
2505 break;
2506
2507 case ATTR_LINK_DESTRUCTOR:
74746d49
EB
2508 DECL_STATIC_DESTRUCTOR (*node) = 1;
2509 TREE_USED (*node) = 1;
a1ab4c31 2510 break;
40a14772
TG
2511
2512 case ATTR_THREAD_LOCAL_STORAGE:
74746d49
EB
2513 DECL_TLS_MODEL (*node) = decl_default_tls_model (*node);
2514 DECL_COMMON (*node) = 0;
40a14772 2515 break;
a1ab4c31 2516 }
74746d49
EB
2517
2518 *attr_list = NULL;
a1ab4c31
AC
2519}
2520\f
feec4372 2521/* Record DECL as a global renaming pointer. */
a1ab4c31
AC
2522
2523void
2524record_global_renaming_pointer (tree decl)
2525{
15bf7d19 2526 gcc_assert (!DECL_LOOP_PARM_P (decl) && DECL_RENAMED_OBJECT (decl));
9771b263 2527 vec_safe_push (global_renaming_pointers, decl);
a1ab4c31
AC
2528}
2529
2530/* Invalidate the global renaming pointers. */
2531
2532void
2533invalidate_global_renaming_pointers (void)
2534{
2535 unsigned int i;
2536 tree iter;
2537
9771b263
DN
2538 if (global_renaming_pointers == NULL)
2539 return;
2540
2541 FOR_EACH_VEC_ELT (*global_renaming_pointers, i, iter)
a1ab4c31
AC
2542 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
2543
9771b263 2544 vec_free (global_renaming_pointers);
a1ab4c31
AC
2545}
2546
2547/* Return true if VALUE is known to be a multiple of FACTOR, which must be
2548 a power of 2. */
2549
2550bool
2551value_factor_p (tree value, HOST_WIDE_INT factor)
2552{
2553 if (host_integerp (value, 1))
2554 return tree_low_cst (value, 1) % factor == 0;
2555
2556 if (TREE_CODE (value) == MULT_EXPR)
2557 return (value_factor_p (TREE_OPERAND (value, 0), factor)
2558 || value_factor_p (TREE_OPERAND (value, 1), factor));
2559
2560 return false;
2561}
2562
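/* Illustrative note, not part of the original source: value_factor_p
   (bitsize_int (24), 8) is true because 24 % 8 == 0, and a MULT_EXPR such
   as N * 16 is accepted for factor 8 because its second operand is itself
   a multiple of 8.  When nothing can be proved, the function returns false,
   i.e. "not known to be a multiple" rather than "known not to be".  */
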
7d7fcb08 2563/* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
a1ab4c31
AC
2564 unless we can prove these 2 fields are laid out in such a way that no gap
2565 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
2566 is the distance in bits between the end of PREV_FIELD and the starting
2567 position of CURR_FIELD. It is ignored if null. */
2568
2569static bool
2570potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
2571{
2572 /* If this is the first field of the record, there cannot be any gap. */
2573 if (!prev_field)
2574 return false;
2575
78df6221 2576 /* If the previous field is a union type, then return false: The only
a1ab4c31
AC
2577 time when such a field is not the last field of the record is when
2578 there are other components at fixed positions after it (meaning there
2579 was a rep clause for every field), in which case we don't want the
2580 alignment constraint to override them. */
2581 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
2582 return false;
2583
2584 /* If the distance between the end of prev_field and the beginning of
2585 curr_field is constant, then there is a gap if the value of this
2586 constant is not null. */
2587 if (offset && host_integerp (offset, 1))
2588 return !integer_zerop (offset);
2589
2590 /* If the size and position of the previous field are constant,
2591 then check the sum of this size and position. There will be a gap
2592 iff it is not a multiple of the current field alignment. */
2593 if (host_integerp (DECL_SIZE (prev_field), 1)
2594 && host_integerp (bit_position (prev_field), 1))
2595 return ((tree_low_cst (bit_position (prev_field), 1)
2596 + tree_low_cst (DECL_SIZE (prev_field), 1))
2597 % DECL_ALIGN (curr_field) != 0);
2598
2599 /* If both the position and size of the previous field are multiples
2600 of the current field alignment, there cannot be any gap. */
2601 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
2602 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
2603 return false;
2604
2605 /* Fallback: return that there may be a potential gap. */
2606 return true;
2607}
2608
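/* Illustrative note, not part of the original source, assuming OFFSET is
   null and PREV_FIELD has constant size and position: if PREV_FIELD ends
   at bit 24 and CURR_FIELD requires 16-bit alignment, 24 % 16 != 0 so the
   function conservatively reports a potential gap; if PREV_FIELD ends at
   bit 32 instead, the end position is a multiple of 16 and it returns
   false.  */
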
88a94e2b
EB
2609/* Return a LABEL_DECL with LABEL_NAME. GNAT_NODE is used for the position
2610 of the decl. */
a1ab4c31
AC
2611
2612tree
88a94e2b 2613create_label_decl (tree label_name, Node_Id gnat_node)
a1ab4c31 2614{
88a94e2b
EB
2615 tree label_decl
2616 = build_decl (input_location, LABEL_DECL, label_name, void_type_node);
a1ab4c31 2617
88a94e2b
EB
2618 DECL_MODE (label_decl) = VOIDmode;
2619
2620 /* Add this decl to the current binding level. */
2621 gnat_pushdecl (label_decl, gnat_node);
a1ab4c31
AC
2622
2623 return label_decl;
2624}
2625\f
7d7fcb08 2626/* Return a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
a1ab4c31
AC
2627 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
2628 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
7d76717d 2629 PARM_DECL nodes chained through the DECL_CHAIN field).
a1ab4c31 2630
0e24192c 2631 INLINE_STATUS, PUBLIC_FLAG, EXTERN_FLAG, ARTIFICIAL_FLAG and ATTR_LIST are
7d7fcb08
EB
2632 used to set the appropriate fields in the FUNCTION_DECL. GNAT_NODE is
2633 used for the position of the decl. */
a1ab4c31
AC
2634
2635tree
7d7fcb08 2636create_subprog_decl (tree subprog_name, tree asm_name, tree subprog_type,
0e24192c
EB
2637 tree param_decl_list, enum inline_status_t inline_status,
2638 bool public_flag, bool extern_flag, bool artificial_flag,
7d7fcb08 2639 struct attrib *attr_list, Node_Id gnat_node)
a1ab4c31 2640{
d47d0a8d
EB
2641 tree subprog_decl = build_decl (input_location, FUNCTION_DECL, subprog_name,
2642 subprog_type);
2643 tree result_decl = build_decl (input_location, RESULT_DECL, NULL_TREE,
2644 TREE_TYPE (subprog_type));
7d7fcb08 2645 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
a1ab4c31 2646
d84b344a
EB
2647 /* If this is a non-inline function nested inside an inlined external
2648 function, we cannot honor both requests without cloning the nested
2649 function in the current unit since it is private to the other unit.
2650 We could inline the nested function as well but it's probably better
2651 to err on the side of too little inlining. */
0e24192c 2652 if (inline_status != is_enabled
5daed84a 2653 && !public_flag
d84b344a
EB
2654 && current_function_decl
2655 && DECL_DECLARED_INLINE_P (current_function_decl)
a1ab4c31 2656 && DECL_EXTERNAL (current_function_decl))
d84b344a 2657 DECL_DECLARED_INLINE_P (current_function_decl) = 0;
a1ab4c31 2658
7d7fcb08
EB
2659 DECL_ARTIFICIAL (subprog_decl) = artificial_flag;
2660 DECL_EXTERNAL (subprog_decl) = extern_flag;
0e24192c
EB
2661
2662 switch (inline_status)
2663 {
2664 case is_suppressed:
2665 DECL_UNINLINABLE (subprog_decl) = 1;
2666 break;
2667
2668 case is_disabled:
2669 break;
2670
2671 case is_enabled:
2672 DECL_DECLARED_INLINE_P (subprog_decl) = 1;
2673 DECL_NO_INLINE_WARNING_P (subprog_decl) = artificial_flag;
2674 break;
2675
2676 default:
2677 gcc_unreachable ();
2678 }
7d7fcb08
EB
2679
2680 TREE_PUBLIC (subprog_decl) = public_flag;
2681 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
a1ab4c31
AC
2682 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
2683 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
a1ab4c31 2684
d47d0a8d
EB
2685 DECL_ARTIFICIAL (result_decl) = 1;
2686 DECL_IGNORED_P (result_decl) = 1;
2687 DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (subprog_type);
2688 DECL_RESULT (subprog_decl) = result_decl;
a1ab4c31 2689
a1ab4c31
AC
2690 if (asm_name)
2691 {
2692 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
2693
2694 /* The expand_main_function circuitry expects "main_identifier_node" to
2695 designate the DECL_NAME of the 'main' entry point, in turn expected
2696 to be declared as the "main" function literally by default. Ada
2697 program entry points are typically declared with a different name
2698 within the binder-generated file, exported as 'main' to satisfy the
cfbb663c 2699 system expectations. Force main_identifier_node in this case. */
a1ab4c31 2700 if (asm_name == main_identifier_node)
cfbb663c 2701 DECL_NAME (subprog_decl) = main_identifier_node;
a1ab4c31
AC
2702 }
2703
74746d49
EB
2704 process_attributes (&subprog_decl, &attr_list, true, gnat_node);
2705
a1ab4c31
AC
2706 /* Add this decl to the current binding level. */
2707 gnat_pushdecl (subprog_decl, gnat_node);
2708
2709 /* Output the assembler code and/or RTL for the declaration. */
2710 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
2711
2712 return subprog_decl;
2713}
2714\f
2715/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
2716 body. This routine needs to be invoked before processing the declarations
2717 appearing in the subprogram. */
2718
2719void
2720begin_subprog_body (tree subprog_decl)
2721{
2722 tree param_decl;
2723
a1ab4c31
AC
2724 announce_function (subprog_decl);
2725
0ae44446
JR
2726 /* This function is being defined. */
2727 TREE_STATIC (subprog_decl) = 1;
2728
58c8f770
EB
2729 current_function_decl = subprog_decl;
2730
a1ab4c31
AC
2731 /* Enter a new binding level and show that all the parameters belong to
2732 this function. */
2733 gnat_pushlevel ();
a09d56d8 2734
a1ab4c31 2735 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
910ad8de 2736 param_decl = DECL_CHAIN (param_decl))
a1ab4c31
AC
2737 DECL_CONTEXT (param_decl) = subprog_decl;
2738
2739 make_decl_rtl (subprog_decl);
a1ab4c31
AC
2740}
2741
71196d4e 2742/* Finish translating the current subprogram and set its BODY. */
a1ab4c31
AC
2743
2744void
a406865a 2745end_subprog_body (tree body)
a1ab4c31
AC
2746{
2747 tree fndecl = current_function_decl;
2748
bd9c7fb9 2749 /* Attach the BLOCK for this level to the function and pop the level. */
a1ab4c31
AC
2750 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
2751 DECL_INITIAL (fndecl) = current_binding_level->block;
2752 gnat_poplevel ();
2753
a1ab4c31
AC
2754 /* Mark the RESULT_DECL as being in this subprogram. */
2755 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
2756
a963da4d
EB
2757 /* The body should be a BIND_EXPR whose BLOCK is the top-level one. */
2758 if (TREE_CODE (body) == BIND_EXPR)
2759 {
2760 BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
2761 DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
2762 }
2763
a1ab4c31
AC
2764 DECL_SAVED_TREE (fndecl) = body;
2765
228ee426 2766 current_function_decl = decl_function_context (fndecl);
71196d4e
EB
2767}
2768
2769/* Wrap up compilation of SUBPROG_DECL, a subprogram body. */
a1ab4c31 2770
71196d4e
EB
2771void
2772rest_of_subprog_body_compilation (tree subprog_decl)
2773{
a1ab4c31
AC
2774 /* We cannot track the location of errors past this point. */
2775 error_gnat_node = Empty;
2776
2777 /* If we're only annotating types, don't actually compile this function. */
2778 if (type_annotate_only)
2779 return;
2780
a406865a 2781 /* Dump functions before gimplification. */
71196d4e 2782 dump_function (TDI_original, subprog_decl);
a406865a 2783
228ee426 2784 if (!decl_function_context (subprog_decl))
71196d4e 2785 cgraph_finalize_function (subprog_decl, false);
a1ab4c31
AC
2786 else
2787 /* Register this function with cgraph just far enough to get it
2788 added to our parent's nested function list. */
71196d4e 2789 (void) cgraph_get_create_node (subprog_decl);
a1ab4c31
AC
2790}
2791
a1ab4c31
AC
2792tree
2793gnat_builtin_function (tree decl)
2794{
2795 gnat_pushdecl (decl, Empty);
2796 return decl;
2797}
2798
2799/* Return an integer type with the number of bits of precision given by
2800 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2801 it is a signed type. */
2802
2803tree
2804gnat_type_for_size (unsigned precision, int unsignedp)
2805{
2806 tree t;
2807 char type_name[20];
2808
2809 if (precision <= 2 * MAX_BITS_PER_WORD
2810 && signed_and_unsigned_types[precision][unsignedp])
2811 return signed_and_unsigned_types[precision][unsignedp];
2812
2813 if (unsignedp)
2814 t = make_unsigned_type (precision);
2815 else
2816 t = make_signed_type (precision);
2817
2818 if (precision <= 2 * MAX_BITS_PER_WORD)
2819 signed_and_unsigned_types[precision][unsignedp] = t;
2820
2821 if (!TYPE_NAME (t))
2822 {
c3d79c60 2823 sprintf (type_name, "%sSIGNED_%u", unsignedp ? "UN" : "", precision);
a1ab4c31
AC
2824 TYPE_NAME (t) = get_identifier (type_name);
2825 }
2826
2827 return t;
2828}
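
/* Illustrative note, not part of the original source: gnat_type_for_size
   (8, 1) returns an unsigned 8-bit integer type and caches it, so later
   calls with the same arguments return the same node; if the type has no
   name yet, it is given one of the form "UNSIGNED_8" or "SIGNED_8".  */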
2829
2830/* Likewise for floating-point types. */
2831
2832static tree
2833float_type_for_precision (int precision, enum machine_mode mode)
2834{
2835 tree t;
2836 char type_name[20];
2837
2838 if (float_types[(int) mode])
2839 return float_types[(int) mode];
2840
2841 float_types[(int) mode] = t = make_node (REAL_TYPE);
2842 TYPE_PRECISION (t) = precision;
2843 layout_type (t);
2844
2845 gcc_assert (TYPE_MODE (t) == mode);
2846 if (!TYPE_NAME (t))
2847 {
2848 sprintf (type_name, "FLOAT_%d", precision);
2849 TYPE_NAME (t) = get_identifier (type_name);
2850 }
2851
2852 return t;
2853}
2854
2855/* Return a data type that has machine mode MODE. UNSIGNEDP selects
2856 an unsigned type; otherwise a signed type is returned. */
2857
2858tree
2859gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2860{
2861 if (mode == BLKmode)
2862 return NULL_TREE;
2799d18c
EB
2863
2864 if (mode == VOIDmode)
a1ab4c31 2865 return void_type_node;
2799d18c
EB
2866
2867 if (COMPLEX_MODE_P (mode))
a1ab4c31 2868 return NULL_TREE;
2799d18c
EB
2869
2870 if (SCALAR_FLOAT_MODE_P (mode))
a1ab4c31 2871 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2799d18c
EB
2872
2873 if (SCALAR_INT_MODE_P (mode))
a1ab4c31 2874 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2799d18c
EB
2875
2876 if (VECTOR_MODE_P (mode))
2877 {
2878 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2879 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
2880 if (inner_type)
2881 return build_vector_type_for_mode (inner_type, mode);
2882 }
2883
2884 return NULL_TREE;
a1ab4c31
AC
2885}
2886
2887/* Return the unsigned version of a TYPE_NODE, a scalar type. */
2888
2889tree
2890gnat_unsigned_type (tree type_node)
2891{
2892 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2893
2894 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2895 {
2896 type = copy_node (type);
2897 TREE_TYPE (type) = type_node;
2898 }
2899 else if (TREE_TYPE (type_node)
2900 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2901 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2902 {
2903 type = copy_node (type);
2904 TREE_TYPE (type) = TREE_TYPE (type_node);
2905 }
2906
2907 return type;
2908}
2909
2910/* Return the signed version of a TYPE_NODE, a scalar type. */
2911
2912tree
2913gnat_signed_type (tree type_node)
2914{
2915 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2916
2917 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2918 {
2919 type = copy_node (type);
2920 TREE_TYPE (type) = type_node;
2921 }
2922 else if (TREE_TYPE (type_node)
2923 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2924 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2925 {
2926 type = copy_node (type);
2927 TREE_TYPE (type) = TREE_TYPE (type_node);
2928 }
2929
2930 return type;
2931}
2932
2933/* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2934 transparently converted to each other. */
2935
2936int
2937gnat_types_compatible_p (tree t1, tree t2)
2938{
2939 enum tree_code code;
2940
2941 /* This is the default criterion. */
2942 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
2943 return 1;
2944
2945 /* We only check structural equivalence here. */
2946 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
2947 return 0;
2948
7948ae37
OH
2949 /* Vector types are also compatible if they have the same number of subparts
2950 and the same form of (scalar) element type. */
2951 if (code == VECTOR_TYPE
2952 && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
2953 && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
2954 && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
2955 return 1;
2956
cfa0bd19 2957 /* Array types are also compatible if they are constrained and have the same
96bba5e6 2958 domain(s) and the same component type. */
a1ab4c31 2959 if (code == ARRAY_TYPE
0adef32b
JJ
2960 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
2961 || (TYPE_DOMAIN (t1)
b4680ca1 2962 && TYPE_DOMAIN (t2)
0adef32b
JJ
2963 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
2964 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
2965 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
cfa0bd19 2966 TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
96bba5e6
EB
2967 && (TREE_TYPE (t1) == TREE_TYPE (t2)
2968 || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
2969 && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))))
a1ab4c31
AC
2970 return 1;
2971
a1ab4c31
AC
2972 return 0;
2973}
523e82a7 2974
71196d4e
EB
2975/* Return true if EXPR is a useless type conversion. */
2976
2977bool
2978gnat_useless_type_conversion (tree expr)
2979{
2980 if (CONVERT_EXPR_P (expr)
2981 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
2982 || TREE_CODE (expr) == NON_LVALUE_EXPR)
2983 return gnat_types_compatible_p (TREE_TYPE (expr),
2984 TREE_TYPE (TREE_OPERAND (expr, 0)));
2985
2986 return false;
2987}
2988
523e82a7
EB
2989/* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
2990
2991bool
2992fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
2993 bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
2994{
2995 return TYPE_CI_CO_LIST (t) == cico_list
2996 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
2997 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
2998 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
2999}
a1ab4c31
AC
3000\f
3001/* EXP is an expression for the size of an object. If this size contains
3002 discriminant references, replace them with the maximum (if MAX_P) or
3003 minimum (if !MAX_P) possible value of the discriminant. */
3004
3005tree
3006max_size (tree exp, bool max_p)
3007{
3008 enum tree_code code = TREE_CODE (exp);
3009 tree type = TREE_TYPE (exp);
3010
3011 switch (TREE_CODE_CLASS (code))
3012 {
3013 case tcc_declaration:
3014 case tcc_constant:
3015 return exp;
3016
3017 case tcc_vl_exp:
3018 if (code == CALL_EXPR)
3019 {
f82a627c
EB
3020 tree t, *argarray;
3021 int n, i;
3022
3023 t = maybe_inline_call_in_expr (exp);
3024 if (t)
3025 return max_size (t, max_p);
a1ab4c31 3026
f82a627c
EB
3027 n = call_expr_nargs (exp);
3028 gcc_assert (n > 0);
2bb1fc26 3029 argarray = XALLOCAVEC (tree, n);
a1ab4c31
AC
3030 for (i = 0; i < n; i++)
3031 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
3032 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
3033 }
3034 break;
3035
3036 case tcc_reference:
3037 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
3038 modify. Otherwise, we treat it like a variable. */
3039 if (!CONTAINS_PLACEHOLDER_P (exp))
3040 return exp;
3041
3042 type = TREE_TYPE (TREE_OPERAND (exp, 1));
3043 return
3044 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);
3045
3046 case tcc_comparison:
3047 return max_p ? size_one_node : size_zero_node;
3048
3049 case tcc_unary:
ce3da0d0
EB
3050 if (code == NON_LVALUE_EXPR)
3051 return max_size (TREE_OPERAND (exp, 0), max_p);
6625d7bc 3052
ce3da0d0
EB
3053 return fold_build1 (code, type,
3054 max_size (TREE_OPERAND (exp, 0),
3055 code == NEGATE_EXPR ? !max_p : max_p));
3056
a1ab4c31 3057 case tcc_binary:
ce3da0d0
EB
3058 {
3059 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
3060 tree rhs = max_size (TREE_OPERAND (exp, 1),
3061 code == MINUS_EXPR ? !max_p : max_p);
3062
3063 /* Special-case wanting the maximum value of a MIN_EXPR.
3064 In that case, if one side overflows, return the other. */
3065 if (max_p && code == MIN_EXPR)
3066 {
3067 if (TREE_CODE (rhs) == INTEGER_CST && TREE_OVERFLOW (rhs))
3068 return lhs;
3069
3070 if (TREE_CODE (lhs) == INTEGER_CST && TREE_OVERFLOW (lhs))
3071 return rhs;
3072 }
3073
3074 /* Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
3075 overflowing and the RHS a variable. */
3076 if ((code == MINUS_EXPR || code == PLUS_EXPR)
3077 && TREE_CODE (lhs) == INTEGER_CST
3078 && TREE_OVERFLOW (lhs)
3079 && !TREE_CONSTANT (rhs))
3080 return lhs;
3081
3082 return size_binop (code, lhs, rhs);
3083 }
3084
a1ab4c31
AC
3085 case tcc_expression:
3086 switch (TREE_CODE_LENGTH (code))
3087 {
3088 case 1:
722356ce
EB
3089 if (code == SAVE_EXPR)
3090 return exp;
ce3da0d0
EB
3091
3092 return fold_build1 (code, type,
3093 max_size (TREE_OPERAND (exp, 0), max_p));
a1ab4c31
AC
3094
3095 case 2:
3096 if (code == COMPOUND_EXPR)
3097 return max_size (TREE_OPERAND (exp, 1), max_p);
3098
ce3da0d0
EB
3099 return fold_build2 (code, type,
3100 max_size (TREE_OPERAND (exp, 0), max_p),
3101 max_size (TREE_OPERAND (exp, 1), max_p));
a1ab4c31
AC
3102
3103 case 3:
722356ce 3104 if (code == COND_EXPR)
a1ab4c31
AC
3105 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
3106 max_size (TREE_OPERAND (exp, 1), max_p),
3107 max_size (TREE_OPERAND (exp, 2), max_p));
ce3da0d0
EB
3108
3109 default:
3110 break;
a1ab4c31
AC
3111 }
3112
3113 /* Other tree classes cannot happen. */
3114 default:
3115 break;
3116 }
3117
3118 gcc_unreachable ();
3119}
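The recursion above is a small interval analysis: the extreme of a sum is the sum of the extremes, a difference needs the opposite extreme of its right operand, and negation flips the direction, which is what the MAX_P flips for MINUS_EXPR and NEGATE_EXPR implement. Here is a minimal standalone sketch of that rule over a made-up expression node; toy_expr and toy_size are illustrative names, not GCC trees.

/* Illustrative sketch only, not part of utils.c: interval propagation over
   a toy expression tree, mirroring the MAX_P flipping done above for
   MINUS_EXPR and NEGATE_EXPR.  The node layout is hypothetical.  */
#include <stdio.h>

enum toy_code { TOY_CST, TOY_VAR, TOY_PLUS, TOY_MINUS, TOY_NEG };

struct toy_expr
{
  enum toy_code code;
  long val;                    /* for TOY_CST */
  long lo, hi;                 /* for TOY_VAR: its known range */
  const struct toy_expr *op0, *op1;
};

static long
toy_size (const struct toy_expr *e, int max_p)
{
  switch (e->code)
    {
    case TOY_CST:
      return e->val;
    case TOY_VAR:
      return max_p ? e->hi : e->lo;
    case TOY_PLUS:
      return toy_size (e->op0, max_p) + toy_size (e->op1, max_p);
    case TOY_MINUS:
      /* Like MINUS_EXPR above: flip MAX_P for the right operand.  */
      return toy_size (e->op0, max_p) - toy_size (e->op1, !max_p);
    case TOY_NEG:
      /* Like NEGATE_EXPR above: flip MAX_P for the operand.  */
      return -toy_size (e->op0, !max_p);
    }
  return 0;
}

int
main (void)
{
  struct toy_expr d = { TOY_VAR, 0, 1, 10, 0, 0 };      /* discriminant in 1..10 */
  struct toy_expr c = { TOY_CST, 16, 0, 0, 0, 0 };
  struct toy_expr sz = { TOY_MINUS, 0, 0, 0, &c, &d };  /* size = 16 - D */
  printf ("max %ld, min %ld\n", toy_size (&sz, 1), toy_size (&sz, 0));
  return 0;
}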
3120\f
3121/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
3122 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
3123 Return a constructor for the template. */
3124
3125tree
3126build_template (tree template_type, tree array_type, tree expr)
3127{
9771b263 3128 vec<constructor_elt, va_gc> *template_elts = NULL;
a1ab4c31
AC
3129 tree bound_list = NULL_TREE;
3130 tree field;
3131
3132 while (TREE_CODE (array_type) == RECORD_TYPE
315cff15 3133 && (TYPE_PADDING_P (array_type)
a1ab4c31
AC
3134 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
3135 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
3136
3137 if (TREE_CODE (array_type) == ARRAY_TYPE
3138 || (TREE_CODE (array_type) == INTEGER_TYPE
3139 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
3140 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
3141
3142 /* First make the list for a CONSTRUCTOR for the template. Go down the
3143 field list of the template instead of the type chain because this
3144 array might be an Ada array of arrays and we can't tell where the
3145 nested arrays stop being the underlying object. */
3146
3147 for (field = TYPE_FIELDS (template_type); field;
3148 (bound_list
3149 ? (bound_list = TREE_CHAIN (bound_list))
3150 : (array_type = TREE_TYPE (array_type))),
910ad8de 3151 field = DECL_CHAIN (DECL_CHAIN (field)))
a1ab4c31
AC
3152 {
3153 tree bounds, min, max;
3154
3155 /* If we have a bound list, get the bounds from there. Likewise
3156 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
3157 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
3158 This will give us a maximum range. */
3159 if (bound_list)
3160 bounds = TREE_VALUE (bound_list);
3161 else if (TREE_CODE (array_type) == ARRAY_TYPE)
3162 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
3163 else if (expr && TREE_CODE (expr) == PARM_DECL
3164 && DECL_BY_COMPONENT_PTR_P (expr))
3165 bounds = TREE_TYPE (field);
3166 else
3167 gcc_unreachable ();
3168
3169 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
910ad8de 3170 max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));
a1ab4c31
AC
3171
3172 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
3173 substitute it from OBJECT. */
3174 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
3175 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
3176
0e228dd9 3177 CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
910ad8de 3178 CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
a1ab4c31
AC
3179 }
3180
0e228dd9 3181 return gnat_build_constructor (template_type, template_elts);
a1ab4c31
AC
3182}
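The template filled in above is just a record of {low bound, high bound} field pairs, one pair per dimension, with placeholders substituted from the object when the bounds are self-referential. A standalone sketch of the resulting shape for a two-dimensional constrained array follows; toy_bounds and toy_template_2d are hypothetical names, not the GCC template record.

/* Illustrative sketch only, not part of utils.c: the flattened {low, high}
   pairs that the template constructor above collects, one per dimension.  */
#include <stdio.h>

struct toy_bounds { long lb, ub; };
struct toy_template_2d { struct toy_bounds dim[2]; };

int
main (void)
{
  /* Bounds for A : array (1 .. 3, 0 .. 7) of ...  */
  struct toy_template_2d t = { { { 1, 3 }, { 0, 7 } } };
  long count = 1;
  int i;

  for (i = 0; i < 2; i++)
    count *= t.dim[i].ub - t.dim[i].lb + 1;
  printf ("%ld components\n", count);   /* 24 components */
  return 0;
}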
3183\f
31a5a547
EB
3184/* Helper routine to make a descriptor field. FIELD_LIST is the list of decls
3185 being built; the new decl is chained on to the front of the list. */
3186
3187static tree
3188make_descriptor_field (const char *name, tree type, tree rec_type,
3189 tree initial, tree field_list)
3190{
3191 tree field
3192 = create_field_decl (get_identifier (name), type, rec_type, NULL_TREE,
3193 NULL_TREE, 0, 0);
3194
3195 DECL_INITIAL (field) = initial;
3196 DECL_CHAIN (field) = field_list;
3197 return field;
3198}
3199
58c8f770
EB
3200/* Build a 32-bit VMS descriptor from a Mechanism_Type, which must specify a
3201 descriptor type, and the GCC type of an object. Each FIELD_DECL in the
3202 type contains in its DECL_INITIAL the expression to use when a constructor
3203 is made for the type. GNAT_ENTITY is an entity used to print out an error
3204 message if the mechanism cannot be applied to an object of that type and
3205 also for the name. */
a1ab4c31
AC
3206
3207tree
d628c015 3208build_vms_descriptor32 (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
a1ab4c31
AC
3209{
3210 tree record_type = make_node (RECORD_TYPE);
31a5a547 3211 tree pointer32_type, pointer64_type;
788e5046 3212 tree field_list = NULL_TREE;
31a5a547
EB
3213 int klass, ndim, i, dtype = 0;
3214 tree inner_type, tem;
a1ab4c31 3215 tree *idx_arr;
a1ab4c31
AC
3216
3217 /* If TYPE is an unconstrained array, use the underlying array type. */
3218 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3219 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
3220
3221 /* If this is an array, compute the number of dimensions in the array,
3222 get the index types, and point to the inner type. */
3223 if (TREE_CODE (type) != ARRAY_TYPE)
3224 ndim = 0;
3225 else
3226 for (ndim = 1, inner_type = type;
3227 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
3228 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
3229 ndim++, inner_type = TREE_TYPE (inner_type))
3230 ;
3231
2bb1fc26 3232 idx_arr = XALLOCAVEC (tree, ndim);
a1ab4c31 3233
d628c015 3234 if (mech != By_Descriptor_NCA && mech != By_Short_Descriptor_NCA
a1ab4c31
AC
3235 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
3236 for (i = ndim - 1, inner_type = type;
3237 i >= 0;
3238 i--, inner_type = TREE_TYPE (inner_type))
3239 idx_arr[i] = TYPE_DOMAIN (inner_type);
3240 else
3241 for (i = 0, inner_type = type;
3242 i < ndim;
3243 i++, inner_type = TREE_TYPE (inner_type))
3244 idx_arr[i] = TYPE_DOMAIN (inner_type);
3245
3246 /* Now get the DTYPE value. */
3247 switch (TREE_CODE (type))
3248 {
3249 case INTEGER_TYPE:
3250 case ENUMERAL_TYPE:
01ddebf2 3251 case BOOLEAN_TYPE:
a1ab4c31
AC
3252 if (TYPE_VAX_FLOATING_POINT_P (type))
3253 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
3254 {
3255 case 6:
3256 dtype = 10;
3257 break;
3258 case 9:
3259 dtype = 11;
3260 break;
3261 case 15:
3262 dtype = 27;
3263 break;
3264 }
3265 else
3266 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
3267 {
3268 case 8:
3269 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
3270 break;
3271 case 16:
3272 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
3273 break;
3274 case 32:
3275 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
3276 break;
3277 case 64:
3278 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
3279 break;
3280 case 128:
3281 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
3282 break;
3283 }
3284 break;
3285
3286 case REAL_TYPE:
3287 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
3288 break;
3289
3290 case COMPLEX_TYPE:
3291 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
3292 && TYPE_VAX_FLOATING_POINT_P (type))
3293 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
3294 {
3295 case 6:
3296 dtype = 12;
3297 break;
3298 case 9:
3299 dtype = 13;
3300 break;
3301 case 15:
3302 dtype = 29;
3303 }
3304 else
3305 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
3306 break;
3307
3308 case ARRAY_TYPE:
3309 dtype = 14;
3310 break;
3311
3312 default:
3313 break;
3314 }
3315
3316 /* Get the CLASS value. */
3317 switch (mech)
3318 {
3319 case By_Descriptor_A:
d628c015 3320 case By_Short_Descriptor_A:
c6bd4220 3321 klass = 4;
a1ab4c31
AC
3322 break;
3323 case By_Descriptor_NCA:
d628c015 3324 case By_Short_Descriptor_NCA:
c6bd4220 3325 klass = 10;
a1ab4c31
AC
3326 break;
3327 case By_Descriptor_SB:
d628c015 3328 case By_Short_Descriptor_SB:
c6bd4220 3329 klass = 15;
a1ab4c31
AC
3330 break;
3331 case By_Descriptor:
d628c015 3332 case By_Short_Descriptor:
a1ab4c31 3333 case By_Descriptor_S:
d628c015 3334 case By_Short_Descriptor_S:
a1ab4c31 3335 default:
c6bd4220 3336 klass = 1;
a1ab4c31
AC
3337 break;
3338 }
3339
58c8f770
EB
3340 /* Make the type for a descriptor for VMS. The first four fields are the
3341 same for all types. */
82b481ed
EB
3342 field_list
3343 = make_descriptor_field ("LENGTH", gnat_type_for_size (16, 1), record_type,
3344 size_in_bytes ((mech == By_Descriptor_A
3345 || mech == By_Short_Descriptor_A)
3346 ? inner_type : type),
3347 field_list);
3348 field_list
3349 = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1), record_type,
3350 size_int (dtype), field_list);
3351 field_list
3352 = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1), record_type,
3353 size_int (klass), field_list);
a1ab4c31 3354
a1ab4c31 3355 pointer32_type = build_pointer_type_for_mode (type, SImode, false);
31a5a547
EB
3356 pointer64_type = build_pointer_type_for_mode (type, DImode, false);
3357
3358 /* Ensure that only 32-bit pointers are passed in 32-bit descriptors. Note
3359 that we cannot build a template call to the CE routine as it would get a
3360 wrong source location; instead we use a second placeholder for it. */
3361 tem = build_unary_op (ADDR_EXPR, pointer64_type,
3362 build0 (PLACEHOLDER_EXPR, type));
3363 tem = build3 (COND_EXPR, pointer32_type,
f542c405
EB
3364 Pmode != SImode
3365 ? build_binary_op (GE_EXPR, boolean_type_node, tem,
3366 build_int_cstu (pointer64_type, 0x80000000))
3367 : boolean_false_node,
31a5a547
EB
3368 build0 (PLACEHOLDER_EXPR, void_type_node),
3369 convert (pointer32_type, tem));
a1ab4c31 3370
82b481ed 3371 field_list
31a5a547 3372 = make_descriptor_field ("POINTER", pointer32_type, record_type, tem,
82b481ed 3373 field_list);
a1ab4c31
AC
3374
3375 switch (mech)
3376 {
3377 case By_Descriptor:
d628c015 3378 case By_Short_Descriptor:
a1ab4c31 3379 case By_Descriptor_S:
d628c015 3380 case By_Short_Descriptor_S:
a1ab4c31
AC
3381 break;
3382
3383 case By_Descriptor_SB:
d628c015 3384 case By_Short_Descriptor_SB:
82b481ed
EB
3385 field_list
3386 = make_descriptor_field ("SB_L1", gnat_type_for_size (32, 1),
3387 record_type,
3388 (TREE_CODE (type) == ARRAY_TYPE
3389 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type))
3390 : size_zero_node),
3391 field_list);
3392 field_list
3393 = make_descriptor_field ("SB_U1", gnat_type_for_size (32, 1),
3394 record_type,
3395 (TREE_CODE (type) == ARRAY_TYPE
3396 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type))
3397 : size_zero_node),
3398 field_list);
a1ab4c31
AC
3399 break;
3400
3401 case By_Descriptor_A:
d628c015 3402 case By_Short_Descriptor_A:
a1ab4c31 3403 case By_Descriptor_NCA:
d628c015 3404 case By_Short_Descriptor_NCA:
82b481ed
EB
3405 field_list
3406 = make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
3407 record_type, size_zero_node, field_list);
3408
3409 field_list
3410 = make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
3411 record_type, size_zero_node, field_list);
3412
82b481ed
EB
3413 field_list
3414 = make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
3415 record_type,
3416 size_int ((mech == By_Descriptor_NCA
3417 || mech == By_Short_Descriptor_NCA)
3418 ? 0
3419 /* Set FL_COLUMN, FL_COEFF, and
3420 FL_BOUNDS. */
3421 : (TREE_CODE (type) == ARRAY_TYPE
3422 && TYPE_CONVENTION_FORTRAN_P
3423 (type)
3424 ? 224 : 192)),
3425 field_list);
3426
3427 field_list
3428 = make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
3429 record_type, size_int (ndim), field_list);
3430
3431 field_list
3432 = make_descriptor_field ("ARSIZE", gnat_type_for_size (32, 1),
3433 record_type, size_in_bytes (type),
3434 field_list);
a1ab4c31
AC
3435
3436 /* Now build a pointer to the 0,0,0... element. */
3437 tem = build0 (PLACEHOLDER_EXPR, type);
3438 for (i = 0, inner_type = type; i < ndim;
3439 i++, inner_type = TREE_TYPE (inner_type))
3440 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
3441 convert (TYPE_DOMAIN (inner_type), size_zero_node),
3442 NULL_TREE, NULL_TREE);
3443
82b481ed
EB
3444 field_list
3445 = make_descriptor_field ("A0", pointer32_type, record_type,
3446 build1 (ADDR_EXPR, pointer32_type, tem),
3447 field_list);
a1ab4c31
AC
3448
3449 /* Next come the addressing coefficients. */
3450 tem = size_one_node;
3451 for (i = 0; i < ndim; i++)
3452 {
3453 char fname[3];
3454 tree idx_length
3455 = size_binop (MULT_EXPR, tem,
3456 size_binop (PLUS_EXPR,
3457 size_binop (MINUS_EXPR,
3458 TYPE_MAX_VALUE (idx_arr[i]),
3459 TYPE_MIN_VALUE (idx_arr[i])),
3460 size_int (1)));
3461
d628c015
DR
3462 fname[0] = ((mech == By_Descriptor_NCA ||
3463 mech == By_Short_Descriptor_NCA) ? 'S' : 'M');
a1ab4c31 3464 fname[1] = '0' + i, fname[2] = 0;
82b481ed
EB
3465 field_list
3466 = make_descriptor_field (fname, gnat_type_for_size (32, 1),
3467 record_type, idx_length, field_list);
a1ab4c31 3468
d628c015 3469 if (mech == By_Descriptor_NCA || mech == By_Short_Descriptor_NCA)
a1ab4c31
AC
3470 tem = idx_length;
3471 }
3472
3473 /* Finally here are the bounds. */
3474 for (i = 0; i < ndim; i++)
3475 {
3476 char fname[3];
3477
3478 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
82b481ed
EB
3479 field_list
3480 = make_descriptor_field (fname, gnat_type_for_size (32, 1),
3481 record_type, TYPE_MIN_VALUE (idx_arr[i]),
3482 field_list);
a1ab4c31
AC
3483
3484 fname[0] = 'U';
82b481ed
EB
3485 field_list
3486 = make_descriptor_field (fname, gnat_type_for_size (32, 1),
3487 record_type, TYPE_MAX_VALUE (idx_arr[i]),
3488 field_list);
a1ab4c31
AC
3489 }
3490 break;
3491
3492 default:
3493 post_error ("unsupported descriptor type for &", gnat_entity);
3494 }
3495
10069d53 3496 TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC");
788e5046 3497 finish_record_type (record_type, nreverse (field_list), 0, false);
a1ab4c31
AC
3498 return record_type;
3499}
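The DTYPE switch in the function above selects the codes mandated by the VMS calling standard: 2/3/4/5/25 for unsigned integers of 8, 16, 32, 64 and 128 bits, 6/7/8/9/26 for the signed ones, 52 or 53 for single or double floats, and 14 for arrays. The following standalone restatement of that mapping is for illustration only; toy_dtype is not a GCC function.

/* Illustrative sketch only, not part of utils.c: the integer and float
   DTYPE codes selected by the switch above, restated standalone.  */
#include <stdio.h>

static int
toy_dtype (int is_float, int bits, int is_unsigned)
{
  if (is_float)
    return bits == 32 ? 52 : 53;
  switch (bits)
    {
    case 8:   return is_unsigned ? 2 : 6;
    case 16:  return is_unsigned ? 3 : 7;
    case 32:  return is_unsigned ? 4 : 8;
    case 64:  return is_unsigned ? 5 : 9;
    case 128: return is_unsigned ? 25 : 26;
    }
  return 0;
}

int
main (void)
{
  printf ("%d %d %d\n",
          toy_dtype (0, 32, 1),    /* 4: unsigned 32-bit */
          toy_dtype (0, 64, 0),    /* 9: signed 64-bit */
          toy_dtype (1, 64, 0));   /* 53: double float */
  return 0;
}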
3500
58c8f770
EB
3501/* Build a 64-bit VMS descriptor from a Mechanism_Type, which must specify a
3502 descriptor type, and the GCC type of an object. Each FIELD_DECL in the
3503 type contains in its DECL_INITIAL the expression to use when a constructor
3504 is made for the type. GNAT_ENTITY is an entity used to print out an error
3505 message if the mechanism cannot be applied to an object of that type and
3506 also for the name. */
6ca2b0a0
DR
3507
3508tree
d628c015 3509build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
6ca2b0a0 3510{
31a5a547 3511 tree record_type = make_node (RECORD_TYPE);
6ca2b0a0 3512 tree pointer64_type;
31a5a547
EB
3513 tree field_list = NULL_TREE;
3514 int klass, ndim, i, dtype = 0;
3515 tree inner_type, tem;
6ca2b0a0 3516 tree *idx_arr;
6ca2b0a0
DR
3517
3518 /* If TYPE is an unconstrained array, use the underlying array type. */
3519 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3520 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
3521
3522 /* If this is an array, compute the number of dimensions in the array,
3523 get the index types, and point to the inner type. */
3524 if (TREE_CODE (type) != ARRAY_TYPE)
3525 ndim = 0;
3526 else
3527 for (ndim = 1, inner_type = type;
3528 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
3529 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
3530 ndim++, inner_type = TREE_TYPE (inner_type))
3531 ;
3532
2bb1fc26 3533 idx_arr = XALLOCAVEC (tree, ndim);
6ca2b0a0
DR
3534
3535 if (mech != By_Descriptor_NCA
3536 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
3537 for (i = ndim - 1, inner_type = type;
3538 i >= 0;
3539 i--, inner_type = TREE_TYPE (inner_type))
3540 idx_arr[i] = TYPE_DOMAIN (inner_type);
3541 else
3542 for (i = 0, inner_type = type;
3543 i < ndim;
3544 i++, inner_type = TREE_TYPE (inner_type))
3545 idx_arr[i] = TYPE_DOMAIN (inner_type);
3546
3547 /* Now get the DTYPE value. */
3548 switch (TREE_CODE (type))
3549 {
3550 case INTEGER_TYPE:
3551 case ENUMERAL_TYPE:
01ddebf2 3552 case BOOLEAN_TYPE:
6ca2b0a0
DR
3553 if (TYPE_VAX_FLOATING_POINT_P (type))
3554 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
3555 {
3556 case 6:
3557 dtype = 10;
3558 break;
3559 case 9:
3560 dtype = 11;
3561 break;
3562 case 15:
3563 dtype = 27;
3564 break;
3565 }
3566 else
3567 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
3568 {
3569 case 8:
3570 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
3571 break;
3572 case 16:
3573 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
3574 break;
3575 case 32:
3576 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
3577 break;
3578 case 64:
3579 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
3580 break;
3581 case 128:
3582 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
3583 break;
3584 }
3585 break;
3586
3587 case REAL_TYPE:
3588 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
3589 break;
3590
3591 case COMPLEX_TYPE:
3592 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
3593 && TYPE_VAX_FLOATING_POINT_P (type))
3594 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
3595 {
3596 case 6:
3597 dtype = 12;
3598 break;
3599 case 9:
3600 dtype = 13;
3601 break;
3602 case 15:
3603 dtype = 29;
3604 }
3605 else
3606 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
3607 break;
3608
3609 case ARRAY_TYPE:
3610 dtype = 14;
3611 break;
3612
3613 default:
3614 break;
3615 }
3616
3617 /* Get the CLASS value. */
3618 switch (mech)
3619 {
3620 case By_Descriptor_A:
c6bd4220 3621 klass = 4;
6ca2b0a0
DR
3622 break;
3623 case By_Descriptor_NCA:
c6bd4220 3624 klass = 10;
6ca2b0a0
DR
3625 break;
3626 case By_Descriptor_SB:
c6bd4220 3627 klass = 15;
6ca2b0a0
DR
3628 break;
3629 case By_Descriptor:
3630 case By_Descriptor_S:
3631 default:
c6bd4220 3632 klass = 1;
6ca2b0a0
DR
3633 break;
3634 }
3635
58c8f770 3636 /* Make the type for a 64-bit descriptor for VMS. The first six fields
6ca2b0a0 3637 are the same for all types. */
31a5a547 3638 field_list
788e5046 3639 = make_descriptor_field ("MBO", gnat_type_for_size (16, 1),
31a5a547
EB
3640 record_type, size_int (1), field_list);
3641 field_list
788e5046 3642 = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1),
31a5a547
EB
3643 record_type, size_int (dtype), field_list);
3644 field_list
788e5046 3645 = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1),
31a5a547
EB
3646 record_type, size_int (klass), field_list);
3647 field_list
788e5046 3648 = make_descriptor_field ("MBMO", gnat_type_for_size (32, 1),
f54ee980 3649 record_type, size_int (-1), field_list);
31a5a547 3650 field_list
788e5046 3651 = make_descriptor_field ("LENGTH", gnat_type_for_size (64, 1),
31a5a547 3652 record_type,
788e5046 3653 size_in_bytes (mech == By_Descriptor_A
82b481ed 3654 ? inner_type : type),
31a5a547 3655 field_list);
6ca2b0a0
DR
3656
3657 pointer64_type = build_pointer_type_for_mode (type, DImode, false);
3658
31a5a547
EB
3659 field_list
3660 = make_descriptor_field ("POINTER", pointer64_type, record_type,
788e5046 3661 build_unary_op (ADDR_EXPR, pointer64_type,
82b481ed 3662 build0 (PLACEHOLDER_EXPR, type)),
31a5a547 3663 field_list);
6ca2b0a0
DR
3664
3665 switch (mech)
3666 {
3667 case By_Descriptor:
3668 case By_Descriptor_S:
3669 break;
3670
3671 case By_Descriptor_SB:
31a5a547 3672 field_list
788e5046 3673 = make_descriptor_field ("SB_L1", gnat_type_for_size (64, 1),
31a5a547 3674 record_type,
788e5046
NF
3675 (TREE_CODE (type) == ARRAY_TYPE
3676 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type))
82b481ed 3677 : size_zero_node),
31a5a547
EB
3678 field_list);
3679 field_list
788e5046 3680 = make_descriptor_field ("SB_U1", gnat_type_for_size (64, 1),
31a5a547 3681 record_type,
788e5046
NF
3682 (TREE_CODE (type) == ARRAY_TYPE
3683 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type))
82b481ed 3684 : size_zero_node),
31a5a547 3685 field_list);
6ca2b0a0
DR
3686 break;
3687
3688 case By_Descriptor_A:
3689 case By_Descriptor_NCA:
31a5a547 3690 field_list
788e5046 3691 = make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
31a5a547 3692 record_type, size_zero_node, field_list);
788e5046 3693
31a5a547 3694 field_list
788e5046 3695 = make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
31a5a547 3696 record_type, size_zero_node, field_list);
788e5046
NF
3697
3698 dtype = (mech == By_Descriptor_NCA
3699 ? 0
3700 /* Set FL_COLUMN, FL_COEFF, and
3701 FL_BOUNDS. */
3702 : (TREE_CODE (type) == ARRAY_TYPE
3703 && TYPE_CONVENTION_FORTRAN_P (type)
3704 ? 224 : 192));
31a5a547 3705 field_list
788e5046 3706 = make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
31a5a547
EB
3707 record_type, size_int (dtype),
3708 field_list);
6ca2b0a0 3709
31a5a547 3710 field_list
788e5046 3711 = make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
31a5a547 3712 record_type, size_int (ndim), field_list);
788e5046 3713
31a5a547 3714 field_list
788e5046 3715 = make_descriptor_field ("MBZ", gnat_type_for_size (32, 1),
31a5a547
EB
3716 record_type, size_int (0), field_list);
3717 field_list
788e5046 3718 = make_descriptor_field ("ARSIZE", gnat_type_for_size (64, 1),
31a5a547
EB
3719 record_type, size_in_bytes (type),
3720 field_list);
6ca2b0a0
DR
3721
3722 /* Now build a pointer to the 0,0,0... element. */
3723 tem = build0 (PLACEHOLDER_EXPR, type);
3724 for (i = 0, inner_type = type; i < ndim;
3725 i++, inner_type = TREE_TYPE (inner_type))
3726 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
3727 convert (TYPE_DOMAIN (inner_type), size_zero_node),
3728 NULL_TREE, NULL_TREE);
3729
31a5a547
EB
3730 field_list
3731 = make_descriptor_field ("A0", pointer64_type, record_type,
788e5046 3732 build1 (ADDR_EXPR, pointer64_type, tem),
31a5a547 3733 field_list);
6ca2b0a0
DR
3734
3735 /* Next come the addressing coefficients. */
3736 tem = size_one_node;
3737 for (i = 0; i < ndim; i++)
3738 {
3739 char fname[3];
3740 tree idx_length
3741 = size_binop (MULT_EXPR, tem,
3742 size_binop (PLUS_EXPR,
3743 size_binop (MINUS_EXPR,
3744 TYPE_MAX_VALUE (idx_arr[i]),
3745 TYPE_MIN_VALUE (idx_arr[i])),
3746 size_int (1)));
3747
3748 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
3749 fname[1] = '0' + i, fname[2] = 0;
31a5a547 3750 field_list
788e5046 3751 = make_descriptor_field (fname, gnat_type_for_size (64, 1),
31a5a547 3752 record_type, idx_length, field_list);
6ca2b0a0
DR
3753
3754 if (mech == By_Descriptor_NCA)
3755 tem = idx_length;
3756 }
3757
3758 /* Finally here are the bounds. */
3759 for (i = 0; i < ndim; i++)
3760 {
3761 char fname[3];
3762
3763 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
31a5a547 3764 field_list
788e5046 3765 = make_descriptor_field (fname, gnat_type_for_size (64, 1),
31a5a547
EB
3766 record_type,
3767 TYPE_MIN_VALUE (idx_arr[i]), field_list);
6ca2b0a0
DR
3768
3769 fname[0] = 'U';
31a5a547 3770 field_list
788e5046 3771 = make_descriptor_field (fname, gnat_type_for_size (64, 1),
31a5a547
EB
3772 record_type,
3773 TYPE_MAX_VALUE (idx_arr[i]), field_list);
6ca2b0a0
DR
3774 }
3775 break;
3776
3777 default:
3778 post_error ("unsupported descriptor type for &", gnat_entity);
3779 }
3780
31a5a547
EB
3781 TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC64");
3782 finish_record_type (record_type, nreverse (field_list), 0, false);
3783 return record_type;
6ca2b0a0
DR
3784}
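The M<i>/S<i> coefficients built in the loop above come from a running product: each one is the previous coefficient times the extent (high bound - low bound + 1) of a dimension, and only the NCA variant feeds the product forward as a stride. A standalone sketch of that computation with hypothetical bounds follows (illustration only, not part of utils.c).

/* Illustrative sketch only: the M<i>/S<i> addressing coefficients computed
   in the loop above, for a hypothetical 3-dimensional array.  */
#include <stdio.h>

int
main (void)
{
  long lo[3] = { 1, 0, 1 }, hi[3] = { 4, 9, 2 };
  long coeff = 1;
  int i, nca = 1;               /* 1: NCA descriptor, 0: A descriptor */

  for (i = 0; i < 3; i++)
    {
      long extent = hi[i] - lo[i] + 1;
      long idx_length = coeff * extent;
      printf ("%c%d = %ld\n", nca ? 'S' : 'M', i, idx_length);
      if (nca)
        coeff = idx_length;     /* running product, as in the loop above */
    }
  return 0;
}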
3785
31a5a547
EB
3786/* Fill in a VMS descriptor of GNU_TYPE for GNU_EXPR and return the result.
3787 GNAT_ACTUAL is the actual parameter for which the descriptor is built. */
a1ab4c31 3788
31a5a547
EB
3789tree
3790fill_vms_descriptor (tree gnu_type, tree gnu_expr, Node_Id gnat_actual)
a1ab4c31 3791{
9771b263 3792 vec<constructor_elt, va_gc> *v = NULL;
31a5a547 3793 tree field;
a1ab4c31 3794
31a5a547
EB
3795 gnu_expr = maybe_unconstrained_array (gnu_expr);
3796 gnu_expr = gnat_protect_expr (gnu_expr);
3797 gnat_mark_addressable (gnu_expr);
3798
3799 /* We may need to substitute both GNU_EXPR and a CALL_EXPR to the raise CE
3800 routine in case we have a 32-bit descriptor. */
3801 gnu_expr = build2 (COMPOUND_EXPR, void_type_node,
3802 build_call_raise (CE_Range_Check_Failed, gnat_actual,
3803 N_Raise_Constraint_Error),
3804 gnu_expr);
3805
3806 for (field = TYPE_FIELDS (gnu_type); field; field = DECL_CHAIN (field))
3807 {
3808 tree value
3809 = convert (TREE_TYPE (field),
3810 SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_INITIAL (field),
3811 gnu_expr));
3812 CONSTRUCTOR_APPEND_ELT (v, field, value);
3813 }
3814
3815 return gnat_build_constructor (gnu_type, v);
a1ab4c31
AC
3816}
3817
d628c015
DR
3818/* Convert GNU_EXPR, a pointer to a 64-bit VMS descriptor, to GNU_TYPE, a
3819 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3820 which the VMS descriptor is passed. */
a1ab4c31
AC
3821
3822static tree
d628c015
DR
3823convert_vms_descriptor64 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3824{
3825 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3826 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3827 /* The CLASS field is the 3rd field in the descriptor. */
910ad8de 3828 tree klass = DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type)));
d628c015 3829 /* The POINTER field is the 6th field in the descriptor. */
910ad8de 3830 tree pointer = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (klass)));
d628c015
DR
3831
3832 /* Retrieve the value of the POINTER field. */
3833 tree gnu_expr64
58c8f770 3834 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
d628c015
DR
3835
3836 if (POINTER_TYPE_P (gnu_type))
3837 return convert (gnu_type, gnu_expr64);
3838
315cff15 3839 else if (TYPE_IS_FAT_POINTER_P (gnu_type))
d628c015
DR
3840 {
3841 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
7d76717d 3842 tree p_bounds_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (gnu_type)));
d628c015
DR
3843 tree template_type = TREE_TYPE (p_bounds_type);
3844 tree min_field = TYPE_FIELDS (template_type);
7d76717d 3845 tree max_field = DECL_CHAIN (TYPE_FIELDS (template_type));
6bf68a93 3846 tree template_tree, template_addr, aflags, dimct, t, u;
d628c015 3847 /* See the head comment of build_vms_descriptor. */
c6bd4220 3848 int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
d628c015 3849 tree lfield, ufield;
9771b263 3850 vec<constructor_elt, va_gc> *v;
d628c015 3851
86060344 3852 /* Convert POINTER to the pointer-to-array type. */
d628c015
DR
3853 gnu_expr64 = convert (p_array_type, gnu_expr64);
3854
c6bd4220 3855 switch (iklass)
d628c015
DR
3856 {
3857 case 1: /* Class S */
3858 case 15: /* Class SB */
3859 /* Build {1, LENGTH} template; LENGTH64 is the 5th field. */
9771b263 3860 vec_alloc (v, 2);
910ad8de 3861 t = DECL_CHAIN (DECL_CHAIN (klass));
d628c015 3862 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
0e228dd9
NF
3863 CONSTRUCTOR_APPEND_ELT (v, min_field,
3864 convert (TREE_TYPE (min_field),
3865 integer_one_node));
3866 CONSTRUCTOR_APPEND_ELT (v, max_field,
3867 convert (TREE_TYPE (max_field), t));
3868 template_tree = gnat_build_constructor (template_type, v);
6bf68a93 3869 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
d628c015
DR
3870
3871 /* For class S, we are done. */
c6bd4220 3872 if (iklass == 1)
d628c015
DR
3873 break;
3874
3875 /* Test that we really have an SB descriptor, like DEC Ada. */
c6bd4220
EB
3876 t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
3877 u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
1139f2e8 3878 u = build_binary_op (EQ_EXPR, boolean_type_node, t, u);
d628c015
DR
3879 /* If so, there is already a template in the descriptor and
3880 it is located right after the POINTER field. The fields are
3881 64 bits wide, so they must be repacked. */
7d76717d 3882 t = DECL_CHAIN (pointer);
d628c015
DR
3883 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3884 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3885
7d76717d 3886 t = DECL_CHAIN (t);
d628c015
DR
3887 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3888 ufield = convert
910ad8de 3889 (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type))), ufield);
d628c015
DR
3890
3891 /* Build the template in the form of a constructor. */
9771b263 3892 vec_alloc (v, 2);
0e228dd9 3893 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (template_type), lfield);
7d76717d 3894 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (template_type)),
0e228dd9
NF
3895 ufield);
3896 template_tree = gnat_build_constructor (template_type, v);
d628c015
DR
3897
3898 /* Otherwise use the {1, LENGTH} template we build above. */
3899 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3900 build_unary_op (ADDR_EXPR, p_bounds_type,
6bf68a93 3901 template_tree),
d628c015
DR
3902 template_addr);
3903 break;
3904
3905 case 4: /* Class A */
3906 /* The AFLAGS field is the 3rd field after the pointer in the
3907 descriptor. */
910ad8de 3908 t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer)));
d628c015
DR
3909 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3910 /* The DIMCT field is the next field in the descriptor after
3911 aflags. */
7d76717d 3912 t = DECL_CHAIN (t);
d628c015
DR
3913 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3914 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3915 or FL_COEFF or FL_BOUNDS not set. */
3916 u = build_int_cst (TREE_TYPE (aflags), 192);
1139f2e8
EB
3917 u = build_binary_op (TRUTH_OR_EXPR, boolean_type_node,
3918 build_binary_op (NE_EXPR, boolean_type_node,
d628c015
DR
3919 dimct,
3920 convert (TREE_TYPE (dimct),
3921 size_one_node)),
1139f2e8 3922 build_binary_op (NE_EXPR, boolean_type_node,
d628c015
DR
3923 build2 (BIT_AND_EXPR,
3924 TREE_TYPE (aflags),
3925 aflags, u),
3926 u));
3927 /* There is already a template in the descriptor and it is located
3928 in block 3. The fields are 64 bits wide, so they must be repacked. */
910ad8de 3929 t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN
d628c015
DR
3930 (t)))));
3931 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3932 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3933
7d76717d 3934 t = DECL_CHAIN (t);
d628c015
DR
3935 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3936 ufield = convert
910ad8de 3937 (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type))), ufield);
d628c015
DR
3938
3939 /* Build the template in the form of a constructor. */
9771b263 3940 vec_alloc (v, 2);
0e228dd9 3941 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (template_type), lfield);
910ad8de 3942 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (template_type)),
0e228dd9
NF
3943 ufield);
3944 template_tree = gnat_build_constructor (template_type, v);
f76d6e6f 3945 template_tree = build3 (COND_EXPR, template_type, u,
d628c015
DR
3946 build_call_raise (CE_Length_Check_Failed, Empty,
3947 N_Raise_Constraint_Error),
6bf68a93 3948 template_tree);
c6bd4220
EB
3949 template_addr
3950 = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
d628c015
DR
3951 break;
3952
3953 case 10: /* Class NCA */
3954 default:
3955 post_error ("unsupported descriptor type for &", gnat_subprog);
3956 template_addr = integer_zero_node;
3957 break;
3958 }
3959
3960 /* Build the fat pointer in the form of a constructor. */
9771b263 3961 vec_alloc (v, 2);
0e228dd9 3962 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (gnu_type), gnu_expr64);
910ad8de 3963 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (gnu_type)),
0e228dd9
NF
3964 template_addr);
3965 return gnat_build_constructor (gnu_type, v);
d628c015
DR
3966 }
3967
3968 else
3969 gcc_unreachable ();
3970}
3971
3972/* Convert GNU_EXPR, a pointer to a 32-bit VMS descriptor, to GNU_TYPE, a
3973 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3974 which the VMS descriptor is passed. */
3975
3976static tree
3977convert_vms_descriptor32 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
a1ab4c31
AC
3978{
3979 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3980 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3981 /* The CLASS field is the 3rd field in the descriptor. */
910ad8de 3982 tree klass = DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type)));
a1ab4c31 3983 /* The POINTER field is the 4th field in the descriptor. */
910ad8de 3984 tree pointer = DECL_CHAIN (klass);
a1ab4c31
AC
3985
3986 /* Retrieve the value of the POINTER field. */
d628c015 3987 tree gnu_expr32
a1ab4c31
AC
3988 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
3989
3990 if (POINTER_TYPE_P (gnu_type))
d628c015 3991 return convert (gnu_type, gnu_expr32);
a1ab4c31 3992
315cff15 3993 else if (TYPE_IS_FAT_POINTER_P (gnu_type))
a1ab4c31
AC
3994 {
3995 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
7d76717d 3996 tree p_bounds_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (gnu_type)));
a1ab4c31
AC
3997 tree template_type = TREE_TYPE (p_bounds_type);
3998 tree min_field = TYPE_FIELDS (template_type);
7d76717d 3999 tree max_field = DECL_CHAIN (TYPE_FIELDS (template_type));
6bf68a93 4000 tree template_tree, template_addr, aflags, dimct, t, u;
a1ab4c31 4001 /* See the head comment of build_vms_descriptor. */
c6bd4220 4002 int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
9771b263 4003 vec<constructor_elt, va_gc> *v;
a1ab4c31 4004
86060344 4005 /* Convert POINTER to the pointer-to-array type. */
d628c015 4006 gnu_expr32 = convert (p_array_type, gnu_expr32);
a1ab4c31 4007
c6bd4220 4008 switch (iklass)
a1ab4c31
AC
4009 {
4010 case 1: /* Class S */
4011 case 15: /* Class SB */
4012 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
9771b263 4013 vec_alloc (v, 2);
a1ab4c31
AC
4014 t = TYPE_FIELDS (desc_type);
4015 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
0e228dd9
NF
4016 CONSTRUCTOR_APPEND_ELT (v, min_field,
4017 convert (TREE_TYPE (min_field),
4018 integer_one_node));
4019 CONSTRUCTOR_APPEND_ELT (v, max_field,
4020 convert (TREE_TYPE (max_field), t));
4021 template_tree = gnat_build_constructor (template_type, v);
6bf68a93 4022 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
a1ab4c31
AC
4023
4024 /* For class S, we are done. */
c6bd4220 4025 if (iklass == 1)
a1ab4c31
AC
4026 break;
4027
4028 /* Test that we really have an SB descriptor, like DEC Ada. */
c6bd4220
EB
4029 t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
4030 u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
1139f2e8 4031 u = build_binary_op (EQ_EXPR, boolean_type_node, t, u);
a1ab4c31
AC
4032 /* If so, there is already a template in the descriptor and
4033 it is located right after the POINTER field. */
7d76717d 4034 t = DECL_CHAIN (pointer);
c6bd4220
EB
4035 template_tree
4036 = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
a1ab4c31
AC
4037 /* Otherwise use the {1, LENGTH} template we build above. */
4038 template_addr = build3 (COND_EXPR, p_bounds_type, u,
4039 build_unary_op (ADDR_EXPR, p_bounds_type,
6bf68a93 4040 template_tree),
a1ab4c31
AC
4041 template_addr);
4042 break;
4043
4044 case 4: /* Class A */
4045 /* The AFLAGS field is the 7th field in the descriptor. */
910ad8de 4046 t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer)));
a1ab4c31
AC
4047 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
4048 /* The DIMCT field is the 8th field in the descriptor. */
7d76717d 4049 t = DECL_CHAIN (t);
a1ab4c31
AC
4050 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
4051 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
4052 or FL_COEFF or FL_BOUNDS not set. */
4053 u = build_int_cst (TREE_TYPE (aflags), 192);
1139f2e8
EB
4054 u = build_binary_op (TRUTH_OR_EXPR, boolean_type_node,
4055 build_binary_op (NE_EXPR, boolean_type_node,
a1ab4c31
AC
4056 dimct,
4057 convert (TREE_TYPE (dimct),
4058 size_one_node)),
1139f2e8 4059 build_binary_op (NE_EXPR, boolean_type_node,
a1ab4c31
AC
4060 build2 (BIT_AND_EXPR,
4061 TREE_TYPE (aflags),
4062 aflags, u),
4063 u));
a1ab4c31
AC
4064 /* There is already a template in the descriptor and it is
4065 located at the start of block 3 (12th field). */
910ad8de 4066 t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (t))));
c6bd4220
EB
4067 template_tree
4068 = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
f76d6e6f 4069 template_tree = build3 (COND_EXPR, TREE_TYPE (t), u,
d628c015
DR
4070 build_call_raise (CE_Length_Check_Failed, Empty,
4071 N_Raise_Constraint_Error),
6bf68a93 4072 template_tree);
c6bd4220
EB
4073 template_addr
4074 = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
a1ab4c31
AC
4075 break;
4076
4077 case 10: /* Class NCA */
4078 default:
4079 post_error ("unsupported descriptor type for &", gnat_subprog);
4080 template_addr = integer_zero_node;
4081 break;
4082 }
4083
4084 /* Build the fat pointer in the form of a constructor. */
9771b263 4085 vec_alloc (v, 2);
0e228dd9 4086 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (gnu_type), gnu_expr32);
910ad8de 4087 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (gnu_type)),
0e228dd9 4088 template_addr);
d628c015 4089
0e228dd9 4090 return gnat_build_constructor (gnu_type, v);
a1ab4c31
AC
4091 }
4092
4093 else
4094 gcc_unreachable ();
4095}
4096
a981c964
EB
4097/* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
4098 pointer or fat pointer type. GNU_EXPR_ALT_TYPE is the alternate (32-bit)
491f54a7
EB
4099 pointer type of GNU_EXPR. GNAT_SUBPROG is the subprogram to which the
4100 descriptor is passed. */
d628c015 4101
f3d34576 4102tree
a981c964 4103convert_vms_descriptor (tree gnu_type, tree gnu_expr, tree gnu_expr_alt_type,
491f54a7 4104 Entity_Id gnat_subprog)
d628c015
DR
4105{
4106 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
4107 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
4108 tree mbo = TYPE_FIELDS (desc_type);
4109 const char *mbostr = IDENTIFIER_POINTER (DECL_NAME (mbo));
910ad8de 4110 tree mbmo = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (mbo)));
491f54a7 4111 tree is64bit, gnu_expr32, gnu_expr64;
d628c015 4112
a981c964
EB
4113 /* If the field name is not MBO, the descriptor must be 32-bit and there is
4114 no alternate. Otherwise the primary is 64-bit and the alternate is 32-bit. */
d628c015 4115 if (strcmp (mbostr, "MBO") != 0)
0c700259 4116 {
491f54a7 4117 tree ret = convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
0c700259
EB
4118 return ret;
4119 }
d628c015 4120
a981c964 4121 /* Build the test for 64-bit descriptor. */
d628c015
DR
4122 mbo = build3 (COMPONENT_REF, TREE_TYPE (mbo), desc, mbo, NULL_TREE);
4123 mbmo = build3 (COMPONENT_REF, TREE_TYPE (mbmo), desc, mbmo, NULL_TREE);
a981c964 4124 is64bit
1139f2e8
EB
4125 = build_binary_op (TRUTH_ANDIF_EXPR, boolean_type_node,
4126 build_binary_op (EQ_EXPR, boolean_type_node,
a981c964
EB
4127 convert (integer_type_node, mbo),
4128 integer_one_node),
1139f2e8 4129 build_binary_op (EQ_EXPR, boolean_type_node,
a981c964
EB
4130 convert (integer_type_node, mbmo),
4131 integer_minus_one_node));
4132
4133 /* Build the 2 possible end results. */
491f54a7 4134 gnu_expr64 = convert_vms_descriptor64 (gnu_type, gnu_expr, gnat_subprog);
a981c964 4135 gnu_expr = fold_convert (gnu_expr_alt_type, gnu_expr);
491f54a7 4136 gnu_expr32 = convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
a981c964 4137 return build3 (COND_EXPR, gnu_type, is64bit, gnu_expr64, gnu_expr32);
d628c015 4138}
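The run-time test above depends on the two filler fields laid down by build_vms_descriptor: MBO reads as 1 and MBMO reads as -1 only in the 64-bit layout, values a 32-bit descriptor cannot present at those offsets. The sketch below shows the same check on a hypothetical stand-in for the descriptor header; toy_desc64_head is an illustrative layout, not the real record.

/* Illustrative sketch only, not part of utils.c: discriminating the 64-bit
   descriptor layout by its MBO/MBMO markers on a hypothetical header.  */
#include <stdio.h>
#include <stdint.h>

struct toy_desc64_head
{
  uint16_t mbo;      /* must be 1 in the 64-bit layout */
  uint8_t dtype;
  uint8_t klass;
  int32_t mbmo;      /* must be -1 in the 64-bit layout */
};

static int
toy_is64bit (const struct toy_desc64_head *d)
{
  return d->mbo == 1 && d->mbmo == -1;
}

int
main (void)
{
  struct toy_desc64_head d = { 1, 9, 1, -1 };
  printf ("64-bit: %d\n", toy_is64bit (&d));   /* 64-bit: 1 */
  return 0;
}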
a1ab4c31 4139\f
928dfa4b
EB
4140/* Build a type to be used to represent an aliased object whose nominal type
4141 is an unconstrained array. This consists of a RECORD_TYPE containing a
4142 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
4143 If ARRAY_TYPE is that of an unconstrained array, this is used to represent
4144 an arbitrary unconstrained object. Use NAME as the name of the record.
4145 DEBUG_INFO_P is true if we need to write debug information for the type. */
a1ab4c31
AC
4146
4147tree
928dfa4b
EB
4148build_unc_object_type (tree template_type, tree object_type, tree name,
4149 bool debug_info_p)
a1ab4c31
AC
4150{
4151 tree type = make_node (RECORD_TYPE);
da01bfee
EB
4152 tree template_field
4153 = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
4154 NULL_TREE, NULL_TREE, 0, 1);
4155 tree array_field
4156 = create_field_decl (get_identifier ("ARRAY"), object_type, type,
4157 NULL_TREE, NULL_TREE, 0, 1);
a1ab4c31
AC
4158
4159 TYPE_NAME (type) = name;
4160 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
910ad8de 4161 DECL_CHAIN (template_field) = array_field;
928dfa4b
EB
4162 finish_record_type (type, template_field, 0, true);
4163
4164 /* Declare it now since it will never be declared otherwise. This is
4165 necessary to ensure that its subtrees are properly marked. */
74746d49 4166 create_type_decl (name, type, true, debug_info_p, Empty);
a1ab4c31
AC
4167
4168 return type;
4169}
4170
4171/* Same, taking a thin or fat pointer type instead of a template type. */
4172
4173tree
4174build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
928dfa4b 4175 tree name, bool debug_info_p)
a1ab4c31
AC
4176{
4177 tree template_type;
4178
315cff15 4179 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
a1ab4c31
AC
4180
4181 template_type
315cff15 4182 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
910ad8de 4183 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
a1ab4c31 4184 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
928dfa4b
EB
4185
4186 return
4187 build_unc_object_type (template_type, object_type, name, debug_info_p);
a1ab4c31 4188}
a1ab4c31 4189\f
229077b0
EB
4190/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
4191 In the normal case this is just two adjustments, but we have more to
4192 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
a1ab4c31
AC
4193
4194void
4195update_pointer_to (tree old_type, tree new_type)
4196{
4197 tree ptr = TYPE_POINTER_TO (old_type);
4198 tree ref = TYPE_REFERENCE_TO (old_type);
aeecf17c 4199 tree t;
a1ab4c31
AC
4200
4201 /* If this is the main variant, process all the other variants first. */
4202 if (TYPE_MAIN_VARIANT (old_type) == old_type)
aeecf17c
EB
4203 for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
4204 update_pointer_to (t, new_type);
a1ab4c31 4205
229077b0 4206 /* If no pointers and no references, we are done. */
a1ab4c31
AC
4207 if (!ptr && !ref)
4208 return;
4209
4210 /* Merge the old type qualifiers in the new type.
4211
4212 Each old variant has qualifiers for specific reasons, and the new
229077b0 4213 designated type as well. Each set of qualifiers represents useful
a1ab4c31
AC
4214 information grabbed at some point, and merging the two simply unifies
4215 these inputs into the final type description.
4216
4217 Consider for instance a volatile type frozen after an access to constant
229077b0
EB
4218 type designating it; after the designated type's freeze, we get here with
4219 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
4220 when the access type was processed. We will make a volatile and readonly
a1ab4c31
AC
4221 designated type, because that's what it really is.
4222
229077b0
EB
4223 We might also get here for a non-dummy OLD_TYPE variant with different
4224 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
a1ab4c31 4225 to private record type elaboration (see the comments around the call to
229077b0
EB
4226 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
4227 the qualifiers in those cases too, to avoid accidentally discarding the
4228 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
4229 new_type
4230 = build_qualified_type (new_type,
4231 TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
4232
4233 /* If old type and new type are identical, there is nothing to do. */
a1ab4c31
AC
4234 if (old_type == new_type)
4235 return;
4236
4237 /* Otherwise, first handle the simple case. */
4238 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
4239 {
aeecf17c
EB
4240 tree new_ptr, new_ref;
4241
4242 /* If pointer or reference already points to new type, nothing to do.
4243 This can happen as update_pointer_to can be invoked multiple times
4244 on the same couple of types because of the type variants. */
4245 if ((ptr && TREE_TYPE (ptr) == new_type)
4246 || (ref && TREE_TYPE (ref) == new_type))
4247 return;
4248
4249 /* Chain PTR and its variants at the end. */
4250 new_ptr = TYPE_POINTER_TO (new_type);
4251 if (new_ptr)
4252 {
4253 while (TYPE_NEXT_PTR_TO (new_ptr))
4254 new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
4255 TYPE_NEXT_PTR_TO (new_ptr) = ptr;
4256 }
4257 else
4258 TYPE_POINTER_TO (new_type) = ptr;
a1ab4c31 4259
aeecf17c 4260 /* Now adjust them. */
a1ab4c31 4261 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
aeecf17c 4262 for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
50179d58
EB
4263 {
4264 TREE_TYPE (t) = new_type;
4265 if (TYPE_NULL_BOUNDS (t))
4266 TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
4267 }
de9528f0 4268
aeecf17c
EB
4269 /* Chain REF and its variants at the end. */
4270 new_ref = TYPE_REFERENCE_TO (new_type);
4271 if (new_ref)
4272 {
4273 while (TYPE_NEXT_REF_TO (new_ref))
4274 new_ref = TYPE_NEXT_REF_TO (new_ref);
4275 TYPE_NEXT_REF_TO (new_ref) = ref;
4276 }
4277 else
4278 TYPE_REFERENCE_TO (new_type) = ref;
4279
4280 /* Now adjust them. */
a1ab4c31 4281 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
aeecf17c
EB
4282 for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
4283 TREE_TYPE (t) = new_type;
de9528f0
EB
4284
4285 TYPE_POINTER_TO (old_type) = NULL_TREE;
3bd6ca3f 4286 TYPE_REFERENCE_TO (old_type) = NULL_TREE;
a1ab4c31
AC
4287 }
4288
aeecf17c
EB
4289 /* Now deal with the unconstrained array case. In this case the pointer
4290 is actually a record where both fields are pointers to dummy nodes.
e3edbd56
EB
4291 Turn them into pointers to the correct types using update_pointer_to.
4292 Likewise for the pointer to the object record (thin pointer). */
a1ab4c31
AC
4293 else
4294 {
e3edbd56 4295 tree new_ptr = TYPE_POINTER_TO (new_type);
aeecf17c
EB
4296
4297 gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));
4298
e3edbd56 4299 /* If PTR already points to NEW_TYPE, nothing to do. This can happen
aeecf17c
EB
4300 since update_pointer_to can be invoked multiple times on the same
4301 couple of types because of the type variants. */
4302 if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
4303 return;
4304
a1ab4c31 4305 update_pointer_to
e3edbd56
EB
4306 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
4307 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));
a1ab4c31 4308
a1ab4c31 4309 update_pointer_to
e3edbd56
EB
4310 (TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
4311 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));
aeecf17c 4312
e3edbd56
EB
4313 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
4314 TYPE_OBJECT_RECORD_TYPE (new_type));
a1ab4c31 4315
e3edbd56 4316 TYPE_POINTER_TO (old_type) = NULL_TREE;
a1ab4c31
AC
4317 }
4318}
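The qualifier merge performed above is a plain bitwise union: whatever const/volatile qualifiers the old variant carried are combined with those of the new designated type, so a readonly dummy merged with a volatile designated type yields a readonly volatile result. A tiny standalone restatement with made-up flag values follows (illustration only).

/* Illustrative sketch only, not part of utils.c: merging two qualifier sets
   by bitwise OR, as done for OLD_TYPE and NEW_TYPE above.  Flag values are
   arbitrary stand-ins.  */
#include <stdio.h>

#define TOY_QUAL_CONST    1
#define TOY_QUAL_VOLATILE 2

int
main (void)
{
  int old_quals = TOY_QUAL_CONST;      /* readonly dummy variant */
  int new_quals = TOY_QUAL_VOLATILE;   /* volatile designated type */
  int merged = old_quals | new_quals;

  printf ("const=%d volatile=%d\n",
          (merged & TOY_QUAL_CONST) != 0,
          (merged & TOY_QUAL_VOLATILE) != 0);   /* const=1 volatile=1 */
  return 0;
}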
4319\f
8df2e902
EB
4320/* Convert EXPR, a pointer to a constrained array, into a pointer to an
4321 unconstrained one. This involves making or finding a template. */
a1ab4c31
AC
4322
4323static tree
4324convert_to_fat_pointer (tree type, tree expr)
4325{
910ad8de 4326 tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
8df2e902 4327 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
a1ab4c31 4328 tree etype = TREE_TYPE (expr);
6bf68a93 4329 tree template_tree;
9771b263
DN
4330 vec<constructor_elt, va_gc> *v;
4331 vec_alloc (v, 2);
a1ab4c31 4332
50179d58
EB
4333 /* If EXPR is null, make a fat pointer that contains a null pointer to the
4334 array (compare_fat_pointers ensures that this is the full discriminant)
4335 and a valid pointer to the bounds. This latter property is necessary
4336 since the compiler can hoist the load of the bounds done through it. */
a1ab4c31 4337 if (integer_zerop (expr))
0e228dd9 4338 {
50179d58
EB
4339 tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4340 tree null_bounds, t;
4341
4342 if (TYPE_NULL_BOUNDS (ptr_template_type))
4343 null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
4344 else
4345 {
4346 /* The template type can still be dummy at this point so we build an
4347 empty constructor. The middle-end will fill it in with zeros. */
9771b263
DN
4348 t = build_constructor (template_type,
4349 NULL);
50179d58
EB
4350 TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
4351 null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
4352 SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
4353 }
4354
0e228dd9 4355 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
50179d58
EB
4356 fold_convert (p_array_type, null_pointer_node));
4357 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
4358 t = build_constructor (type, v);
4359 /* Do not set TREE_CONSTANT so as to force T to static memory. */
4360 TREE_CONSTANT (t) = 0;
4361 TREE_STATIC (t) = 1;
4362
4363 return t;
0e228dd9 4364 }
a1ab4c31 4365
0d7de0e1
EB
4366 /* If EXPR is a thin pointer, make template and data from the record. */
4367 if (TYPE_IS_THIN_POINTER_P (etype))
a1ab4c31 4368 {
0d7de0e1 4369 tree field = TYPE_FIELDS (TREE_TYPE (etype));
a1ab4c31 4370
7d7a1fe8 4371 expr = gnat_protect_expr (expr);
a1ab4c31
AC
4372 if (TREE_CODE (expr) == ADDR_EXPR)
4373 expr = TREE_OPERAND (expr, 0);
4374 else
2b45154d
EB
4375 {
4376 /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
4377 the thin pointer value has been shifted so we first need to shift
4378 it back to get the template address. */
4379 if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)))
4380 expr
4381 = build_binary_op (POINTER_PLUS_EXPR, etype, expr,
4382 fold_build1 (NEGATE_EXPR, sizetype,
4383 byte_position
4384 (DECL_CHAIN (field))));
4385 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
4386 }
a1ab4c31 4387
0d7de0e1 4388 template_tree = build_component_ref (expr, NULL_TREE, field, false);
a1ab4c31
AC
4389 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
4390 build_component_ref (expr, NULL_TREE,
0d7de0e1 4391 DECL_CHAIN (field), false));
a1ab4c31 4392 }
8df2e902
EB
4393
4394 /* Otherwise, build the constructor for the template. */
a1ab4c31 4395 else
6bf68a93 4396 template_tree = build_template (template_type, TREE_TYPE (etype), expr);
a1ab4c31 4397
8df2e902 4398 /* The final result is a constructor for the fat pointer.
a1ab4c31 4399
8df2e902
EB
4400 If EXPR is an argument of a foreign convention subprogram, the type it
4401 points to is directly the component type. In this case, the expression
a1ab4c31 4402 type may not match the corresponding FIELD_DECL type at this point, so we
8df2e902 4403 call "convert" here to fix that up if necessary. This type consistency is
a1ab4c31 4404 required, for instance because it ensures that possible later folding of
8df2e902 4405 COMPONENT_REFs against this constructor always yields something of the
a1ab4c31
AC
4406 same type as the initial reference.
4407
8df2e902
EB
4408 Note that the call to "build_template" above is still fine because it
4409 will only refer to the provided TEMPLATE_TYPE in this case. */
0e228dd9
NF
4410 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4411 convert (p_array_type, expr));
910ad8de 4412 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
0e228dd9
NF
4413 build_unary_op (ADDR_EXPR, NULL_TREE,
4414 template_tree));
4415 return gnat_build_constructor (type, v);
a1ab4c31
AC
4416}
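The value built above is always a two-field aggregate: a pointer to the array data and a pointer to the bounds template. Here is a minimal standalone sketch of that shape for a one-dimensional array; toy_fat_ptr and toy_bounds are hypothetical names, not the GCC fat pointer type.

/* Illustrative sketch only, not part of utils.c: the {data pointer, bounds
   pointer} pair that a fat pointer aggregates.  */
#include <stdio.h>

struct toy_bounds { long lb, ub; };
struct toy_fat_ptr { int *p_array; struct toy_bounds *p_bounds; };

int
main (void)
{
  static int data[5] = { 10, 20, 30, 40, 50 };
  static struct toy_bounds bounds = { 1, 5 };   /* A : array (1 .. 5) */
  struct toy_fat_ptr fat = { data, &bounds };

  long length = fat.p_bounds->ub - fat.p_bounds->lb + 1;
  printf ("length %ld, first %d\n", length, fat.p_array[0]);
  return 0;
}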
4417\f
a1ab4c31
AC
4418/* Create an expression whose value is that of EXPR,
4419 converted to type TYPE. The TREE_TYPE of the value
4420 is always TYPE. This function implements all reasonable
4421 conversions; callers should filter out those that are
4422 not permitted by the language being compiled. */
4423
4424tree
4425convert (tree type, tree expr)
4426{
a1ab4c31
AC
4427 tree etype = TREE_TYPE (expr);
4428 enum tree_code ecode = TREE_CODE (etype);
c34f3839 4429 enum tree_code code = TREE_CODE (type);
a1ab4c31 4430
c34f3839
EB
4431 /* If the expression is already of the right type, we are done. */
4432 if (etype == type)
a1ab4c31
AC
4433 return expr;
4434
4435 /* If both input and output have padding and are of variable size, do this
4436 as an unchecked conversion. Likewise if one is a mere variant of the
4437 other, so we avoid a pointless unpad/repad sequence. */
4438 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
315cff15 4439 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
a1ab4c31
AC
4440 && (!TREE_CONSTANT (TYPE_SIZE (type))
4441 || !TREE_CONSTANT (TYPE_SIZE (etype))
842d4ee2 4442 || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
a1ab4c31
AC
4443 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
4444 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
4445 ;
4446
f88facfe
EB
4447 /* If the output type has padding, convert to the inner type and make a
4448 constructor to build the record, unless a variable size is involved. */
315cff15 4449 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
a1ab4c31 4450 {
9771b263 4451 vec<constructor_elt, va_gc> *v;
0e228dd9 4452
a1ab4c31
AC
4453 /* If we previously converted from another type and our type is
4454 of variable size, remove the conversion to avoid the need for
f88facfe 4455 variable-sized temporaries. Likewise for a conversion between
a1ab4c31
AC
4456 original and packable version. */
4457 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4458 && (!TREE_CONSTANT (TYPE_SIZE (type))
4459 || (ecode == RECORD_TYPE
4460 && TYPE_NAME (etype)
4461 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
4462 expr = TREE_OPERAND (expr, 0);
4463
4464 /* If we are just removing the padding from expr, convert the original
4465 object if we have variable size in order to avoid the need for some
f88facfe 4466 variable-sized temporaries. Likewise if the padding is a variant
a1ab4c31
AC
4467 of the other, so we avoid a pointless unpad/repad sequence. */
4468 if (TREE_CODE (expr) == COMPONENT_REF
a1ab4c31
AC
4469 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
4470 && (!TREE_CONSTANT (TYPE_SIZE (type))
842d4ee2
EB
4471 || TYPE_MAIN_VARIANT (type)
4472 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))
a1ab4c31
AC
4473 || (ecode == RECORD_TYPE
4474 && TYPE_NAME (etype)
4475 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
4476 return convert (type, TREE_OPERAND (expr, 0));
4477
431cfac1
EB
4478 /* If the inner type is of self-referential size and the expression type
4479 is a record, do this as an unchecked conversion. But first pad the
4480 expression if possible to have the same size on both sides. */
c34f3839 4481 if (ecode == RECORD_TYPE
f88facfe 4482 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
431cfac1 4483 {
980a0501 4484 if (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST)
431cfac1 4485 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
980a0501
EB
4486 false, false, false, true),
4487 expr);
431cfac1
EB
4488 return unchecked_convert (type, expr, false);
4489 }
a1ab4c31 4490
f88facfe
EB
4491 /* If we are converting between array types with variable size, do the
4492 final conversion as an unchecked conversion, again to avoid the need
4493 for some variable-sized temporaries. If valid, this conversion is
4494 very likely purely technical and without real effects. */
c34f3839 4495 if (ecode == ARRAY_TYPE
f88facfe
EB
4496 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
4497 && !TREE_CONSTANT (TYPE_SIZE (etype))
4498 && !TREE_CONSTANT (TYPE_SIZE (type)))
4499 return unchecked_convert (type,
4500 convert (TREE_TYPE (TYPE_FIELDS (type)),
4501 expr),
4502 false);
4503
9771b263 4504 vec_alloc (v, 1);
0e228dd9
NF
4505 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4506 convert (TREE_TYPE (TYPE_FIELDS (type)), expr));
4507 return gnat_build_constructor (type, v);
a1ab4c31
AC
4508 }
4509
4510 /* If the input type has padding, remove it and convert to the output type.
4511 The ordering of the conditions is arranged to ensure that the output type
4512 is not a padding type here, as it is not clear whether the conversion
4513 would always be correct if that were to happen. */
315cff15 4514 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
a1ab4c31
AC
4515 {
4516 tree unpadded;
4517
4518 /* If we have just converted to this padded type, just get the
4519 inner expression. */
4520 if (TREE_CODE (expr) == CONSTRUCTOR
9771b263
DN
4521 && !vec_safe_is_empty (CONSTRUCTOR_ELTS (expr))
4522 && (*CONSTRUCTOR_ELTS (expr))[0].index == TYPE_FIELDS (etype))
4523 unpadded = (*CONSTRUCTOR_ELTS (expr))[0].value;
a1ab4c31
AC
4524
4525 /* Otherwise, build an explicit component reference. */
4526 else
4527 unpadded
4528 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
4529
4530 return convert (type, unpadded);
4531 }
4532
4533 /* If the input is a biased type, adjust first. */
4534 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4535 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
4536 fold_convert (TREE_TYPE (etype),
4537 expr),
4538 TYPE_MIN_VALUE (etype)));
4539
4540 /* If the input is a justified modular type, we need to extract the actual
4541 object before converting it to any other type with the exceptions of an
4542 unconstrained array or of a mere type variant. It is useful to avoid the
4543 extraction and conversion in the type variant case because it could end
4544 up replacing a VAR_DECL expr by a constructor and we might be about to
4545 take the address of the result. */
4546 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
4547 && code != UNCONSTRAINED_ARRAY_TYPE
4548 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
4549 return convert (type, build_component_ref (expr, NULL_TREE,
4550 TYPE_FIELDS (etype), false));
4551
4552 /* If converting to a type that contains a template, convert to the data
4553 type and then build the template. */
4554 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
4555 {
910ad8de 4556 tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
9771b263
DN
4557 vec<constructor_elt, va_gc> *v;
4558 vec_alloc (v, 2);
a1ab4c31
AC
4559
4560 /* If the source already has a template, get a reference to the
4561 associated array only, as we are going to rebuild a template
4562 for the target type anyway. */
4563 expr = maybe_unconstrained_array (expr);
4564
0e228dd9
NF
4565 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4566 build_template (TREE_TYPE (TYPE_FIELDS (type)),
4567 obj_type, NULL_TREE));
910ad8de 4568 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
0e228dd9
NF
4569 convert (obj_type, expr));
4570 return gnat_build_constructor (type, v);
a1ab4c31
AC
4571 }
4572
a1c7d797 4573 /* There are some cases of expressions that we process specially. */
a1ab4c31
AC
4574 switch (TREE_CODE (expr))
4575 {
4576 case ERROR_MARK:
4577 return expr;
4578
4579 case NULL_EXPR:
4580 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4581 conversion in gnat_expand_expr. NULL_EXPR does not represent
4582 an actual value, so no conversion is needed. */
4583 expr = copy_node (expr);
4584 TREE_TYPE (expr) = type;
4585 return expr;
4586
4587 case STRING_CST:
4588 /* If we are converting a STRING_CST to another constrained array type,
4589 just make a new one in the proper type. */
4590 if (code == ecode && AGGREGATE_TYPE_P (etype)
4591 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
4592 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
4593 {
4594 expr = copy_node (expr);
4595 TREE_TYPE (expr) = type;
4596 return expr;
4597 }
4598 break;
4599
7948ae37
OH
4600 case VECTOR_CST:
4601 /* If we are converting a VECTOR_CST to a mere variant type, just make
4602 a new one in the proper type. */
4603 if (code == ecode && gnat_types_compatible_p (type, etype))
4604 {
4605 expr = copy_node (expr);
4606 TREE_TYPE (expr) = type;
4607 return expr;
4608 }
4609
a1ab4c31
AC
4610 case CONSTRUCTOR:
4611 /* If we are converting a CONSTRUCTOR to a mere variant type, just make
4612 a new one in the proper type. */
4613 if (code == ecode && gnat_types_compatible_p (type, etype))
4614 {
4615 expr = copy_node (expr);
4616 TREE_TYPE (expr) = type;
9771b263 4617 CONSTRUCTOR_ELTS (expr) = vec_safe_copy (CONSTRUCTOR_ELTS (expr));
a1ab4c31
AC
4618 return expr;
4619 }
4620
cb3d597d
EB
4621 /* Likewise for a conversion between original and packable version, or
4622 conversion between types of the same size and with the same list of
4623 fields, but we have to work harder to preserve type consistency. */
a1ab4c31
AC
4624 if (code == ecode
4625 && code == RECORD_TYPE
cb3d597d
EB
4626 && (TYPE_NAME (type) == TYPE_NAME (etype)
4627 || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
4628
a1ab4c31 4629 {
9771b263
DN
4630 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4631 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4632 vec<constructor_elt, va_gc> *v;
4633 vec_alloc (v, len);
a1ab4c31
AC
4634 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4635 unsigned HOST_WIDE_INT idx;
4636 tree index, value;
4637
db868e1e
OH
4638 /* Whether we need to clear TREE_CONSTANT et al. on the output
4639 constructor when we convert in place. */
4640 bool clear_constant = false;
4641
a1ab4c31
AC
4642 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
4643 {
cb3d597d
EB
4644 /* We expect only simple constructors. */
4645 if (!SAME_FIELD_P (index, efield))
4646 break;
4647 /* The field must be the same. */
4648 if (!SAME_FIELD_P (efield, field))
a1ab4c31 4649 break;
f32682ca 4650 constructor_elt elt = {field, convert (TREE_TYPE (field), value)};
9771b263 4651 v->quick_push (elt);
db868e1e
OH
4652
4653 /* If packing has made this field a bitfield and the input
4654 value couldn't be emitted statically any more, we need to
4655 clear TREE_CONSTANT on our output. */
ced57283
EB
4656 if (!clear_constant
4657 && TREE_CONSTANT (expr)
db868e1e
OH
4658 && !CONSTRUCTOR_BITFIELD_P (efield)
4659 && CONSTRUCTOR_BITFIELD_P (field)
4660 && !initializer_constant_valid_for_bitfield_p (value))
4661 clear_constant = true;
4662
910ad8de
NF
4663 efield = DECL_CHAIN (efield);
4664 field = DECL_CHAIN (field);
a1ab4c31
AC
4665 }
4666
db868e1e
OH
4667 /* If we have been able to match and convert all the input fields
4668 to their output type, convert in place now. We'll fallback to a
4669 view conversion downstream otherwise. */
a1ab4c31
AC
4670 if (idx == len)
4671 {
4672 expr = copy_node (expr);
4673 TREE_TYPE (expr) = type;
4674 CONSTRUCTOR_ELTS (expr) = v;
db868e1e 4675 if (clear_constant)
ced57283 4676 TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
a1ab4c31
AC
4677 return expr;
4678 }
4679 }
7948ae37
OH
4680
4681 /* Likewise for a conversion between array type and vector type with a
4682 compatible representative array. */
4683 else if (code == VECTOR_TYPE
4684 && ecode == ARRAY_TYPE
4685 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4686 etype))
4687 {
9771b263
DN
4688 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4689 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4690 vec<constructor_elt, va_gc> *v;
7948ae37
OH
4691 unsigned HOST_WIDE_INT ix;
4692 tree value;
4693
4694 /* Build a VECTOR_CST from a *constant* array constructor. */
4695 if (TREE_CONSTANT (expr))
4696 {
4697 bool constant_p = true;
4698
4699 /* Iterate through elements and check if all constructor
4700 elements are *_CSTs. */
4701 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4702 if (!CONSTANT_CLASS_P (value))
4703 {
4704 constant_p = false;
4705 break;
4706 }
4707
4708 if (constant_p)
4709 return build_vector_from_ctor (type,
4710 CONSTRUCTOR_ELTS (expr));
4711 }
4712
4713 /* Otherwise, build a regular vector constructor. */
9771b263 4714 vec_alloc (v, len);
7948ae37
OH
4715 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4716 {
f32682ca 4717 constructor_elt elt = {NULL_TREE, value};
9771b263 4718 v->quick_push (elt);
7948ae37
OH
4719 }
4720 expr = copy_node (expr);
4721 TREE_TYPE (expr) = type;
4722 CONSTRUCTOR_ELTS (expr) = v;
4723 return expr;
4724 }
a1ab4c31
AC
4725 break;
4726
4727 case UNCONSTRAINED_ARRAY_REF:
a1c7d797
EB
4728 /* First retrieve the underlying array. */
4729 expr = maybe_unconstrained_array (expr);
4730 etype = TREE_TYPE (expr);
4731 ecode = TREE_CODE (etype);
4732 break;
a1ab4c31
AC
4733
4734 case VIEW_CONVERT_EXPR:
4735 {
4736 /* GCC 4.x is very sensitive to type consistency overall, and view
4737 conversions thus are very frequent. Even though just "convert"ing
4738 the inner operand to the output type is fine in most cases, it
4739 might expose unexpected input/output type mismatches in special
4740 circumstances so we avoid such recursive calls when we can. */
4741 tree op0 = TREE_OPERAND (expr, 0);
4742
4743 /* If we are converting back to the original type, we can just
4744 lift the input conversion. This is a common occurrence with
4745 switches back-and-forth amongst type variants. */
4746 if (type == TREE_TYPE (op0))
4747 return op0;
4748
7948ae37
OH
4749 /* Otherwise, if we're converting between two aggregate or vector
4750 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4751 target type in place or to just convert the inner expression. */
4752 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4753 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
a1ab4c31
AC
4754 {
4755 /* If we are converting between mere variants, we can just
4756 substitute the VIEW_CONVERT_EXPR in place. */
4757 if (gnat_types_compatible_p (type, etype))
4758 return build1 (VIEW_CONVERT_EXPR, type, op0);
4759
4760 /* Otherwise, we may just bypass the input view conversion unless
4761 one of the types is a fat pointer, which is handled by
4762 specialized code below which relies on exact type matching. */
315cff15
EB
4763 else if (!TYPE_IS_FAT_POINTER_P (type)
4764 && !TYPE_IS_FAT_POINTER_P (etype))
a1ab4c31
AC
4765 return convert (type, op0);
4766 }
ad1d36ba
EB
4767
4768 break;
a1ab4c31 4769 }
a1ab4c31 4770
a1ab4c31
AC
4771 default:
4772 break;
4773 }
4774
4775 /* Check for converting to a pointer to an unconstrained array. */
315cff15 4776 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
a1ab4c31
AC
4777 return convert_to_fat_pointer (type, expr);
4778
7948ae37
OH
4779 /* If we are converting between two aggregate or vector types that are mere
4780 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4781 to a vector type from its representative array type. */
4782 else if ((code == ecode
4783 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4784 && gnat_types_compatible_p (type, etype))
4785 || (code == VECTOR_TYPE
4786 && ecode == ARRAY_TYPE
4787 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4788 etype)))
a1ab4c31
AC
4789 return build1 (VIEW_CONVERT_EXPR, type, expr);
4790
76af763d
EB
4791 /* If we are converting between tagged types, try to upcast properly. */
4792 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4793 && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type))
4794 {
4795 tree child_etype = etype;
4796 do {
4797 tree field = TYPE_FIELDS (child_etype);
4798 if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
4799 return build_component_ref (expr, NULL_TREE, field, false);
4800 child_etype = TREE_TYPE (field);
4801 } while (TREE_CODE (child_etype) == RECORD_TYPE);
4802 }
4803
bb1f7929
EB
4804 /* If we are converting from a smaller form of record type back to it, just
4805 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4806 size on both sides. */
4807 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4808 && smaller_form_type_p (etype, type))
4809 {
4810 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4811 false, false, false, true),
4812 expr);
4813 return build1 (VIEW_CONVERT_EXPR, type, expr);
4814 }
4815
a1ab4c31 4816 /* In all other cases of related types, make a NOP_EXPR. */
86060344 4817 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
a1ab4c31
AC
4818 return fold_convert (type, expr);
4819
4820 switch (code)
4821 {
4822 case VOID_TYPE:
4823 return fold_build1 (CONVERT_EXPR, type, expr);
4824
a1ab4c31
AC
4825 case INTEGER_TYPE:
4826 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4827 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4828 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4829 return unchecked_convert (type, expr, false);
4830 else if (TYPE_BIASED_REPRESENTATION_P (type))
4831 return fold_convert (type,
4832 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4833 convert (TREE_TYPE (type), expr),
4834 TYPE_MIN_VALUE (type)));
4835
4836 /* ... fall through ... */
4837
4838 case ENUMERAL_TYPE:
01ddebf2 4839 case BOOLEAN_TYPE:
a1ab4c31
AC
4840 /* If we are converting an additive expression to an integer type
4841 with lower precision, be wary of the optimization that can be
4842 applied by convert_to_integer. There are 2 problematic cases:
4843 - if the first operand was originally of a biased type,
4844 because we could be recursively called to convert it
4845 to an intermediate type and thus rematerialize the
4846 additive operator endlessly,
4847 - if the expression contains a placeholder, because an
4848 intermediate conversion that changes the sign could
4849 be inserted and thus introduce an artificial overflow
4850 at compile time when the placeholder is substituted. */
4851 if (code == INTEGER_TYPE
4852 && ecode == INTEGER_TYPE
4853 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4854 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4855 {
4856 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4857
4858 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4859 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4860 || CONTAINS_PLACEHOLDER_P (expr))
4861 return build1 (NOP_EXPR, type, expr);
4862 }
4863
4864 return fold (convert_to_integer (type, expr));
4865
4866 case POINTER_TYPE:
4867 case REFERENCE_TYPE:
0d7de0e1 4868 /* If converting between two thin pointers, adjust if needed to account
2b45154d
EB
4869 for differing offsets from the base pointer, depending on whether
4870 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type. */
315cff15 4871 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
a1ab4c31 4872 {
2b45154d
EB
4873 tree etype_pos
4874 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)) != NULL_TREE
4875 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype))))
4876 : size_zero_node;
4877 tree type_pos
4878 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != NULL_TREE
4879 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type))))
4880 : size_zero_node;
4881 tree byte_diff = size_diffop (type_pos, etype_pos);
0d7de0e1 4882
a1ab4c31 4883 expr = build1 (NOP_EXPR, type, expr);
a1ab4c31
AC
4884 if (integer_zerop (byte_diff))
4885 return expr;
4886
4887 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
0d7de0e1 4888 fold_convert (sizetype, byte_diff));
a1ab4c31
AC
4889 }
4890
0d7de0e1
EB
4891 /* If converting fat pointer to normal or thin pointer, get the pointer
4892 to the array and then convert it. */
4893 if (TYPE_IS_FAT_POINTER_P (etype))
86060344
EB
4894 expr
4895 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
a1ab4c31
AC
4896
4897 return fold (convert_to_pointer (type, expr));
4898
4899 case REAL_TYPE:
4900 return fold (convert_to_real (type, expr));
4901
4902 case RECORD_TYPE:
4903 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
0e228dd9 4904 {
9771b263
DN
4905 vec<constructor_elt, va_gc> *v;
4906 vec_alloc (v, 1);
0e228dd9
NF
4907
4908 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4909 convert (TREE_TYPE (TYPE_FIELDS (type)),
4910 expr));
4911 return gnat_build_constructor (type, v);
4912 }
a1ab4c31
AC
4913
4914 /* ... fall through ... */
4915
4916 case ARRAY_TYPE:
4917 /* In these cases, assume the front-end has validated the conversion.
4918 If the conversion is valid, it will be a bit-wise conversion, so
4919 it can be viewed as an unchecked conversion. */
4920 return unchecked_convert (type, expr, false);
4921
4922 case UNION_TYPE:
4923 /* This is either a conversion between a tagged type and some
4924 subtype, which we have to mark as a UNION_TYPE because of
4925 overlapping fields, or a conversion of an Unchecked_Union. */
4926 return unchecked_convert (type, expr, false);
4927
4928 case UNCONSTRAINED_ARRAY_TYPE:
7948ae37
OH
4929 /* If the input is a VECTOR_TYPE, convert to the representative
4930 array type first. */
4931 if (ecode == VECTOR_TYPE)
4932 {
4933 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
4934 etype = TREE_TYPE (expr);
4935 ecode = TREE_CODE (etype);
4936 }
4937
a1ab4c31
AC
4938 /* If EXPR is a constrained array, take its address, convert it to a
4939 fat pointer, and then dereference it. Likewise if EXPR is a
4940 record containing both a template and a constrained array.
4941 Note that a record representing a justified modular type
4942 always represents a packed constrained array. */
4943 if (ecode == ARRAY_TYPE
4944 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4945 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4946 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4947 return
4948 build_unary_op
4949 (INDIRECT_REF, NULL_TREE,
4950 convert_to_fat_pointer (TREE_TYPE (type),
4951 build_unary_op (ADDR_EXPR,
4952 NULL_TREE, expr)));
4953
4954 /* Do something very similar for converting one unconstrained
4955 array to another. */
4956 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4957 return
4958 build_unary_op (INDIRECT_REF, NULL_TREE,
4959 convert (TREE_TYPE (type),
4960 build_unary_op (ADDR_EXPR,
4961 NULL_TREE, expr)));
4962 else
4963 gcc_unreachable ();
4964
4965 case COMPLEX_TYPE:
4966 return fold (convert_to_complex (type, expr));
4967
4968 default:
4969 gcc_unreachable ();
4970 }
4971}
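/* As an illustration of the biased-representation adjustments handled above
   (the type Small below is a hypothetical example, assuming GNAT selects a
   biased encoding because of the Size clause):

     type Small is range 100 .. 103;
     for Small'Size use 2;

   the value 102 is then stored as 2#10#, i.e. 102 - 100.  Converting a Small
   value to Integer first adds back the bias (the TYPE_MIN_VALUE of the biased
   type), and converting an Integer value to Small subtracts it, as done in
   the INTEGER_TYPE cases above.  */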
15bf7d19
EB
4972
4973/* Create an expression whose value is that of EXPR converted to the common
4974 index type, which is sizetype. EXPR is supposed to be in the base type
4975 of the GNAT index type. Calling it is equivalent to doing
4976
4977 convert (sizetype, expr)
4978
4979 but we try to distribute the type conversion with the knowledge that EXPR
4980 cannot overflow in its type. This is a best-effort approach and we fall
4981 back to the above expression as soon as difficulties are encountered.
4982
4983 This is necessary to overcome issues that arise when the GNAT base index
4984 type and the GCC common index type (sizetype) don't have the same size,
4985 which is quite frequent on 64-bit architectures. In this case, and if
4986 the GNAT base index type is signed but the iteration type of the loop has
4987 been forced to unsigned, the loop scalar evolution engine cannot compute
4988 a simple evolution for the general induction variables associated with the
4989 array indices, because it will preserve the wrap-around semantics in the
4990 unsigned type of their "inner" part. As a result, many loop optimizations
4991 are blocked.
4992
4993 The solution is to use a special (basic) induction variable that is at
4994 least as large as sizetype, and to express the aforementioned general
4995 induction variables in terms of this induction variable, eliminating
4996 the problematic intermediate truncation to the GNAT base index type.
4997 This is possible as long as the original expression doesn't overflow
4998 and if the middle-end hasn't introduced artificial overflows in the
4999 course of the various simplifications it can make to the expression. */
5000
5001tree
5002convert_to_index_type (tree expr)
5003{
5004 enum tree_code code = TREE_CODE (expr);
5005 tree type = TREE_TYPE (expr);
5006
5007 /* If the type is unsigned, overflow is allowed so we cannot be sure that
5008 EXPR doesn't overflow. Keep it simple if optimization is disabled. */
5009 if (TYPE_UNSIGNED (type) || !optimize)
5010 return convert (sizetype, expr);
5011
5012 switch (code)
5013 {
5014 case VAR_DECL:
5015 /* The main effect of the function: replace a loop parameter with its
5016 associated special induction variable. */
5017 if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
5018 expr = DECL_INDUCTION_VAR (expr);
5019 break;
5020
5021 CASE_CONVERT:
5022 {
5023 tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
5024 /* Bail out as soon as we suspect some sort of type frobbing. */
5025 if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
5026 || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
5027 break;
5028 }
5029
5030 /* ... fall through ... */
5031
5032 case NON_LVALUE_EXPR:
5033 return fold_build1 (code, sizetype,
5034 convert_to_index_type (TREE_OPERAND (expr, 0)));
5035
5036 case PLUS_EXPR:
5037 case MINUS_EXPR:
5038 case MULT_EXPR:
5039 return fold_build2 (code, sizetype,
5040 convert_to_index_type (TREE_OPERAND (expr, 0)),
5041 convert_to_index_type (TREE_OPERAND (expr, 1)));
5042
5043 case COMPOUND_EXPR:
5044 return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
5045 convert_to_index_type (TREE_OPERAND (expr, 1)));
5046
5047 case COND_EXPR:
5048 return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
5049 convert_to_index_type (TREE_OPERAND (expr, 1)),
5050 convert_to_index_type (TREE_OPERAND (expr, 2)));
5051
5052 default:
5053 break;
5054 }
5055
5056 return convert (sizetype, expr);
5057}
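/* A hypothetical sketch of what the distribution above buys us, assuming a
   64-bit target and a loop whose iteration type has been forced to a 32-bit
   unsigned type:

     for J in A'Range loop
        A (J) := 0;
     end loop;

   Indexing A involves an offset of the form J - A'First.  A naive
   convert (sizetype, ...) of that offset yields something like
   (sizetype) (unsigned) (J - A'First), whose 32-bit wrap-around blocks
   scalar evolution.  Distributing the conversion instead gives
   (sizetype) J' - (sizetype) A'First, where J' is the special induction
   variable attached to J (DECL_INDUCTION_VAR), which has a simple affine
   evolution in the loop.  This is valid only because J - A'First cannot
   overflow in the base type of the index.  */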
a1ab4c31
AC
5058\f
5059/* Remove all conversions that are done in EXP. This includes converting
5060 from a padded type or to a justified modular type. If TRUE_ADDRESS
5061 is true, always return the address of the containing object even if
5062 the address is not bit-aligned. */
5063
5064tree
5065remove_conversions (tree exp, bool true_address)
5066{
5067 switch (TREE_CODE (exp))
5068 {
5069 case CONSTRUCTOR:
5070 if (true_address
5071 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
5072 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
5073 return
9771b263 5074 remove_conversions ((*CONSTRUCTOR_ELTS (exp))[0].value, true);
a1ab4c31
AC
5075 break;
5076
5077 case COMPONENT_REF:
315cff15 5078 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
a1ab4c31
AC
5079 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
5080 break;
5081
a1ab4c31 5082 CASE_CONVERT:
722356ce
EB
5083 case VIEW_CONVERT_EXPR:
5084 case NON_LVALUE_EXPR:
a1ab4c31
AC
5085 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
5086
5087 default:
5088 break;
5089 }
5090
5091 return exp;
5092}
5093\f
5094/* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
86060344 5095 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
a1ab4c31
AC
5096 likewise return an expression pointing to the underlying array. */
5097
5098tree
5099maybe_unconstrained_array (tree exp)
5100{
5101 enum tree_code code = TREE_CODE (exp);
1aa291f7 5102 tree type = TREE_TYPE (exp);
a1ab4c31 5103
1aa291f7 5104 switch (TREE_CODE (type))
a1ab4c31
AC
5105 {
5106 case UNCONSTRAINED_ARRAY_TYPE:
5107 if (code == UNCONSTRAINED_ARRAY_REF)
5108 {
7e169899 5109 const bool read_only = TREE_READONLY (exp);
a1c7d797
EB
5110 const bool no_trap = TREE_THIS_NOTRAP (exp);
5111
7e169899 5112 exp = TREE_OPERAND (exp, 0);
1aa291f7
EB
5113 type = TREE_TYPE (exp);
5114
7e169899
EB
5115 if (TREE_CODE (exp) == COND_EXPR)
5116 {
5117 tree op1
5118 = build_unary_op (INDIRECT_REF, NULL_TREE,
5119 build_component_ref (TREE_OPERAND (exp, 1),
5120 NULL_TREE,
1aa291f7 5121 TYPE_FIELDS (type),
7e169899
EB
5122 false));
5123 tree op2
5124 = build_unary_op (INDIRECT_REF, NULL_TREE,
5125 build_component_ref (TREE_OPERAND (exp, 2),
5126 NULL_TREE,
1aa291f7 5127 TYPE_FIELDS (type),
7e169899
EB
5128 false));
5129
5130 exp = build3 (COND_EXPR,
1aa291f7 5131 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
7e169899
EB
5132 TREE_OPERAND (exp, 0), op1, op2);
5133 }
5134 else
a1c7d797
EB
5135 {
5136 exp = build_unary_op (INDIRECT_REF, NULL_TREE,
5137 build_component_ref (exp, NULL_TREE,
1aa291f7 5138 TYPE_FIELDS (type),
a1c7d797
EB
5139 false));
5140 TREE_READONLY (exp) = read_only;
5141 TREE_THIS_NOTRAP (exp) = no_trap;
5142 }
a1ab4c31
AC
5143 }
5144
5145 else if (code == NULL_EXPR)
1aa291f7
EB
5146 exp = build1 (NULL_EXPR,
5147 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
5148 TREE_OPERAND (exp, 0));
5149 break;
a1ab4c31
AC
5150
5151 case RECORD_TYPE:
1aa291f7
EB
5152 /* If this is a padded type and it contains a template, convert to the
5153 unpadded type first. */
5154 if (TYPE_PADDING_P (type)
5155 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
5156 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
a1ab4c31 5157 {
1aa291f7
EB
5158 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
5159 type = TREE_TYPE (exp);
5160 }
5161
5162 if (TYPE_CONTAINS_TEMPLATE_P (type))
5163 {
5164 exp = build_component_ref (exp, NULL_TREE,
5165 DECL_CHAIN (TYPE_FIELDS (type)),
5166 false);
5167 type = TREE_TYPE (exp);
5168
5169 /* If the array type is padded, convert to the unpadded type. */
5170 if (TYPE_IS_PADDING_P (type))
5171 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
a1ab4c31 5172 }
a1ab4c31
AC
5173 break;
5174
5175 default:
5176 break;
5177 }
5178
5179 return exp;
5180}
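/* A hypothetical illustration: for a parameter S of an unconstrained array
   type such as String,

     procedure Clear (S : in out String);

   a use of S in gigi is an UNCONSTRAINED_ARRAY_REF of the fat pointer built
   for it.  The function above rewrites such a reference as a dereference of
   the pointer-to-array field of that fat pointer, so that callers like
   convert and unchecked_convert see a plain ARRAY_TYPE object.  Likewise, a
   record containing a template (bounds) plus an array yields a reference to
   its array field only.  */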
5181\f
afcea859 5182/* Return true if EXPR is an expression that can be folded as an operand
84fb43a1 5183 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
afcea859
EB
5184
5185static bool
5186can_fold_for_view_convert_p (tree expr)
5187{
5188 tree t1, t2;
5189
5190 /* The folder will fold NOP_EXPRs between integral types with the same
5191 precision (in the middle-end's sense). We cannot allow it if the
5192 types don't have the same precision in the Ada sense as well. */
5193 if (TREE_CODE (expr) != NOP_EXPR)
5194 return true;
5195
5196 t1 = TREE_TYPE (expr);
5197 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
5198
5199 /* Defer to the folder for non-integral conversions. */
5200 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
5201 return true;
5202
5203 /* Only fold conversions that preserve both precisions. */
5204 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
5205 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
5206 return true;
5207
5208 return false;
5209}
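/* A hypothetical example of the distinction made above: with

     subtype Small is Integer range 0 .. 255;

   both Small and Integer have a GCC precision of 32 bits, but Small has an
   RM size of 8.  The folder would happily remove a NOP_EXPR between the two,
   yet under a VIEW_CONVERT_EXPR this could change which value bits end up
   being fetched or assigned (see the endianness discussion below), hence the
   additional rm_size check.  */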
5210
a1ab4c31 5211/* Return an expression that does an unchecked conversion of EXPR to TYPE.
afcea859
EB
5212 If NOTRUNC_P is true, truncation operations should be suppressed.
5213
5214 Special care is required with (source or target) integral types whose
5215 precision is not equal to their size, to make sure we fetch or assign
5216 the value bits whose location might depend on the endianness, e.g.
5217
5218 Rmsize : constant := 8;
5219 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
5220
5221 type Bit_Array is array (1 .. Rmsize) of Boolean;
5222 pragma Pack (Bit_Array);
5223
5224 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
5225
5226 Value : Int := 2#1000_0001#;
5227 Vbits : Bit_Array := To_Bit_Array (Value);
5228
5229 we expect the 8 bits at Vbits'Address to always contain Value, while
5230 their original location depends on the endianness, at Value'Address
84fb43a1 5231 on a little-endian architecture but not on a big-endian one. */
a1ab4c31
AC
5232
5233tree
5234unchecked_convert (tree type, tree expr, bool notrunc_p)
5235{
5236 tree etype = TREE_TYPE (expr);
c34f3839
EB
5237 enum tree_code ecode = TREE_CODE (etype);
5238 enum tree_code code = TREE_CODE (type);
980a0501 5239 int c;
a1ab4c31 5240
c34f3839 5241 /* If the expression is already of the right type, we are done. */
a1ab4c31
AC
5242 if (etype == type)
5243 return expr;
5244
5245 /* If both types are integral, just do a normal conversion.
5246 Likewise for a conversion to an unconstrained array. */
5247 if ((((INTEGRAL_TYPE_P (type)
c34f3839 5248 && !(code == INTEGER_TYPE && TYPE_VAX_FLOATING_POINT_P (type)))
0d7de0e1 5249 || (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
c34f3839 5250 || (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
a1ab4c31 5251 && ((INTEGRAL_TYPE_P (etype)
c34f3839 5252 && !(ecode == INTEGER_TYPE && TYPE_VAX_FLOATING_POINT_P (etype)))
315cff15 5253 || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
c34f3839
EB
5254 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
5255 || code == UNCONSTRAINED_ARRAY_TYPE)
a1ab4c31 5256 {
c34f3839 5257 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
a1ab4c31
AC
5258 {
5259 tree ntype = copy_type (etype);
a1ab4c31
AC
5260 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
5261 TYPE_MAIN_VARIANT (ntype) = ntype;
5262 expr = build1 (NOP_EXPR, ntype, expr);
5263 }
5264
c34f3839 5265 if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
a1ab4c31 5266 {
afcea859 5267 tree rtype = copy_type (type);
a1ab4c31
AC
5268 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
5269 TYPE_MAIN_VARIANT (rtype) = rtype;
afcea859
EB
5270 expr = convert (rtype, expr);
5271 expr = build1 (NOP_EXPR, type, expr);
a1ab4c31 5272 }
afcea859
EB
5273 else
5274 expr = convert (type, expr);
a1ab4c31
AC
5275 }
5276
afcea859 5277 /* If we are converting to an integral type whose precision is not equal
416de7d5
EB
5278 to its size, first unchecked convert to a record type that contains a
5279 field of the given precision. Then extract the field. */
980a0501
EB
5280 else if (INTEGRAL_TYPE_P (type)
5281 && TYPE_RM_SIZE (type)
a1ab4c31
AC
5282 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
5283 GET_MODE_BITSIZE (TYPE_MODE (type))))
5284 {
5285 tree rec_type = make_node (RECORD_TYPE);
416de7d5
EB
5286 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
5287 tree field_type, field;
5288
5289 if (TYPE_UNSIGNED (type))
5290 field_type = make_unsigned_type (prec);
5291 else
5292 field_type = make_signed_type (prec);
5293 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));
5294
5295 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
5296 NULL_TREE, NULL_TREE, 1, 0);
a1ab4c31
AC
5297
5298 TYPE_FIELDS (rec_type) = field;
5299 layout_type (rec_type);
5300
5301 expr = unchecked_convert (rec_type, expr, notrunc_p);
3cd64bab 5302 expr = build_component_ref (expr, NULL_TREE, field, false);
416de7d5 5303 expr = fold_build1 (NOP_EXPR, type, expr);
a1ab4c31
AC
5304 }
5305
416de7d5
EB
5306 /* Similarly if we are converting from an integral type whose precision is
5307 not equal to its size, first copy into a field of the given precision
5308 and unchecked convert the record type. */
980a0501
EB
5309 else if (INTEGRAL_TYPE_P (etype)
5310 && TYPE_RM_SIZE (etype)
5311 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
5312 GET_MODE_BITSIZE (TYPE_MODE (etype))))
a1ab4c31
AC
5313 {
5314 tree rec_type = make_node (RECORD_TYPE);
416de7d5 5315 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
9771b263
DN
5316 vec<constructor_elt, va_gc> *v;
5317 vec_alloc (v, 1);
416de7d5
EB
5318 tree field_type, field;
5319
5320 if (TYPE_UNSIGNED (etype))
5321 field_type = make_unsigned_type (prec);
5322 else
5323 field_type = make_signed_type (prec);
5324 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));
5325
5326 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
5327 NULL_TREE, NULL_TREE, 1, 0);
a1ab4c31
AC
5328
5329 TYPE_FIELDS (rec_type) = field;
5330 layout_type (rec_type);
5331
416de7d5 5332 expr = fold_build1 (NOP_EXPR, field_type, expr);
0e228dd9
NF
5333 CONSTRUCTOR_APPEND_ELT (v, field, expr);
5334 expr = gnat_build_constructor (rec_type, v);
a1ab4c31
AC
5335 expr = unchecked_convert (type, expr, notrunc_p);
5336 }
5337
980a0501
EB
5338 /* If we are converting from a scalar type to a type with a different size,
5339 we need to pad to have the same size on both sides.
5340
5341 ??? We cannot do it unconditionally because unchecked conversions are
5342 used liberally by the front-end to implement polymorphism, e.g. in:
5343
5344 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
5345 return p___size__4 (p__object!(S191s.all));
5346
5347 so we skip all expressions that are references. */
5348 else if (!REFERENCE_CLASS_P (expr)
5349 && !AGGREGATE_TYPE_P (etype)
5350 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5351 && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
5352 {
5353 if (c < 0)
5354 {
5355 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
5356 false, false, false, true),
5357 expr);
5358 expr = unchecked_convert (type, expr, notrunc_p);
5359 }
5360 else
5361 {
5362 tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
5363 false, false, false, true);
5364 expr = unchecked_convert (rec_type, expr, notrunc_p);
5365 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (rec_type),
5366 false);
5367 }
5368 }
5369
7948ae37
OH
5370 /* We have a special case when we are converting between two unconstrained
5371 array types. In that case, take the address, convert the fat pointer
5372 types, and dereference. */
c34f3839 5373 else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
a1ab4c31
AC
5374 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
5375 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
5376 build_unary_op (ADDR_EXPR, NULL_TREE,
5377 expr)));
7948ae37
OH
5378
5379 /* Another special case is when we are converting to a vector type from its
5380 representative array type; this is a regular conversion. */
c34f3839
EB
5381 else if (code == VECTOR_TYPE
5382 && ecode == ARRAY_TYPE
7948ae37
OH
5383 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
5384 etype))
5385 expr = convert (type, expr);
5386
a1ab4c31
AC
5387 else
5388 {
5389 expr = maybe_unconstrained_array (expr);
5390 etype = TREE_TYPE (expr);
c34f3839 5391 ecode = TREE_CODE (etype);
afcea859
EB
5392 if (can_fold_for_view_convert_p (expr))
5393 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
5394 else
5395 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
a1ab4c31
AC
5396 }
5397
afcea859
EB
5398 /* If the result is an integral type whose precision is not equal to its
5399 size, sign- or zero-extend the result. We need not do this if the input
5400 is an integral type of the same precision and signedness or if the output
a1ab4c31
AC
5401 is a biased type or if both the input and output are unsigned. */
5402 if (!notrunc_p
5403 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
c34f3839 5404 && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
a1ab4c31
AC
5405 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
5406 GET_MODE_BITSIZE (TYPE_MODE (type)))
5407 && !(INTEGRAL_TYPE_P (etype)
5408 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
5409 && operand_equal_p (TYPE_RM_SIZE (type),
5410 (TYPE_RM_SIZE (etype) != 0
5411 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
5412 0))
5413 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
5414 {
c34f3839
EB
5415 tree base_type
5416 = gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
a1ab4c31
AC
5417 tree shift_expr
5418 = convert (base_type,
5419 size_binop (MINUS_EXPR,
5420 bitsize_int
5421 (GET_MODE_BITSIZE (TYPE_MODE (type))),
5422 TYPE_RM_SIZE (type)));
5423 expr
5424 = convert (type,
5425 build_binary_op (RSHIFT_EXPR, base_type,
5426 build_binary_op (LSHIFT_EXPR, base_type,
5427 convert (base_type, expr),
5428 shift_expr),
5429 shift_expr));
5430 }
5431
5432 /* An unchecked conversion should never raise Constraint_Error. The code
5433 below assumes that GCC's conversion routines overflow the same way that
5434 the underlying hardware does. This is probably true. In the rare case
5435 when it is false, we can rely on the fact that such conversions are
5436 erroneous anyway. */
5437 if (TREE_CODE (expr) == INTEGER_CST)
5438 TREE_OVERFLOW (expr) = 0;
5439
5440 /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
5441 mark the result as no longer constant. */
5442 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
5443 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
5444 OEP_ONLY_CONST))
5445 TREE_CONSTANT (expr) = 0;
5446
5447 return expr;
5448}
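/* A worked example of the extension step above, assuming a hypothetical
   signed subtype with an RM size of 5 laid out in an 8-bit mode:

     type Int5 is range -16 .. 15;

   Unchecked conversion of the 5-bit pattern 2#11111# yields the byte
   2#0001_1111#.  The code above shifts left then right by 8 - 5 = 3 bits:
   2#0001_1111# << 3 = 2#1111_1000#, and the arithmetic right shift by 3
   gives 2#1111_1111#, i.e. -1, the expected 5-bit signed value.  For an
   unsigned subtype, the logical right shift would restore 2#0001_1111#
   unchanged.  */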
5449\f
feec4372 5450/* Return the appropriate GCC tree code for the specified GNAT_TYPE,
a1ab4c31
AC
5451 the latter being a record type as predicated by Is_Record_Type. */
5452
5453enum tree_code
5454tree_code_for_record_type (Entity_Id gnat_type)
5455{
b1a785fb 5456 Node_Id component_list, component;
a1ab4c31 5457
b1a785fb
EB
5458 /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5459 fields are all in the variant part. Otherwise, return RECORD_TYPE. */
a1ab4c31
AC
5460 if (!Is_Unchecked_Union (gnat_type))
5461 return RECORD_TYPE;
5462
b1a785fb
EB
5463 gnat_type = Implementation_Base_Type (gnat_type);
5464 component_list
5465 = Component_List (Type_Definition (Declaration_Node (gnat_type)));
5466
a1ab4c31
AC
5467 for (component = First_Non_Pragma (Component_Items (component_list));
5468 Present (component);
5469 component = Next_Non_Pragma (component))
5470 if (Ekind (Defining_Entity (component)) == E_Component)
5471 return RECORD_TYPE;
5472
5473 return UNION_TYPE;
5474}
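/* A hypothetical example of the UNION_TYPE case:

     type R (D : Boolean := False) is record
        case D is
           when False => X : Integer;
           when True  => Y : Float;
        end case;
     end record;
     pragma Unchecked_Union (R);

   All non-discriminant components of R are in the variant part, so the type
   is laid out as a C-style union.  Declaring a component before the "case"
   part would make the function return RECORD_TYPE instead.  */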
5475
caa9d12a
EB
5476/* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5477 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5478 according to the presence of an alignment clause on the type or, if it
5479 is an array, on the component type. */
5480
5481bool
5482is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
5483{
5484 gnat_type = Underlying_Type (gnat_type);
5485
5486 *align_clause = Present (Alignment_Clause (gnat_type));
5487
5488 if (Is_Array_Type (gnat_type))
5489 {
5490 gnat_type = Underlying_Type (Component_Type (gnat_type));
5491 if (Present (Alignment_Clause (gnat_type)))
5492 *align_clause = true;
5493 }
5494
5495 if (!Is_Floating_Point_Type (gnat_type))
5496 return false;
5497
5498 if (UI_To_Int (Esize (gnat_type)) != 64)
5499 return false;
5500
5501 return true;
5502}
5503
5504/* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5505 size is greater or equal to 64 bits, or an array of such a type. Set
5506 ALIGN_CLAUSE according to the presence of an alignment clause on the
5507 type or, if it is an array, on the component type. */
5508
5509bool
5510is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
5511{
5512 gnat_type = Underlying_Type (gnat_type);
5513
5514 *align_clause = Present (Alignment_Clause (gnat_type));
5515
5516 if (Is_Array_Type (gnat_type))
5517 {
5518 gnat_type = Underlying_Type (Component_Type (gnat_type));
5519 if (Present (Alignment_Clause (gnat_type)))
5520 *align_clause = true;
5521 }
5522
5523 if (!Is_Scalar_Type (gnat_type))
5524 return false;
5525
5526 if (UI_To_Int (Esize (gnat_type)) < 64)
5527 return false;
5528
5529 return true;
5530}
5531
a1ab4c31
AC
5532/* Return true if GNU_TYPE is suitable as the type of a non-aliased
5533 component of an aggregate type. */
5534
5535bool
5536type_for_nonaliased_component_p (tree gnu_type)
5537{
5538 /* If the type is passed by reference, we may have pointers to the
5539 component so it cannot be made non-aliased. */
5540 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
5541 return false;
5542
5543 /* We used to say that any component of aggregate type is aliased
5544 because the front-end may take 'Reference of it. The front-end
5545 has been enhanced in the meantime so as to use a renaming instead
5546 in most cases, but the back-end can probably take the address of
5547 such a component too so we go for the conservative stance.
5548
5549 For instance, we might need the address of any array type, even
5550 if normally passed by copy, to construct a fat pointer if the
5551 component is used as an actual for an unconstrained formal.
5552
5553 Likewise for record types: even if a specific record subtype is
5554 passed by copy, the parent type might be passed by ref (e.g. if
5555 it's of variable size) and we might take the address of a child
5556 component to pass to a parent formal. We have no way to check
5557 for such conditions here. */
5558 if (AGGREGATE_TYPE_P (gnu_type))
5559 return false;
5560
5561 return true;
5562}
5563
bb1f7929
EB
5564/* Return true if TYPE is a smaller form of ORIG_TYPE. */
5565
5566bool
5567smaller_form_type_p (tree type, tree orig_type)
5568{
5569 tree size, osize;
5570
5571 /* We're not interested in variants here. */
5572 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
5573 return false;
5574
5575 /* Like a variant, a packable version keeps the original TYPE_NAME. */
5576 if (TYPE_NAME (type) != TYPE_NAME (orig_type))
5577 return false;
5578
5579 size = TYPE_SIZE (type);
5580 osize = TYPE_SIZE (orig_type);
5581
5582 if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
5583 return false;
5584
5585 return tree_int_cst_lt (size, osize) != 0;
5586}
5587
a1ab4c31
AC
5588/* Perform final processing on global variables. */
5589
65444786
EB
5590static GTY (()) tree dummy_global;
5591
a1ab4c31
AC
5592void
5593gnat_write_global_declarations (void)
5594{
10e4d056
EB
5595 unsigned int i;
5596 tree iter;
5597
65444786
EB
5598 /* If we have declared types as used at the global level, insert them in
5599 the global hash table. We use a dummy variable for this purpose. */
9771b263 5600 if (types_used_by_cur_var_decl && !types_used_by_cur_var_decl->is_empty ())
65444786 5601 {
35e8bcf4 5602 struct varpool_node *node;
d3c268ab
EB
5603 char *label;
5604
5605 ASM_FORMAT_PRIVATE_NAME (label, first_global_object_name, 0);
65444786 5606 dummy_global
d3c268ab
EB
5607 = build_decl (BUILTINS_LOCATION, VAR_DECL, get_identifier (label),
5608 void_type_node);
65444786
EB
5609 TREE_STATIC (dummy_global) = 1;
5610 TREE_ASM_WRITTEN (dummy_global) = 1;
5d59b5e1 5611 node = varpool_node_for_decl (dummy_global);
ead84f73 5612 node->symbol.force_output = 1;
65444786 5613
9771b263 5614 while (!types_used_by_cur_var_decl->is_empty ())
65444786 5615 {
9771b263 5616 tree t = types_used_by_cur_var_decl->pop ();
65444786
EB
5617 types_used_by_var_decl_insert (t, dummy_global);
5618 }
5619 }
5620
10e4d056
EB
5621 /* Output debug information for all global type declarations first. This
5622 ensures that global types whose compilation hasn't been finalized yet,
5623 for example pointers to Taft amendment types, have their compilation
5624 finalized in the right context. */
9771b263 5625 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
10e4d056
EB
5626 if (TREE_CODE (iter) == TYPE_DECL)
5627 debug_hooks->global_decl (iter);
5628
65d630d4
JH
5629 /* Proceed to optimize and emit assembly. */
5630 finalize_compilation_unit ();
a1ab4c31 5631
10e4d056
EB
5632 /* After cgraph has had a chance to emit everything that's going to
5633 be emitted, output debug information for the rest of the globals. */
5634 if (!seen_error ())
5635 {
5636 timevar_push (TV_SYMOUT);
9771b263 5637 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
10e4d056
EB
5638 if (TREE_CODE (iter) != TYPE_DECL)
5639 debug_hooks->global_decl (iter);
5640 timevar_pop (TV_SYMOUT);
5641 }
a1ab4c31
AC
5642}
5643
5644/* ************************************************************************
5645 * * GCC builtins support *
5646 * ************************************************************************ */
5647
5648/* The general scheme is fairly simple:
5649
5650 For each builtin function/type to be declared, gnat_install_builtins calls
5651 internal facilities which eventually get to gnat_push_decl, which in turn
5652 tracks the builtin function decls thus declared in the 'builtin_decls' global
5653 data structure. When an Intrinsic subprogram declaration is processed, we
5654 search this global data structure to retrieve the associated BUILT_IN DECL
5655 node. */
5656
5657/* Search the chain of currently available builtin declarations for a node
5658 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
5659 found, if any, or NULL_TREE otherwise. */
5660tree
5661builtin_decl_for (tree name)
5662{
5663 unsigned i;
5664 tree decl;
5665
9771b263 5666 FOR_EACH_VEC_SAFE_ELT (builtin_decls, i, decl)
a1ab4c31
AC
5667 if (DECL_NAME (decl) == name)
5668 return decl;
5669
5670 return NULL_TREE;
5671}
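/* A hypothetical usage sketch: when an Intrinsic subprogram imported with
   External_Name "__builtin_memcpy" is processed, the lookup boils down to

     tree decl = builtin_decl_for (get_identifier ("__builtin_memcpy"));

   with decl left as NULL_TREE if no builtin by that name was declared by
   gnat_install_builtins.  */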
5672
5673/* The code below eventually exposes gnat_install_builtins, which declares
5674 the builtin types and functions we might need, either internally or as
5675 user accessible facilities.
5676
5677 ??? This is a first implementation shot, still in rough shape. It is
5678 heavily inspired by the "C" family implementation, with chunks copied
5679 verbatim from there.
5680
5681 Two obvious TODO candidates are
5682 o Use a more efficient name/decl mapping scheme
5683 o Devise a middle-end infrastructure to avoid having to copy
5684 pieces between front-ends. */
5685
5686/* ----------------------------------------------------------------------- *
5687 * BUILTIN ELEMENTARY TYPES *
5688 * ----------------------------------------------------------------------- */
5689
5690/* Standard data types to be used in builtin argument declarations. */
5691
5692enum c_tree_index
5693{
5694 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
5695 CTI_STRING_TYPE,
5696 CTI_CONST_STRING_TYPE,
5697
5698 CTI_MAX
5699};
5700
5701static tree c_global_trees[CTI_MAX];
5702
5703#define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
5704#define string_type_node c_global_trees[CTI_STRING_TYPE]
5705#define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
5706
5707 /* ??? In addition to some attribute handlers, we currently don't support a
5708 (small) number of builtin-types, which in turn inhibits support for a
5709 number of builtin functions. */
5710#define wint_type_node void_type_node
5711#define intmax_type_node void_type_node
5712#define uintmax_type_node void_type_node
5713
5714/* Build the void_list_node (void_type_node having been created). */
5715
5716static tree
5717build_void_list_node (void)
5718{
5719 tree t = build_tree_list (NULL_TREE, void_type_node);
5720 return t;
5721}
5722
5723/* Used to help initialize the builtin-types.def table. When a type of
5724 the correct size doesn't exist, use error_mark_node instead of NULL.
5725 The latter results in segfaults even when a decl using the type doesn't
5726 get invoked. */
5727
5728static tree
5729builtin_type_for_size (int size, bool unsignedp)
5730{
ced57283 5731 tree type = gnat_type_for_size (size, unsignedp);
a1ab4c31
AC
5732 return type ? type : error_mark_node;
5733}
5734
5735/* Build/push the elementary type decls that builtin functions/types
5736 will need. */
5737
5738static void
5739install_builtin_elementary_types (void)
5740{
728936bb 5741 signed_size_type_node = gnat_signed_type (size_type_node);
a1ab4c31
AC
5742 pid_type_node = integer_type_node;
5743 void_list_node = build_void_list_node ();
5744
5745 string_type_node = build_pointer_type (char_type_node);
5746 const_string_type_node
5747 = build_pointer_type (build_qualified_type
5748 (char_type_node, TYPE_QUAL_CONST));
5749}
5750
5751/* ----------------------------------------------------------------------- *
5752 * BUILTIN FUNCTION TYPES *
5753 * ----------------------------------------------------------------------- */
5754
5755/* Now, builtin function types per se. */
5756
5757enum c_builtin_type
5758{
5759#define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
5760#define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
5761#define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
5762#define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
5763#define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5764#define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5765#define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
5766#define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
5767#define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
5768#define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
5769#define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
5770#define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
5771#define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5772#define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5773#define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG6) \
5774 NAME,
5775#define DEF_POINTER_TYPE(NAME, TYPE) NAME,
5776#include "builtin-types.def"
5777#undef DEF_PRIMITIVE_TYPE
5778#undef DEF_FUNCTION_TYPE_0
5779#undef DEF_FUNCTION_TYPE_1
5780#undef DEF_FUNCTION_TYPE_2
5781#undef DEF_FUNCTION_TYPE_3
5782#undef DEF_FUNCTION_TYPE_4
5783#undef DEF_FUNCTION_TYPE_5
5784#undef DEF_FUNCTION_TYPE_6
5785#undef DEF_FUNCTION_TYPE_7
5786#undef DEF_FUNCTION_TYPE_VAR_0
5787#undef DEF_FUNCTION_TYPE_VAR_1
5788#undef DEF_FUNCTION_TYPE_VAR_2
5789#undef DEF_FUNCTION_TYPE_VAR_3
5790#undef DEF_FUNCTION_TYPE_VAR_4
5791#undef DEF_FUNCTION_TYPE_VAR_5
5792#undef DEF_POINTER_TYPE
5793 BT_LAST
5794};
5795
5796typedef enum c_builtin_type builtin_type;
5797
5798/* A temporary array used in communication with def_fn_type. */
5799static GTY(()) tree builtin_types[(int) BT_LAST + 1];
5800
5801 /* A helper function for install_builtin_function_types. Build function type
5802 for DEF with return type RET and N arguments. If VAR is true, then the
5803 function should be variadic after those N arguments.
5804
5805 Takes special care not to ICE if any of the types involved are
5806 error_mark_node, which indicates that said type is not in fact available
5807 (see builtin_type_for_size), in which case the function type as a whole
5808 should be error_mark_node. */
5809
5810static void
5811def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
5812{
e5b00edf
NF
5813 tree t;
5814 tree *args = XALLOCAVEC (tree, n);
a1ab4c31
AC
5815 va_list list;
5816 int i;
5817
5818 va_start (list, n);
5819 for (i = 0; i < n; ++i)
5820 {
c6bd4220 5821 builtin_type a = (builtin_type) va_arg (list, int);
a1ab4c31
AC
5822 t = builtin_types[a];
5823 if (t == error_mark_node)
5824 goto egress;
e5b00edf 5825 args[i] = t;
a1ab4c31 5826 }
a1ab4c31 5827
a1ab4c31
AC
5828 t = builtin_types[ret];
5829 if (t == error_mark_node)
5830 goto egress;
e5b00edf
NF
5831 if (var)
5832 t = build_varargs_function_type_array (t, n, args);
5833 else
5834 t = build_function_type_array (t, n, args);
a1ab4c31
AC
5835
5836 egress:
5837 builtin_types[def] = t;
0edf1bb2 5838 va_end (list);
a1ab4c31
AC
5839}
5840
5841/* Build the builtin function types and install them in the builtin_types
5842 array for later use in builtin function decls. */
5843
5844static void
5845install_builtin_function_types (void)
5846{
5847 tree va_list_ref_type_node;
5848 tree va_list_arg_type_node;
5849
5850 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
5851 {
5852 va_list_arg_type_node = va_list_ref_type_node =
5853 build_pointer_type (TREE_TYPE (va_list_type_node));
5854 }
5855 else
5856 {
5857 va_list_arg_type_node = va_list_type_node;
5858 va_list_ref_type_node = build_reference_type (va_list_type_node);
5859 }
5860
5861#define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
5862 builtin_types[ENUM] = VALUE;
5863#define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
5864 def_fn_type (ENUM, RETURN, 0, 0);
5865#define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
5866 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
5867#define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
5868 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
5869#define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5870 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
5871#define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5872 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
5873#define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5874 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5875#define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5876 ARG6) \
5877 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5878#define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5879 ARG6, ARG7) \
5880 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5881#define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
5882 def_fn_type (ENUM, RETURN, 1, 0);
5883#define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
5884 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
5885#define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
5886 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
5887#define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5888 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
5889#define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5890 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
5891#define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5892 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5893#define DEF_POINTER_TYPE(ENUM, TYPE) \
5894 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
5895
5896#include "builtin-types.def"
5897
5898#undef DEF_PRIMITIVE_TYPE
5899#undef DEF_FUNCTION_TYPE_1
5900#undef DEF_FUNCTION_TYPE_2
5901#undef DEF_FUNCTION_TYPE_3
5902#undef DEF_FUNCTION_TYPE_4
5903#undef DEF_FUNCTION_TYPE_5
5904#undef DEF_FUNCTION_TYPE_6
5905#undef DEF_FUNCTION_TYPE_VAR_0
5906#undef DEF_FUNCTION_TYPE_VAR_1
5907#undef DEF_FUNCTION_TYPE_VAR_2
5908#undef DEF_FUNCTION_TYPE_VAR_3
5909#undef DEF_FUNCTION_TYPE_VAR_4
5910#undef DEF_FUNCTION_TYPE_VAR_5
5911#undef DEF_POINTER_TYPE
5912 builtin_types[(int) BT_LAST] = NULL_TREE;
5913}
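/* A hypothetical example of the expansion performed above: a
   builtin-types.def entry such as

     DEF_FUNCTION_TYPE_1 (BT_FN_INT_INT, BT_INT, BT_INT)

   (the names are illustrative) expands to

     def_fn_type (BT_FN_INT_INT, BT_INT, 0, 1, BT_INT);

   i.e. a non-variadic function type with one BT_INT argument and a BT_INT
   return type, stored into builtin_types[BT_FN_INT_INT].  */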
5914
5915/* ----------------------------------------------------------------------- *
5916 * BUILTIN ATTRIBUTES *
5917 * ----------------------------------------------------------------------- */
5918
5919enum built_in_attribute
5920{
5921#define DEF_ATTR_NULL_TREE(ENUM) ENUM,
5922#define DEF_ATTR_INT(ENUM, VALUE) ENUM,
e384e6b5 5923#define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
a1ab4c31
AC
5924#define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
5925#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
5926#include "builtin-attrs.def"
5927#undef DEF_ATTR_NULL_TREE
5928#undef DEF_ATTR_INT
e384e6b5 5929#undef DEF_ATTR_STRING
a1ab4c31
AC
5930#undef DEF_ATTR_IDENT
5931#undef DEF_ATTR_TREE_LIST
5932 ATTR_LAST
5933};
5934
5935static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
5936
5937static void
5938install_builtin_attributes (void)
5939{
5940 /* Fill in the built_in_attributes array. */
5941#define DEF_ATTR_NULL_TREE(ENUM) \
5942 built_in_attributes[(int) ENUM] = NULL_TREE;
5943#define DEF_ATTR_INT(ENUM, VALUE) \
5944 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
5945#define DEF_ATTR_STRING(ENUM, VALUE) \
5946 built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
5947#define DEF_ATTR_IDENT(ENUM, STRING) \
5948 built_in_attributes[(int) ENUM] = get_identifier (STRING);
5949#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
5950 built_in_attributes[(int) ENUM] \
5951 = tree_cons (built_in_attributes[(int) PURPOSE], \
5952 built_in_attributes[(int) VALUE], \
5953 built_in_attributes[(int) CHAIN]);
5954#include "builtin-attrs.def"
5955#undef DEF_ATTR_NULL_TREE
5956#undef DEF_ATTR_INT
e384e6b5 5957#undef DEF_ATTR_STRING
5958#undef DEF_ATTR_IDENT
5959#undef DEF_ATTR_TREE_LIST
5960}
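
/* For instance, a builtin-attrs.def entry along the lines of (illustrative
   names)

     DEF_ATTR_TREE_LIST (ATTR_NOTHROW_LIST, ATTR_NOTHROW, ATTR_NULL, ATTR_NULL)

   expands to

     built_in_attributes[(int) ATTR_NOTHROW_LIST]
       = tree_cons (built_in_attributes[(int) ATTR_NOTHROW],
                    built_in_attributes[(int) ATTR_NULL],
                    built_in_attributes[(int) ATTR_NULL]);

   i.e. a one-element TREE_LIST whose purpose is the "nothrow" identifier,
   ready to be used as the attribute list of a builtin declaration.  */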
5961
5962/* Handle a "const" attribute; arguments as in
5963 struct attribute_spec.handler. */
5964
5965static tree
5966handle_const_attribute (tree *node, tree ARG_UNUSED (name),
5967 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5968 bool *no_add_attrs)
5969{
5970 if (TREE_CODE (*node) == FUNCTION_DECL)
5971 TREE_READONLY (*node) = 1;
5972 else
5973 *no_add_attrs = true;
5974
5975 return NULL_TREE;
5976}
5977
5978/* Handle a "nothrow" attribute; arguments as in
5979 struct attribute_spec.handler. */
5980
5981static tree
5982handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
5983 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5984 bool *no_add_attrs)
5985{
5986 if (TREE_CODE (*node) == FUNCTION_DECL)
5987 TREE_NOTHROW (*node) = 1;
5988 else
5989 *no_add_attrs = true;
5990
5991 return NULL_TREE;
5992}
5993
5994/* Handle a "pure" attribute; arguments as in
5995 struct attribute_spec.handler. */
5996
5997static tree
5998handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5999 int ARG_UNUSED (flags), bool *no_add_attrs)
6000{
6001 if (TREE_CODE (*node) == FUNCTION_DECL)
6002 DECL_PURE_P (*node) = 1;
6003 /* ??? TODO: Support types. */
6004 else
6005 {
6006 warning (OPT_Wattributes, "%qs attribute ignored",
6007 IDENTIFIER_POINTER (name));
6008 *no_add_attrs = true;
6009 }
6010
6011 return NULL_TREE;
6012}
6013
6014/* Handle a "no vops" attribute; arguments as in
6015 struct attribute_spec.handler. */
6016
6017static tree
6018handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
6019 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6020 bool *ARG_UNUSED (no_add_attrs))
6021{
6022 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
6023 DECL_IS_NOVOPS (*node) = 1;
6024 return NULL_TREE;
6025}
6026
6027/* Helper for nonnull attribute handling; fetch the operand number
6028 from the attribute argument list. */
6029
6030static bool
6031get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
6032{
6033 /* Verify the arg number is a constant. */
6034 if (TREE_CODE (arg_num_expr) != INTEGER_CST
6035 || TREE_INT_CST_HIGH (arg_num_expr) != 0)
6036 return false;
6037
6038 *valp = TREE_INT_CST_LOW (arg_num_expr);
6039 return true;
6040}
6041
6042/* Handle the "nonnull" attribute. */
6043static tree
6044handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
6045 tree args, int ARG_UNUSED (flags),
6046 bool *no_add_attrs)
6047{
6048 tree type = *node;
6049 unsigned HOST_WIDE_INT attr_arg_num;
6050
6051 /* If no arguments are specified, all pointer arguments should be
6052 non-null. Verify a full prototype is given so that the arguments
6053 will have the correct types when we actually check them later. */
6054 if (!args)
6055 {
f4da8dce 6056 if (!prototype_p (type))
6057 {
6058 error ("nonnull attribute without arguments on a non-prototype");
6059 *no_add_attrs = true;
6060 }
6061 return NULL_TREE;
6062 }
6063
6064 /* Argument list specified. Verify that each argument number references
6065 a pointer argument. */
6066 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
6067 {
6068 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
6069
6070 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
6071 {
6072 error ("nonnull argument has invalid operand number (argument %lu)",
6073 (unsigned long) attr_arg_num);
6074 *no_add_attrs = true;
6075 return NULL_TREE;
6076 }
6077
d7d058c5 6078 if (prototype_p (type))
a1ab4c31 6079 {
6080 function_args_iterator iter;
6081 tree argument;
6082
6083 function_args_iter_init (&iter, type);
6084 for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
a1ab4c31 6085 {
d7d058c5 6086 argument = function_args_iter_cond (&iter);
6087 if (!argument || ck_num == arg_num)
6088 break;
6089 }
6090
6091 if (!argument
d7d058c5 6092 || TREE_CODE (argument) == VOID_TYPE)
a1ab4c31 6093 {
6094 error ("nonnull argument with out-of-range operand number "
6095 "(argument %lu, operand %lu)",
6096 (unsigned long) attr_arg_num, (unsigned long) arg_num);
6097 *no_add_attrs = true;
6098 return NULL_TREE;
6099 }
6100
d7d058c5 6101 if (TREE_CODE (argument) != POINTER_TYPE)
a1ab4c31 6102 {
6103 error ("nonnull argument references non-pointer operand "
6104 "(argument %lu, operand %lu)",
6105 (unsigned long) attr_arg_num, (unsigned long) arg_num);
6106 *no_add_attrs = true;
6107 return NULL_TREE;
6108 }
6109 }
6110 }
6111
6112 return NULL_TREE;
6113}
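
/* For illustration, on a hypothetical prototype such as

     extern void copy_mem (void *dst, const void *src, int len)
       __attribute__ ((nonnull (1, 2)));

   the loop above verifies that operands 1 and 2 both designate pointer
   arguments; "nonnull (4)" would be rejected as out of range and
   "nonnull (3)" as referencing a non-pointer operand.  */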
6114
6115/* Handle a "sentinel" attribute. */
6116
6117static tree
6118handle_sentinel_attribute (tree *node, tree name, tree args,
6119 int ARG_UNUSED (flags), bool *no_add_attrs)
6120{
f4da8dce 6121 if (!prototype_p (*node))
6122 {
6123 warning (OPT_Wattributes,
6124 "%qs attribute requires prototypes with named arguments",
6125 IDENTIFIER_POINTER (name));
6126 *no_add_attrs = true;
6127 }
6128 else
6129 {
dcf0c47e 6130 if (!stdarg_p (*node))
6131 {
6132 warning (OPT_Wattributes,
6133 "%qs attribute only applies to variadic functions",
6134 IDENTIFIER_POINTER (name));
6135 *no_add_attrs = true;
6136 }
6137 }
6138
6139 if (args)
6140 {
6141 tree position = TREE_VALUE (args);
6142
6143 if (TREE_CODE (position) != INTEGER_CST)
6144 {
6145 warning (0, "requested position is not an integer constant");
6146 *no_add_attrs = true;
6147 }
6148 else
6149 {
6150 if (tree_int_cst_lt (position, integer_zero_node))
6151 {
6152 warning (0, "requested position is less than zero");
6153 *no_add_attrs = true;
6154 }
6155 }
6156 }
6157
6158 return NULL_TREE;
6159}
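
/* For illustration, the attribute is meant for variadic prototypes such as

     extern void build_list (const char *first, ...)
       __attribute__ ((sentinel));

   On a non-variadic or unprototyped declaration it only draws one of the
   warnings above and is dropped.  */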
6160
6161/* Handle a "noreturn" attribute; arguments as in
6162 struct attribute_spec.handler. */
6163
6164static tree
6165handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6166 int ARG_UNUSED (flags), bool *no_add_attrs)
6167{
6168 tree type = TREE_TYPE (*node);
6169
6170 /* See FIXME comment in c_common_attribute_table. */
6171 if (TREE_CODE (*node) == FUNCTION_DECL)
6172 TREE_THIS_VOLATILE (*node) = 1;
6173 else if (TREE_CODE (type) == POINTER_TYPE
6174 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
6175 TREE_TYPE (*node)
6176 = build_pointer_type
6177 (build_type_variant (TREE_TYPE (type),
6178 TYPE_READONLY (TREE_TYPE (type)), 1));
6179 else
6180 {
6181 warning (OPT_Wattributes, "%qs attribute ignored",
6182 IDENTIFIER_POINTER (name));
6183 *no_add_attrs = true;
6184 }
6185
6186 return NULL_TREE;
6187}
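
/* The two accepted shapes, for illustration:

     extern void fatal_stop (void) __attribute__ ((noreturn));
     void (*panic_hook) (void) __attribute__ ((noreturn));

   The first marks the FUNCTION_DECL itself via TREE_THIS_VOLATILE; the
   second rebuilds the pointed-to FUNCTION_TYPE as a volatile-qualified
   variant, which is how the middle-end encodes noreturn on types.  */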
6188
6189/* Handle a "leaf" attribute; arguments as in
6190 struct attribute_spec.handler. */
6191
6192static tree
6193handle_leaf_attribute (tree *node, tree name,
6194 tree ARG_UNUSED (args),
6195 int ARG_UNUSED (flags), bool *no_add_attrs)
6196{
6197 if (TREE_CODE (*node) != FUNCTION_DECL)
6198 {
6199 warning (OPT_Wattributes, "%qE attribute ignored", name);
6200 *no_add_attrs = true;
6201 }
6202 if (!TREE_PUBLIC (*node))
6203 {
32a5388a 6204 warning (OPT_Wattributes, "%qE attribute has no effect", name);
6205 *no_add_attrs = true;
6206 }
6207
6208 return NULL_TREE;
6209}
6210
6211/* Handle a "malloc" attribute; arguments as in
6212 struct attribute_spec.handler. */
6213
6214static tree
6215handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6216 int ARG_UNUSED (flags), bool *no_add_attrs)
6217{
6218 if (TREE_CODE (*node) == FUNCTION_DECL
6219 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
6220 DECL_IS_MALLOC (*node) = 1;
6221 else
6222 {
6223 warning (OPT_Wattributes, "%qs attribute ignored",
6224 IDENTIFIER_POINTER (name));
6225 *no_add_attrs = true;
6226 }
6227
6228 return NULL_TREE;
6229}
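
/* For illustration, a hypothetical declaration such as

     extern void *grab_chunk (__SIZE_TYPE__) __attribute__ ((malloc));

   qualifies because the function returns a pointer, so DECL_IS_MALLOC is
   set; on a function returning a non-pointer the attribute is ignored
   with a warning.  */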
6230
6231/* Fake handler for attributes we don't properly support. */
6232
6233tree
6234fake_attribute_handler (tree * ARG_UNUSED (node),
6235 tree ARG_UNUSED (name),
6236 tree ARG_UNUSED (args),
6237 int ARG_UNUSED (flags),
6238 bool * ARG_UNUSED (no_add_attrs))
6239{
6240 return NULL_TREE;
6241}
6242
6243/* Handle a "type_generic" attribute. */
6244
6245static tree
6246handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
6247 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6248 bool * ARG_UNUSED (no_add_attrs))
6249{
6250 /* Ensure we have a function type. */
6251 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
b4680ca1 6252
a1ab4c31 6253 /* Ensure we have a variadic function. */
dcf0c47e 6254 gcc_assert (!prototype_p (*node) || stdarg_p (*node));
6255
6256 return NULL_TREE;
6257}
6258
6259/* Handle a "vector_size" attribute; arguments as in
6260 struct attribute_spec.handler. */
6261
6262static tree
6263handle_vector_size_attribute (tree *node, tree name, tree args,
6264 int ARG_UNUSED (flags),
6265 bool *no_add_attrs)
6266{
6267 unsigned HOST_WIDE_INT vecsize, nunits;
6268 enum machine_mode orig_mode;
6269 tree type = *node, new_type, size;
6270
6271 *no_add_attrs = true;
6272
6273 size = TREE_VALUE (args);
6274
6275 if (!host_integerp (size, 1))
6276 {
6277 warning (OPT_Wattributes, "%qs attribute ignored",
6278 IDENTIFIER_POINTER (name));
6279 return NULL_TREE;
6280 }
6281
6282 /* Get the vector size (in bytes). */
6283 vecsize = tree_low_cst (size, 1);
6284
6285 /* We need to provide for vector pointers, vector arrays, and
6286 functions returning vectors. For example:
6287
6288 __attribute__((vector_size(16))) short *foo;
6289
6290 In this case, the mode is SI, but the type being modified is
6291 HI, so we need to look further. */
6292
6293 while (POINTER_TYPE_P (type)
6294 || TREE_CODE (type) == FUNCTION_TYPE
132a5459 6295 || TREE_CODE (type) == ARRAY_TYPE)
6296 type = TREE_TYPE (type);
6297
6298 /* Get the mode of the type being modified. */
6299 orig_mode = TYPE_MODE (type);
6300
6301 if ((!INTEGRAL_TYPE_P (type)
6302 && !SCALAR_FLOAT_TYPE_P (type)
6303 && !FIXED_POINT_TYPE_P (type))
6304 || (!SCALAR_FLOAT_MODE_P (orig_mode)
6305 && GET_MODE_CLASS (orig_mode) != MODE_INT
6306 && !ALL_SCALAR_FIXED_POINT_MODE_P (orig_mode))
6307 || !host_integerp (TYPE_SIZE_UNIT (type), 1)
6308 || TREE_CODE (type) == BOOLEAN_TYPE)
6309 {
6310 error ("invalid vector type for attribute %qs",
6311 IDENTIFIER_POINTER (name));
6312 return NULL_TREE;
6313 }
6314
6315 if (vecsize % tree_low_cst (TYPE_SIZE_UNIT (type), 1))
6316 {
6317 error ("vector size not an integral multiple of component size");
6318 return NULL;
6319 }
6320
6321 if (vecsize == 0)
6322 {
6323 error ("zero vector size");
6324 return NULL;
6325 }
6326
6327 /* Calculate how many units fit in the vector. */
6328 nunits = vecsize / tree_low_cst (TYPE_SIZE_UNIT (type), 1);
6329 if (nunits & (nunits - 1))
6330 {
6331 error ("number of components of the vector not a power of two");
6332 return NULL_TREE;
6333 }
6334
6335 new_type = build_vector_type (type, nunits);
6336
6337 /* Build back pointers if needed. */
9b469089 6338 *node = reconstruct_complex_type (*node, new_type);
6339
6340 return NULL_TREE;
6341}
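
/* Worked example for the checks above: given

     typedef int sample_v4si __attribute__ ((vector_size (16)));

   vecsize is 16 and the component type is 4 bytes wide on typical targets,
   so nunits is 4, a power of two, and *node is replaced by
   build_vector_type (int, 4), with reconstruct_complex_type rebuilding any
   enclosing pointer, array or function layers.  */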
6342
6343/* Handle a "vector_type" attribute; arguments as in
6344 struct attribute_spec.handler. */
6345
6346static tree
6347handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6348 int ARG_UNUSED (flags),
6349 bool *no_add_attrs)
6350{
6351 /* Vector representative type and size. */
6352 tree rep_type = *node;
6353 tree rep_size = TYPE_SIZE_UNIT (rep_type);
6354
6355 /* Vector size in bytes and number of units. */
6356 unsigned HOST_WIDE_INT vec_bytes, vec_units;
6357
6358 /* Vector element type and mode. */
6359 tree elem_type;
6360 enum machine_mode elem_mode;
6361
6362 *no_add_attrs = true;
6363
6364 if (TREE_CODE (rep_type) != ARRAY_TYPE)
6365 {
6366 error ("attribute %qs applies to array types only",
6367 IDENTIFIER_POINTER (name));
6368 return NULL_TREE;
6369 }
6370
 6371 /* Silently punt on variable sizes.  We can't make vector types for them:
 6372 we need to ignore them on front-end generated subtypes of unconstrained
 6373 bases, and since this attribute is for binding implementors, not end-users,
 6374 we should never get here from legitimate explicit uses. */
6375
6376 if (!host_integerp (rep_size, 1))
6377 return NULL_TREE;
6378
6379 /* Get the element type/mode and check this is something we know
6380 how to make vectors of. */
6381
6382 elem_type = TREE_TYPE (rep_type);
6383 elem_mode = TYPE_MODE (elem_type);
6384
6385 if ((!INTEGRAL_TYPE_P (elem_type)
6386 && !SCALAR_FLOAT_TYPE_P (elem_type)
6387 && !FIXED_POINT_TYPE_P (elem_type))
6388 || (!SCALAR_FLOAT_MODE_P (elem_mode)
6389 && GET_MODE_CLASS (elem_mode) != MODE_INT
6390 && !ALL_SCALAR_FIXED_POINT_MODE_P (elem_mode))
6391 || !host_integerp (TYPE_SIZE_UNIT (elem_type), 1))
6392 {
6393 error ("invalid element type for attribute %qs",
6394 IDENTIFIER_POINTER (name));
6395 return NULL_TREE;
6396 }
6397
6398 /* Sanity check the vector size and element type consistency. */
6399
6400 vec_bytes = tree_low_cst (rep_size, 1);
6401
6402 if (vec_bytes % tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1))
6403 {
6404 error ("vector size not an integral multiple of component size");
6405 return NULL;
6406 }
6407
6408 if (vec_bytes == 0)
6409 {
6410 error ("zero vector size");
6411 return NULL;
6412 }
6413
6414 vec_units = vec_bytes / tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1);
6415 if (vec_units & (vec_units - 1))
6416 {
6417 error ("number of components of the vector not a power of two");
6418 return NULL_TREE;
6419 }
6420
6421 /* Build the vector type and replace. */
6422
6423 *node = build_vector_type (elem_type, vec_units);
6424 TYPE_REPRESENTATIVE_ARRAY (*node) = rep_type;
6425
6426 return NULL_TREE;
6427}
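
/* Worked example for the checks above (sizes are hypothetical): applied to
   a constrained array type whose TYPE_SIZE_UNIT is 16 bytes and whose
   element type is a 4-byte integer, vec_units comes out as 4, a power of
   two, so the array type is replaced by a 4-element vector type and the
   original array is recorded as its TYPE_REPRESENTATIVE_ARRAY.  */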
6428
6429/* ----------------------------------------------------------------------- *
6430 * BUILTIN FUNCTIONS *
6431 * ----------------------------------------------------------------------- */
6432
 6433/* Worker for DEF_BUILTIN.  Possibly define a builtin function with one or two
 6434 names.  The C-style -fno-builtin / -fno-nonansi-builtin filtering does not
 6435 apply to Ada, which lacks those options, so NONANSI_P is accepted but unused. */
6436
6437static void
6438def_builtin_1 (enum built_in_function fncode,
6439 const char *name,
6440 enum built_in_class fnclass,
6441 tree fntype, tree libtype,
6442 bool both_p, bool fallback_p,
6443 bool nonansi_p ATTRIBUTE_UNUSED,
6444 tree fnattrs, bool implicit_p)
6445{
6446 tree decl;
6447 const char *libname;
6448
 6449 /* Preserve an already installed decl.  It most likely was set up in advance
6450 (e.g. as part of the internal builtins) for specific reasons. */
e79983f4 6451 if (builtin_decl_explicit (fncode) != NULL_TREE)
6452 return;
6453
6454 gcc_assert ((!both_p && !fallback_p)
6455 || !strncmp (name, "__builtin_",
6456 strlen ("__builtin_")));
6457
6458 libname = name + strlen ("__builtin_");
6459 decl = add_builtin_function (name, fntype, fncode, fnclass,
6460 (fallback_p ? libname : NULL),
6461 fnattrs);
6462 if (both_p)
6463 /* ??? This is normally further controlled by command-line options
6464 like -fno-builtin, but we don't have them for Ada. */
6465 add_builtin_function (libname, libtype, fncode, fnclass,
6466 NULL, fnattrs);
6467
e79983f4 6468 set_builtin_decl (fncode, decl, implicit_p);
6469}
6470
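/* Dummy language flags, present only so that the builtins.def expansion
   below compiles; Ada has no corresponding command-line options, so both
   remain 0.  */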
6471static int flag_isoc94 = 0;
6472static int flag_isoc99 = 0;
6473
6474/* Install what the common builtins.def offers. */
6475
6476static void
6477install_builtin_functions (void)
6478{
6479#define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
6480 NONANSI_P, ATTRS, IMPLICIT, COND) \
6481 if (NAME && COND) \
6482 def_builtin_1 (ENUM, NAME, CLASS, \
6483 builtin_types[(int) TYPE], \
6484 builtin_types[(int) LIBTYPE], \
6485 BOTH_P, FALLBACK_P, NONANSI_P, \
6486 built_in_attributes[(int) ATTRS], IMPLICIT);
6487#include "builtins.def"
6488#undef DEF_BUILTIN
6489}
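
/* For illustration, a builtins.def entry of the general shape (exact names
   and attribute lists vary)

     DEF_LIB_BUILTIN (BUILT_IN_STRLEN, "strlen", BT_FN_SIZE_CONST_STRING,
                      ATTR_PURE_NOTHROW_LEAF_LIST)

   funnels through DEF_BUILTIN above into def_builtin_1, which registers
   "__builtin_strlen" and, since both_p is set for library builtins, the
   plain "strlen" entry point as well.  */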
6490
6491/* ----------------------------------------------------------------------- *
6492 * BUILTIN FUNCTIONS *
6493 * ----------------------------------------------------------------------- */
6494
6495/* Install the builtin functions we might need. */
6496
6497void
6498gnat_install_builtins (void)
6499{
6500 install_builtin_elementary_types ();
6501 install_builtin_function_types ();
6502 install_builtin_attributes ();
6503
6504 /* Install builtins used by generic middle-end pieces first. Some of these
6505 know about internal specificities and control attributes accordingly, for
6506 instance __builtin_alloca vs no-throw and -fstack-check. We will ignore
6507 the generic definition from builtins.def. */
384c400a 6508 build_common_builtin_nodes ();
6509
6510 /* Now, install the target specific builtins, such as the AltiVec family on
6511 ppc, and the common set as exposed by builtins.def. */
6512 targetm.init_builtins ();
6513 install_builtin_functions ();
6514}
6515
6516#include "gt-ada-utils.h"
6517#include "gtype-ada.h"