gcc/ada/gcc-interface/utils.c
1/****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
 9 * Copyright (C) 1992-2013, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
25
26#include "config.h"
27#include "system.h"
28#include "coretypes.h"
29#include "tm.h"
30#include "tree.h"
31#include "flags.h"
 32#include "toplev.h"
 33#include "diagnostic-core.h"
 34#include "output.h"
 35#include "ggc.h"
 36#include "debug.h"
 37#include "convert.h"
 38#include "target.h"
 39#include "common/common-target.h"
 40#include "langhooks.h"
 41#include "cgraph.h"
 42#include "diagnostic.h"
 43#include "timevar.h"
 44#include "tree-dump.h"
45#include "tree-inline.h"
46#include "tree-iterator.h"
47
48#include "ada.h"
49#include "types.h"
50#include "atree.h"
51#include "elists.h"
52#include "namet.h"
53#include "nlists.h"
54#include "stringt.h"
55#include "uintp.h"
56#include "fe.h"
57#include "sinfo.h"
58#include "einfo.h"
59#include "ada-tree.h"
60#include "gigi.h"
61
62/* If nonzero, pretend we are allocating at global level. */
63int force_global;
64
65/* The default alignment of "double" floating-point types, i.e. floating
66 point types whose size is equal to 64 bits, or 0 if this alignment is
67 not specifically capped. */
68int double_float_alignment;
69
70/* The default alignment of "double" or larger scalar types, i.e. scalar
71 types whose size is greater or equal to 64 bits, or 0 if this alignment
72 is not specifically capped. */
73int double_scalar_alignment;
74
75/* Tree nodes for the various types and decls we create. */
76tree gnat_std_decls[(int) ADT_LAST];
77
78/* Functions to call for each of the possible raise reasons. */
79tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
80
 81/* Likewise, but with extra info for each of the possible raise reasons. */
82tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];
83
84/* Forward declarations for handlers of attributes. */
85static tree handle_const_attribute (tree *, tree, tree, int, bool *);
86static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
87static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
88static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
89static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
90static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
91static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
 92static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
93static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
94static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
 95static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
 96static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);
97
 98/* Fake handler for attributes we don't properly support, typically because
 99 they'd require dragging in a lot of the common C front-end circuitry. */
100static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
101
102/* Table of machine-independent internal attributes for Ada. We support
103 this minimal set of attributes to accommodate the needs of builtins. */
104const struct attribute_spec gnat_internal_attribute_table[] =
105{
106 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
107 affects_type_identity } */
108 { "const", 0, 0, true, false, false, handle_const_attribute,
109 false },
110 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute,
111 false },
112 { "pure", 0, 0, true, false, false, handle_pure_attribute,
113 false },
114 { "no vops", 0, 0, true, false, false, handle_novops_attribute,
115 false },
116 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute,
117 false },
118 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute,
119 false },
120 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute,
121 false },
122 { "leaf", 0, 0, true, false, false, handle_leaf_attribute,
123 false },
124 { "malloc", 0, 0, true, false, false, handle_malloc_attribute,
125 false },
126 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute,
127 false },
128
129 { "vector_size", 1, 1, false, true, false, handle_vector_size_attribute,
130 false },
131 { "vector_type", 0, 0, false, true, false, handle_vector_type_attribute,
132 false },
133 { "may_alias", 0, 0, false, true, false, NULL, false },
134
135 /* ??? format and format_arg are heavy and not supported, which actually
136 prevents support for stdio builtins, which we however declare as part
137 of the common builtins.def contents. */
138 { "format", 3, 3, false, true, true, fake_attribute_handler, false },
139 { "format_arg", 1, 1, false, true, true, fake_attribute_handler, false },
 140
 141 { NULL, 0, 0, false, false, false, NULL, false }
142};
143
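/* Illustrative note, not in the original source: the table above is what lets
   the Ada front-end accept the attributes that builtins.def attaches to the
   builtin declarations it imports.  In plain C syntax, a declaration along
   the lines of

     extern void *alloc_like (unsigned long) __attribute__ ((malloc, nothrow));

   would be processed by handle_malloc_attribute and handle_nothrow_attribute
   from this table.  The declaration is only a sketch of the generic GCC
   attribute syntax; it is not taken from builtins.def.  */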
144/* Associates a GNAT tree node to a GCC tree node. It is used in
145 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
146 of `save_gnu_tree' for more info. */
147static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
148
149#define GET_GNU_TREE(GNAT_ENTITY) \
150 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
151
152#define SET_GNU_TREE(GNAT_ENTITY,VAL) \
153 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
154
155#define PRESENT_GNU_TREE(GNAT_ENTITY) \
156 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
157
158/* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
159static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
160
161#define GET_DUMMY_NODE(GNAT_ENTITY) \
162 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
163
164#define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
165 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
166
167#define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
168 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
169
170/* This variable keeps a table for types for each precision so that we only
171 allocate each of them once. Signed and unsigned types are kept separate.
172
173 Note that these types are only used when fold-const requests something
174 special. Perhaps we should NOT share these types; we'll see how it
175 goes later. */
176static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
177
178/* Likewise for float types, but record these by mode. */
179static GTY(()) tree float_types[NUM_MACHINE_MODES];
180
181/* For each binding contour we allocate a binding_level structure to indicate
182 the binding depth. */
183
 184struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
185 /* The binding level containing this one (the enclosing binding level). */
186 struct gnat_binding_level *chain;
187 /* The BLOCK node for this level. */
188 tree block;
189 /* If nonzero, the setjmp buffer that needs to be updated for any
190 variable-sized definition within this context. */
191 tree jmpbuf_decl;
192};
193
194/* The binding level currently in effect. */
195static GTY(()) struct gnat_binding_level *current_binding_level;
196
197/* A chain of gnat_binding_level structures awaiting reuse. */
198static GTY((deletable)) struct gnat_binding_level *free_binding_level;
199
200/* The context to be used for global declarations. */
201static GTY(()) tree global_context;
202
 203/* An array of global declarations. */
 204static GTY(()) vec<tree, va_gc> *global_decls;
205
206/* An array of builtin function declarations. */
 207static GTY(()) vec<tree, va_gc> *builtin_decls;
208
209/* An array of global renaming pointers. */
 210static GTY(()) vec<tree, va_gc> *global_renaming_pointers;
211
212/* A chain of unused BLOCK nodes. */
213static GTY((deletable)) tree free_block_chain;
214
215static int pad_type_hash_marked_p (const void *p);
216static hashval_t pad_type_hash_hash (const void *p);
217static int pad_type_hash_eq (const void *p1, const void *p2);
218
219/* A hash table of padded types. It is modelled on the generic type
220 hash table in tree.c, which must thus be used as a reference. */
221struct GTY(()) pad_type_hash {
222 unsigned long hash;
223 tree type;
224};
225
226static GTY ((if_marked ("pad_type_hash_marked_p"),
227 param_is (struct pad_type_hash)))
228 htab_t pad_type_hash_table;
229
230static tree merge_sizes (tree, tree, tree, bool, bool);
231static tree compute_related_constant (tree, tree);
232static tree split_plus (tree, tree *);
233static tree float_type_for_precision (int, enum machine_mode);
234static tree convert_to_fat_pointer (tree, tree);
235static bool potential_alignment_gap (tree, tree, tree);
236\f
 237/* Initialize data structures of the utils.c module. */
238
239void
 240init_gnat_utils (void)
 241{
 242 /* Initialize the association of GNAT nodes to GCC trees. */
 243 associate_gnat_to_gnu = ggc_alloc_cleared_vec_tree (max_gnat_nodes);
244
245 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
246 dummy_node_table = ggc_alloc_cleared_vec_tree (max_gnat_nodes);
247
248 /* Initialize the hash table of padded types. */
249 pad_type_hash_table = htab_create_ggc (512, pad_type_hash_hash,
250 pad_type_hash_eq, 0);
251}
252
 253/* Destroy data structures of the utils.c module. */
254
255void
 256destroy_gnat_utils (void)
 257{
 258 /* Destroy the association of GNAT nodes to GCC trees. */
259 ggc_free (associate_gnat_to_gnu);
260 associate_gnat_to_gnu = NULL;
 261
262 /* Destroy the association of GNAT nodes to GCC trees as dummies. */
263 ggc_free (dummy_node_table);
264 dummy_node_table = NULL;
265
266 /* Destroy the hash table of padded types. */
267 htab_delete (pad_type_hash_table);
268 pad_type_hash_table = NULL;
269
270 /* Invalidate the global renaming pointers. */
271 invalidate_global_renaming_pointers ();
272}
273\f
274/* GNAT_ENTITY is a GNAT tree node for an entity. Associate GNU_DECL, a GCC
275 tree node, with GNAT_ENTITY. If GNU_DECL is not a ..._DECL node, abort.
276 If NO_CHECK is true, the latter check is suppressed.
 277
 278 If GNU_DECL is zero, reset a previous association. */
279
280void
281save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
282{
283 /* Check that GNAT_ENTITY is not already defined and that it is being set
 284 to something which is a decl. If that is not the case, this usually
285 means GNAT_ENTITY is defined twice, but occasionally is due to some
286 Gigi problem. */
287 gcc_assert (!(gnu_decl
288 && (PRESENT_GNU_TREE (gnat_entity)
289 || (!no_check && !DECL_P (gnu_decl)))));
290
291 SET_GNU_TREE (gnat_entity, gnu_decl);
292}
293
294/* GNAT_ENTITY is a GNAT tree node for an entity. Return the GCC tree node
295 that was associated with it. If there is no such tree node, abort.
296
297 In some cases, such as delayed elaboration or expressions that need to
298 be elaborated only once, GNAT_ENTITY is really not an entity. */
299
300tree
301get_gnu_tree (Entity_Id gnat_entity)
302{
303 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
304 return GET_GNU_TREE (gnat_entity);
305}
306
307/* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
308
309bool
310present_gnu_tree (Entity_Id gnat_entity)
311{
312 return PRESENT_GNU_TREE (gnat_entity);
313}
314\f
315/* Make a dummy type corresponding to GNAT_TYPE. */
316
317tree
318make_dummy_type (Entity_Id gnat_type)
319{
320 Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
321 tree gnu_type;
322
323 /* If there is an equivalent type, get its underlying type. */
324 if (Present (gnat_underlying))
 325 gnat_underlying = Gigi_Equivalent_Type (Underlying_Type (gnat_underlying));
326
327 /* If there was no equivalent type (can only happen when just annotating
328 types) or underlying type, go back to the original type. */
329 if (No (gnat_underlying))
330 gnat_underlying = gnat_type;
331
 332 /* If there is already a dummy type, use that one. Else make one. */
333 if (PRESENT_DUMMY_NODE (gnat_underlying))
334 return GET_DUMMY_NODE (gnat_underlying);
335
336 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
337 an ENUMERAL_TYPE. */
338 gnu_type = make_node (Is_Record_Type (gnat_underlying)
339 ? tree_code_for_record_type (gnat_underlying)
340 : ENUMERAL_TYPE);
341 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
342 TYPE_DUMMY_P (gnu_type) = 1;
343 TYPE_STUB_DECL (gnu_type)
344 = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
345 if (Is_By_Reference_Type (gnat_underlying))
346 TYPE_BY_REFERENCE_P (gnu_type) = 1;
347
348 SET_DUMMY_NODE (gnat_underlying, gnu_type);
349
350 return gnu_type;
351}
352
353/* Return the dummy type that was made for GNAT_TYPE, if any. */
354
355tree
356get_dummy_type (Entity_Id gnat_type)
357{
358 return GET_DUMMY_NODE (gnat_type);
359}
360
361/* Build dummy fat and thin pointer types whose designated type is specified
362 by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter. */
363
364void
365build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
366{
367 tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
368 tree gnu_fat_type, fields, gnu_object_type;
369
370 gnu_template_type = make_node (RECORD_TYPE);
371 TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
372 TYPE_DUMMY_P (gnu_template_type) = 1;
373 gnu_ptr_template = build_pointer_type (gnu_template_type);
374
375 gnu_array_type = make_node (ENUMERAL_TYPE);
376 TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
377 TYPE_DUMMY_P (gnu_array_type) = 1;
378 gnu_ptr_array = build_pointer_type (gnu_array_type);
379
380 gnu_fat_type = make_node (RECORD_TYPE);
381 /* Build a stub DECL to trigger the special processing for fat pointer types
382 in gnat_pushdecl. */
383 TYPE_NAME (gnu_fat_type)
384 = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
385 gnu_fat_type);
386 fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
387 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
388 DECL_CHAIN (fields)
389 = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
390 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
391 finish_fat_pointer_type (gnu_fat_type, fields);
392 SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
393 /* Suppress debug info until after the type is completed. */
394 TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;
395
396 gnu_object_type = make_node (RECORD_TYPE);
397 TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
398 TYPE_DUMMY_P (gnu_object_type) = 1;
399
400 TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
401 TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
402}
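/* Illustrative sketch, not in the original source: viewed from C, the dummy
   fat pointer record built above is roughly

     struct fat_pointer
     {
       char *p_array;             (the "P_ARRAY" field, pointer to the data)
       struct bounds *p_bounds;   (the "P_BOUNDS" field, pointer to the "XUB"
                                   template that carries the bounds)
     };

   The C struct and field names are hypothetical; only P_ARRAY, P_BOUNDS and
   the XUB/XUP/XUA/XUT suffixes come from the code above.  */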
 403\f
 404/* Return true if we are in the global binding level. */
 405
 406bool
407global_bindings_p (void)
408{
 409 return force_global || current_function_decl == NULL_TREE;
410}
411
 412/* Enter a new binding level. */
413
414void
 415gnat_pushlevel (void)
416{
417 struct gnat_binding_level *newlevel = NULL;
418
419 /* Reuse a struct for this binding level, if there is one. */
420 if (free_binding_level)
421 {
422 newlevel = free_binding_level;
423 free_binding_level = free_binding_level->chain;
424 }
425 else
 426 newlevel = ggc_alloc_gnat_binding_level ();
427
428 /* Use a free BLOCK, if any; otherwise, allocate one. */
429 if (free_block_chain)
430 {
431 newlevel->block = free_block_chain;
432 free_block_chain = BLOCK_CHAIN (free_block_chain);
433 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
434 }
435 else
436 newlevel->block = make_node (BLOCK);
437
438 /* Point the BLOCK we just made to its parent. */
439 if (current_binding_level)
440 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
441
442 BLOCK_VARS (newlevel->block) = NULL_TREE;
443 BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
444 TREE_USED (newlevel->block) = 1;
445
 446 /* Add this level to the front of the chain (stack) of active levels. */
447 newlevel->chain = current_binding_level;
448 newlevel->jmpbuf_decl = NULL_TREE;
449 current_binding_level = newlevel;
450}
451
452/* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
453 and point FNDECL to this BLOCK. */
454
455void
456set_current_block_context (tree fndecl)
457{
458 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
459 DECL_INITIAL (fndecl) = current_binding_level->block;
 460 set_block_for_group (current_binding_level->block);
461}
462
463/* Set the jmpbuf_decl for the current binding level to DECL. */
464
465void
466set_block_jmpbuf_decl (tree decl)
467{
468 current_binding_level->jmpbuf_decl = decl;
469}
470
471/* Get the jmpbuf_decl, if any, for the current binding level. */
472
473tree
 474get_block_jmpbuf_decl (void)
475{
476 return current_binding_level->jmpbuf_decl;
477}
478
 479/* Exit a binding level. Set any BLOCK into the current code group. */
480
481void
 482gnat_poplevel (void)
483{
484 struct gnat_binding_level *level = current_binding_level;
485 tree block = level->block;
486
487 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
 488 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
489
 490 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
 491 are no variables, free the block and merge its subblocks into those of its
 492 parent block. Otherwise, add it to the list of its parent. */
493 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
494 ;
495 else if (BLOCK_VARS (block) == NULL_TREE)
496 {
497 BLOCK_SUBBLOCKS (level->chain->block)
498 = block_chainon (BLOCK_SUBBLOCKS (block),
499 BLOCK_SUBBLOCKS (level->chain->block));
500 BLOCK_CHAIN (block) = free_block_chain;
501 free_block_chain = block;
502 }
503 else
504 {
505 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
506 BLOCK_SUBBLOCKS (level->chain->block) = block;
507 TREE_USED (block) = 1;
508 set_block_for_group (block);
509 }
510
511 /* Free this binding structure. */
512 current_binding_level = level->chain;
513 level->chain = free_binding_level;
514 free_binding_level = level;
515}
516
517/* Exit a binding level and discard the associated BLOCK. */
518
519void
520gnat_zaplevel (void)
521{
522 struct gnat_binding_level *level = current_binding_level;
523 tree block = level->block;
524
525 BLOCK_CHAIN (block) = free_block_chain;
526 free_block_chain = block;
527
528 /* Free this binding structure. */
529 current_binding_level = level->chain;
530 level->chain = free_binding_level;
531 free_binding_level = level;
532}
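/* Illustrative usage sketch, not a quote of actual call sites: a caller in
   gigi typically brackets the translation of a scope with

     gnat_pushlevel ();
     ...
     gnat_pushdecl (gnu_decl, gnat_node);    (local decls land in the BLOCK)
     ...
     gnat_poplevel ();                       (or gnat_zaplevel to discard it)

   so that the declarations made in between end up in the BLOCK of the new
   binding level.  */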
 533\f
534/* Set the context of TYPE and its parallel types (if any) to CONTEXT. */
535
536static void
537gnat_set_type_context (tree type, tree context)
538{
539 tree decl = TYPE_STUB_DECL (type);
540
541 TYPE_CONTEXT (type) = context;
542
543 while (decl && DECL_PARALLEL_TYPE (decl))
544 {
545 TYPE_CONTEXT (DECL_PARALLEL_TYPE (decl)) = context;
546 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
547 }
548}
549
550/* Record DECL as belonging to the current lexical scope and use GNAT_NODE
551 for location information and flag propagation. */
552
553void
554gnat_pushdecl (tree decl, Node_Id gnat_node)
555{
 556 /* If DECL is public external or at top level, it has global context. */
 557 if ((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || global_bindings_p ())
558 {
559 if (!global_context)
560 global_context = build_translation_unit_decl (NULL_TREE);
561 DECL_CONTEXT (decl) = global_context;
562 }
563 else
564 {
565 DECL_CONTEXT (decl) = current_function_decl;
566
567 /* Functions imported in another function are not really nested.
568 For really nested functions mark them initially as needing
569 a static chain for uses of that flag before unnesting;
570 lower_nested_functions will then recompute it. */
571 if (TREE_CODE (decl) == FUNCTION_DECL && !TREE_PUBLIC (decl))
572 DECL_STATIC_CHAIN (decl) = 1;
573 }
574
 575 TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));
576
577 /* Set the location of DECL and emit a declaration for it. */
578 if (Present (gnat_node))
579 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
 580
581 add_decl_expr (decl, gnat_node);
582
583 /* Put the declaration on the list. The list of declarations is in reverse
584 order. The list will be reversed later. Put global declarations in the
585 globals list and local ones in the current block. But skip TYPE_DECLs
586 for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
587 with the debugger and aren't needed anyway. */
588 if (!(TREE_CODE (decl) == TYPE_DECL
589 && TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
 590 {
 591 if (DECL_EXTERNAL (decl))
 592 {
 593 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
 594 vec_safe_push (builtin_decls, decl);
 595 }
 596 else if (global_bindings_p ())
 597 vec_safe_push (global_decls, decl);
 598 else
 599 {
600 DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
601 BLOCK_VARS (current_binding_level->block) = decl;
602 }
603 }
604
605 /* For the declaration of a type, set its name if it either is not already
 606 set or if the previous type name was not derived from a source name.
607 We'd rather have the type named with a real name and all the pointer
608 types to the same object have the same POINTER_TYPE node. Code in the
609 equivalent function of c-decl.c makes a copy of the type node here, but
610 that may cause us trouble with incomplete types. We make an exception
611 for fat pointer types because the compiler automatically builds them
612 for unconstrained array types and the debugger uses them to represent
613 both these and pointers to these. */
614 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
615 {
616 tree t = TREE_TYPE (decl);
617
 618 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
 619 {
 620 /* Array and pointer types aren't "tagged" types so we force the
621 type to be associated with its typedef in the DWARF back-end,
622 in order to make sure that the latter is always preserved. */
623 if (!DECL_ARTIFICIAL (decl)
624 && (TREE_CODE (t) == ARRAY_TYPE
625 || TREE_CODE (t) == POINTER_TYPE))
626 {
627 tree tt = build_distinct_type_copy (t);
628 if (TREE_CODE (t) == POINTER_TYPE)
629 TYPE_NEXT_PTR_TO (t) = tt;
 630 TYPE_NAME (tt) = DECL_NAME (decl);
 631 gnat_set_type_context (tt, DECL_CONTEXT (decl));
632 TYPE_STUB_DECL (tt) = TYPE_STUB_DECL (t);
633 DECL_ORIGINAL_TYPE (decl) = tt;
634 }
635 }
 636 else if (TYPE_IS_FAT_POINTER_P (t))
 637 {
 638 /* We need a variant for the placeholder machinery to work. */
639 tree tt = build_variant_type_copy (t);
640 TYPE_NAME (tt) = decl;
 641 gnat_set_type_context (tt, DECL_CONTEXT (decl));
642 TREE_USED (tt) = TREE_USED (t);
643 TREE_TYPE (decl) = tt;
644 if (DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
645 DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
646 else
647 DECL_ORIGINAL_TYPE (decl) = t;
 648 DECL_ARTIFICIAL (decl) = 0;
 649 t = NULL_TREE;
650 }
651 else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
652 ;
653 else
654 t = NULL_TREE;
655
656 /* Propagate the name to all the anonymous variants. This is needed
657 for the type qualifiers machinery to work properly. */
658 if (t)
659 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
 660 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
661 {
662 TYPE_NAME (t) = decl;
 663 gnat_set_type_context (t, DECL_CONTEXT (decl));
 664 }
665 }
666}
667\f
668/* Create a record type that contains a SIZE bytes long field of TYPE with a
669 starting bit position so that it is aligned to ALIGN bits, and leaving at
670 least ROOM bytes free before the field. BASE_ALIGN is the alignment the
671 record is guaranteed to get. GNAT_NODE is used for the position of the
672 associated TYPE_DECL. */
673
674tree
675make_aligning_type (tree type, unsigned int align, tree size,
 676 unsigned int base_align, int room, Node_Id gnat_node)
677{
678 /* We will be crafting a record type with one field at a position set to be
679 the next multiple of ALIGN past record'address + room bytes. We use a
680 record placeholder to express record'address. */
681 tree record_type = make_node (RECORD_TYPE);
682 tree record = build0 (PLACEHOLDER_EXPR, record_type);
683
684 tree record_addr_st
685 = convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));
686
687 /* The diagram below summarizes the shape of what we manipulate:
688
689 <--------- pos ---------->
690 { +------------+-------------+-----------------+
691 record =>{ |############| ... | field (type) |
692 { +------------+-------------+-----------------+
693 |<-- room -->|<- voffset ->|<---- size ----->|
694 o o
695 | |
696 record_addr vblock_addr
697
698 Every length is in sizetype bytes there, except "pos" which has to be
699 set as a bit position in the GCC tree for the record. */
700 tree room_st = size_int (room);
701 tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
702 tree voffset_st, pos, field;
703
704 tree name = TYPE_NAME (type);
705
706 if (TREE_CODE (name) == TYPE_DECL)
707 name = DECL_NAME (name);
708 name = concat_name (name, "ALIGN");
709 TYPE_NAME (record_type) = name;
710
711 /* Compute VOFFSET and then POS. The next byte position multiple of some
712 alignment after some address is obtained by "and"ing the alignment minus
713 1 with the two's complement of the address. */
714 voffset_st = size_binop (BIT_AND_EXPR,
715 fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
716 size_int ((align / BITS_PER_UNIT) - 1));
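  /* Illustrative example, not in the original source: with ALIGN = 32 bits,
     i.e. 4 bytes, and a vblock address of 0x1005, the two's complement
     negation ends in ...effb and "and"ing it with 3 gives VOFFSET = 3, so
     the field starts at 0x1008, the next multiple of 4.  The concrete
     numbers are made up for the sake of the example.  */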
717
718 /* POS = (ROOM + VOFFSET) * BIT_PER_UNIT, in bitsizetype. */
719 pos = size_binop (MULT_EXPR,
720 convert (bitsizetype,
721 size_binop (PLUS_EXPR, room_st, voffset_st)),
722 bitsize_unit_node);
723
724 /* Craft the GCC record representation. We exceptionally do everything
725 manually here because 1) our generic circuitry is not quite ready to
726 handle the complex position/size expressions we are setting up, 2) we
727 have a strong simplifying factor at hand: we know the maximum possible
728 value of voffset, and 3) we have to set/reset at least the sizes in
729 accordance with this maximum value anyway, as we need them to convey
730 what should be "alloc"ated for this type.
731
732 Use -1 as the 'addressable' indication for the field to prevent the
733 creation of a bitfield. We don't need one, it would have damaging
734 consequences on the alignment computation, and create_field_decl would
735 make one without this special argument, for instance because of the
736 complex position expression. */
737 field = create_field_decl (get_identifier ("F"), type, record_type, size,
738 pos, 1, -1);
739 TYPE_FIELDS (record_type) = field;
740
741 TYPE_ALIGN (record_type) = base_align;
742 TYPE_USER_ALIGN (record_type) = 1;
743
744 TYPE_SIZE (record_type)
745 = size_binop (PLUS_EXPR,
746 size_binop (MULT_EXPR, convert (bitsizetype, size),
747 bitsize_unit_node),
748 bitsize_int (align + room * BITS_PER_UNIT));
749 TYPE_SIZE_UNIT (record_type)
750 = size_binop (PLUS_EXPR, size,
751 size_int (room + align / BITS_PER_UNIT));
752
753 SET_TYPE_MODE (record_type, BLKmode);
754 relate_alias_sets (record_type, type, ALIAS_SET_COPY);
755
756 /* Declare it now since it will never be declared otherwise. This is
757 necessary to ensure that its subtrees are properly marked. */
 758 create_type_decl (name, record_type, true, false, gnat_node);
759
760 return record_type;
761}
762
763/* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
764 as the field type of a packed record if IN_RECORD is true, or as the
765 component type of a packed array if IN_RECORD is false. See if we can
766 rewrite it either as a type that has a non-BLKmode, which we can pack
767 tighter in the packed record case, or as a smaller type. If so, return
768 the new type. If not, return the original type. */
769
770tree
771make_packable_type (tree type, bool in_record)
772{
773 unsigned HOST_WIDE_INT size = tree_low_cst (TYPE_SIZE (type), 1);
774 unsigned HOST_WIDE_INT new_size;
775 tree new_type, old_field, field_list = NULL_TREE;
776 unsigned int align;
777
778 /* No point in doing anything if the size is zero. */
779 if (size == 0)
780 return type;
781
782 new_type = make_node (TREE_CODE (type));
783
784 /* Copy the name and flags from the old type to that of the new.
785 Note that we rely on the pointer equality created here for
786 TYPE_NAME to look through conversions in various places. */
787 TYPE_NAME (new_type) = TYPE_NAME (type);
788 TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
789 TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
790 if (TREE_CODE (type) == RECORD_TYPE)
791 TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);
792
793 /* If we are in a record and have a small size, set the alignment to
794 try for an integral mode. Otherwise set it to try for a smaller
795 type with BLKmode. */
796 if (in_record && size <= MAX_FIXED_MODE_SIZE)
797 {
798 align = ceil_pow2 (size);
799 TYPE_ALIGN (new_type) = align;
800 new_size = (size + align - 1) & -align;
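      /* Illustrative example, not in the original source: for size = 13 bits,
	 ceil_pow2 gives align = 16 and (13 + 15) & -16 yields new_size = 16,
	 i.e. the size rounded up to the next multiple of the alignment.  */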
801 }
802 else
803 {
804 unsigned HOST_WIDE_INT align;
805
806 /* Do not try to shrink the size if the RM size is not constant. */
807 if (TYPE_CONTAINS_TEMPLATE_P (type)
808 || !host_integerp (TYPE_ADA_SIZE (type), 1))
809 return type;
810
811 /* Round the RM size up to a unit boundary to get the minimal size
812 for a BLKmode record. Give up if it's already the size. */
813 new_size = TREE_INT_CST_LOW (TYPE_ADA_SIZE (type));
814 new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
815 if (new_size == size)
816 return type;
817
818 align = new_size & -new_size;
819 TYPE_ALIGN (new_type) = MIN (TYPE_ALIGN (type), align);
820 }
821
822 TYPE_USER_ALIGN (new_type) = 1;
823
824 /* Now copy the fields, keeping the position and size as we don't want
825 to change the layout by propagating the packedness downwards. */
826 for (old_field = TYPE_FIELDS (type); old_field;
827 old_field = DECL_CHAIN (old_field))
828 {
829 tree new_field_type = TREE_TYPE (old_field);
830 tree new_field, new_size;
831
832 if (RECORD_OR_UNION_TYPE_P (new_field_type)
833 && !TYPE_FAT_POINTER_P (new_field_type)
834 && host_integerp (TYPE_SIZE (new_field_type), 1))
835 new_field_type = make_packable_type (new_field_type, true);
836
837 /* However, for the last field in a not already packed record type
838 that is of an aggregate type, we need to use the RM size in the
839 packable version of the record type, see finish_record_type. */
840 if (!DECL_CHAIN (old_field)
841 && !TYPE_PACKED (type)
842 && RECORD_OR_UNION_TYPE_P (new_field_type)
843 && !TYPE_FAT_POINTER_P (new_field_type)
844 && !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
845 && TYPE_ADA_SIZE (new_field_type))
846 new_size = TYPE_ADA_SIZE (new_field_type);
847 else
848 new_size = DECL_SIZE (old_field);
849
850 new_field
851 = create_field_decl (DECL_NAME (old_field), new_field_type, new_type,
852 new_size, bit_position (old_field),
853 TYPE_PACKED (type),
854 !DECL_NONADDRESSABLE_P (old_field));
855
856 DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (old_field);
857 SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, old_field);
858 if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
859 DECL_QUALIFIER (new_field) = DECL_QUALIFIER (old_field);
860
861 DECL_CHAIN (new_field) = field_list;
862 field_list = new_field;
863 }
864
865 finish_record_type (new_type, nreverse (field_list), 2, false);
866 relate_alias_sets (new_type, type, ALIAS_SET_COPY);
867 SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
868 DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));
869
870 /* If this is a padding record, we never want to make the size smaller
871 than what was specified. For QUAL_UNION_TYPE, also copy the size. */
872 if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
873 {
874 TYPE_SIZE (new_type) = TYPE_SIZE (type);
875 TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
876 new_size = size;
877 }
878 else
879 {
880 TYPE_SIZE (new_type) = bitsize_int (new_size);
881 TYPE_SIZE_UNIT (new_type)
882 = size_int ((new_size + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
883 }
884
885 if (!TYPE_CONTAINS_TEMPLATE_P (type))
886 SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));
887
888 compute_record_mode (new_type);
889
890 /* Try harder to get a packable type if necessary, for example
891 in case the record itself contains a BLKmode field. */
892 if (in_record && TYPE_MODE (new_type) == BLKmode)
893 SET_TYPE_MODE (new_type,
894 mode_for_size_tree (TYPE_SIZE (new_type), MODE_INT, 1));
895
896 /* If neither the mode nor the size has shrunk, return the old type. */
897 if (TYPE_MODE (new_type) == BLKmode && new_size >= size)
898 return type;
899
900 return new_type;
901}
902
903/* Given a type TYPE, return a new type whose size is appropriate for SIZE.
904 If TYPE is the best type, return it. Otherwise, make a new type. We
905 only support new integral and pointer types. FOR_BIASED is true if
906 we are making a biased type. */
907
908tree
909make_type_from_size (tree type, tree size_tree, bool for_biased)
910{
911 unsigned HOST_WIDE_INT size;
912 bool biased_p;
913 tree new_type;
914
915 /* If size indicates an error, just return TYPE to avoid propagating
916 the error. Likewise if it's too large to represent. */
917 if (!size_tree || !host_integerp (size_tree, 1))
918 return type;
919
920 size = tree_low_cst (size_tree, 1);
921
922 switch (TREE_CODE (type))
923 {
924 case INTEGER_TYPE:
925 case ENUMERAL_TYPE:
926 case BOOLEAN_TYPE:
927 biased_p = (TREE_CODE (type) == INTEGER_TYPE
928 && TYPE_BIASED_REPRESENTATION_P (type));
929
930 /* Integer types with precision 0 are forbidden. */
931 if (size == 0)
932 size = 1;
933
934 /* Only do something if the type isn't a packed array type and doesn't
935 already have the proper size and the size isn't too large. */
936 if (TYPE_IS_PACKED_ARRAY_TYPE_P (type)
937 || (TYPE_PRECISION (type) == size && biased_p == for_biased)
938 || size > LONG_LONG_TYPE_SIZE)
939 break;
940
941 biased_p |= for_biased;
942 if (TYPE_UNSIGNED (type) || biased_p)
943 new_type = make_unsigned_type (size);
944 else
945 new_type = make_signed_type (size);
946 TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
947 SET_TYPE_RM_MIN_VALUE (new_type,
948 convert (TREE_TYPE (new_type),
949 TYPE_MIN_VALUE (type)));
950 SET_TYPE_RM_MAX_VALUE (new_type,
951 convert (TREE_TYPE (new_type),
952 TYPE_MAX_VALUE (type)));
953 /* Copy the name to show that it's essentially the same type and
954 not a subrange type. */
955 TYPE_NAME (new_type) = TYPE_NAME (type);
956 TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
957 SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
958 return new_type;
959
960 case RECORD_TYPE:
961 /* Do something if this is a fat pointer, in which case we
962 may need to return the thin pointer. */
963 if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
964 {
965 enum machine_mode p_mode = mode_for_size (size, MODE_INT, 0);
966 if (!targetm.valid_pointer_mode (p_mode))
967 p_mode = ptr_mode;
968 return
969 build_pointer_type_for_mode
970 (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
971 p_mode, 0);
972 }
973 break;
974
975 case POINTER_TYPE:
976 /* Only do something if this is a thin pointer, in which case we
977 may need to return the fat pointer. */
978 if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
979 return
980 build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
981 break;
982
983 default:
984 break;
985 }
986
987 return type;
988}
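/* Illustrative usage sketch, not a quote of an actual call site: asking for
   an 8-bit version of a wider integer type, e.g.

     tree gnu_small = make_type_from_size (gnu_int_type, bitsize_int (8), false);

   goes through the INTEGER_TYPE case above and returns a new integral type
   with TYPE_PRECISION 8 that keeps the name and RM bounds of the original.
   The variable names are hypothetical.  */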
989
990/* See if the data pointed to by the hash table slot is marked. */
991
992static int
993pad_type_hash_marked_p (const void *p)
994{
995 const_tree const type = ((const struct pad_type_hash *) p)->type;
996
997 return ggc_marked_p (type);
998}
999
1000/* Return the cached hash value. */
1001
1002static hashval_t
1003pad_type_hash_hash (const void *p)
1004{
1005 return ((const struct pad_type_hash *) p)->hash;
1006}
1007
1008/* Return 1 iff the padded types are equivalent. */
1009
1010static int
1011pad_type_hash_eq (const void *p1, const void *p2)
1012{
1013 const struct pad_type_hash *const t1 = (const struct pad_type_hash *) p1;
1014 const struct pad_type_hash *const t2 = (const struct pad_type_hash *) p2;
1015 tree type1, type2;
1016
1017 if (t1->hash != t2->hash)
1018 return 0;
1019
1020 type1 = t1->type;
1021 type2 = t2->type;
1022
1023 /* We consider that the padded types are equivalent if they pad the same
1024 type and have the same size, alignment and RM size. Taking the mode
1025 into account is redundant since it is determined by the others. */
1026 return
1027 TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
1028 && TYPE_SIZE (type1) == TYPE_SIZE (type2)
1029 && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
1030 && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2);
1031}
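/* Illustrative note, not in the original source: by this criterion, two PAD
   records built by maybe_pad_type around the same inner type, both with a
   64-bit size, 32-bit alignment and the same RM size, compare equal and are
   therefore shared through pad_type_hash_table.  */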
1032
1033/* Ensure that TYPE has SIZE and ALIGN. Make and return a new padded type
1034 if needed. We have already verified that SIZE and TYPE are large enough.
1035 GNAT_ENTITY is used to name the resulting record and to issue a warning.
1036 IS_COMPONENT_TYPE is true if this is being done for the component type of
1037 an array. IS_USER_TYPE is true if the original type needs to be completed.
1038 DEFINITION is true if this type is being defined. SET_RM_SIZE is true if
1039 the RM size of the resulting type is to be set to SIZE too. */
1040
1041tree
1042maybe_pad_type (tree type, tree size, unsigned int align,
1043 Entity_Id gnat_entity, bool is_component_type,
1044 bool is_user_type, bool definition, bool set_rm_size)
1045{
1046 tree orig_size = TYPE_SIZE (type);
1047 tree record, field;
1048
1049 /* If TYPE is a padded type, see if it agrees with any size and alignment
1050 we were given. If so, return the original type. Otherwise, strip
1051 off the padding, since we will either be returning the inner type
1052 or repadding it. If no size or alignment is specified, use that of
1053 the original padded type. */
1054 if (TYPE_IS_PADDING_P (type))
1055 {
1056 if ((!size
1057 || operand_equal_p (round_up (size,
1058 MAX (align, TYPE_ALIGN (type))),
1059 round_up (TYPE_SIZE (type),
1060 MAX (align, TYPE_ALIGN (type))),
1061 0))
1062 && (align == 0 || align == TYPE_ALIGN (type)))
1063 return type;
1064
1065 if (!size)
1066 size = TYPE_SIZE (type);
1067 if (align == 0)
1068 align = TYPE_ALIGN (type);
1069
1070 type = TREE_TYPE (TYPE_FIELDS (type));
1071 orig_size = TYPE_SIZE (type);
1072 }
1073
1074 /* If the size is either not being changed or is being made smaller (which
1075 is not done here and is only valid for bitfields anyway), show the size
1076 isn't changing. Likewise, clear the alignment if it isn't being
1077 changed. Then return if we aren't doing anything. */
1078 if (size
1079 && (operand_equal_p (size, orig_size, 0)
1080 || (TREE_CODE (orig_size) == INTEGER_CST
1081 && tree_int_cst_lt (size, orig_size))))
1082 size = NULL_TREE;
1083
1084 if (align == TYPE_ALIGN (type))
1085 align = 0;
1086
1087 if (align == 0 && !size)
1088 return type;
1089
1090 /* If requested, complete the original type and give it a name. */
1091 if (is_user_type)
1092 create_type_decl (get_entity_name (gnat_entity), type,
 1093 !Comes_From_Source (gnat_entity),
1094 !(TYPE_NAME (type)
1095 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1096 && DECL_IGNORED_P (TYPE_NAME (type))),
1097 gnat_entity);
1098
1099 /* We used to modify the record in place in some cases, but that could
1100 generate incorrect debugging information. So make a new record
1101 type and name. */
1102 record = make_node (RECORD_TYPE);
1103 TYPE_PADDING_P (record) = 1;
1104
1105 if (Present (gnat_entity))
1106 TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD");
1107
1108 TYPE_ALIGN (record) = align;
1109 TYPE_SIZE (record) = size ? size : orig_size;
1110 TYPE_SIZE_UNIT (record)
1111 = convert (sizetype,
1112 size_binop (CEIL_DIV_EXPR, TYPE_SIZE (record),
1113 bitsize_unit_node));
1114
1115 /* If we are changing the alignment and the input type is a record with
1116 BLKmode and a small constant size, try to make a form that has an
1117 integral mode. This might allow the padding record to also have an
1118 integral mode, which will be much more efficient. There is no point
1119 in doing so if a size is specified unless it is also a small constant
1120 size and it is incorrect to do so if we cannot guarantee that the mode
1121 will be naturally aligned since the field must always be addressable.
1122
1123 ??? This might not always be a win when done for a stand-alone object:
1124 since the nominal and the effective type of the object will now have
1125 different modes, a VIEW_CONVERT_EXPR will be required for converting
1126 between them and it might be hard to overcome afterwards, including
1127 at the RTL level when the stand-alone object is accessed as a whole. */
1128 if (align != 0
1129 && RECORD_OR_UNION_TYPE_P (type)
1130 && TYPE_MODE (type) == BLKmode
1131 && !TYPE_BY_REFERENCE_P (type)
1132 && TREE_CODE (orig_size) == INTEGER_CST
1133 && !TREE_OVERFLOW (orig_size)
1134 && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
1135 && (!size
1136 || (TREE_CODE (size) == INTEGER_CST
1137 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
1138 {
1139 tree packable_type = make_packable_type (type, true);
1140 if (TYPE_MODE (packable_type) != BLKmode
1141 && align >= TYPE_ALIGN (packable_type))
1142 type = packable_type;
1143 }
1144
1145 /* Now create the field with the original size. */
1146 field = create_field_decl (get_identifier ("F"), type, record, orig_size,
1147 bitsize_zero_node, 0, 1);
1148 DECL_INTERNAL_P (field) = 1;
1149
1150 /* Do not emit debug info until after the auxiliary record is built. */
1151 finish_record_type (record, field, 1, false);
1152
1153 /* Set the RM size if requested. */
1154 if (set_rm_size)
1155 {
1156 SET_TYPE_ADA_SIZE (record, size ? size : orig_size);
1157
1158 /* If the padded type is complete and has constant size, we canonicalize
1159 it by means of the hash table. This is consistent with the language
1160 semantics and ensures that gigi and the middle-end have a common view
1161 of these padded types. */
1162 if (TREE_CONSTANT (TYPE_SIZE (record)))
1163 {
1164 hashval_t hashcode;
1165 struct pad_type_hash in, *h;
1166 void **loc;
1167
1168 hashcode = iterative_hash_object (TYPE_HASH (type), 0);
1169 hashcode = iterative_hash_expr (TYPE_SIZE (record), hashcode);
1170 hashcode = iterative_hash_hashval_t (TYPE_ALIGN (record), hashcode);
1171 hashcode = iterative_hash_expr (TYPE_ADA_SIZE (record), hashcode);
1172
1173 in.hash = hashcode;
1174 in.type = record;
1175 h = (struct pad_type_hash *)
1176 htab_find_with_hash (pad_type_hash_table, &in, hashcode);
1177 if (h)
1178 {
1179 record = h->type;
1180 goto built;
1181 }
1182
1183 h = ggc_alloc_pad_type_hash ();
1184 h->hash = hashcode;
1185 h->type = record;
1186 loc = htab_find_slot_with_hash (pad_type_hash_table, h, hashcode,
1187 INSERT);
1188 *loc = (void *)h;
1189 }
1190 }
1191
1192 /* Unless debugging information isn't being written for the input type,
1193 write a record that shows what we are a subtype of and also make a
1194 variable that indicates our size, if still variable. */
1195 if (TREE_CODE (orig_size) != INTEGER_CST
1196 && TYPE_NAME (record)
1197 && TYPE_NAME (type)
1198 && !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1199 && DECL_IGNORED_P (TYPE_NAME (type))))
1200 {
1201 tree marker = make_node (RECORD_TYPE);
1202 tree name = TYPE_NAME (record);
1203 tree orig_name = TYPE_NAME (type);
1204
1205 if (TREE_CODE (name) == TYPE_DECL)
1206 name = DECL_NAME (name);
1207
1208 if (TREE_CODE (orig_name) == TYPE_DECL)
1209 orig_name = DECL_NAME (orig_name);
1210
1211 TYPE_NAME (marker) = concat_name (name, "XVS");
1212 finish_record_type (marker,
1213 create_field_decl (orig_name,
1214 build_reference_type (type),
1215 marker, NULL_TREE, NULL_TREE,
1216 0, 0),
1217 0, true);
1218
1219 add_parallel_type (record, marker);
1220
1221 if (definition && size && TREE_CODE (size) != INTEGER_CST)
1222 TYPE_SIZE_UNIT (marker)
1223 = create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
1224 TYPE_SIZE_UNIT (record), false, false, false,
1225 false, NULL, gnat_entity);
1226 }
1227
1228 rest_of_record_type_compilation (record);
1229
1230built:
1231 /* If the size was widened explicitly, maybe give a warning. Take the
1232 original size as the maximum size of the input if there was an
1233 unconstrained record involved and round it up to the specified alignment,
1234 if one was specified. But don't do it if we are just annotating types
1235 and the type is tagged, since tagged types aren't fully laid out in this
1236 mode. */
1237 if (!size
1238 || TREE_CODE (size) == COND_EXPR
1239 || TREE_CODE (size) == MAX_EXPR
1240 || No (gnat_entity)
1241 || (type_annotate_only && Is_Tagged_Type (Etype (gnat_entity))))
1242 return record;
1243
1244 if (CONTAINS_PLACEHOLDER_P (orig_size))
1245 orig_size = max_size (orig_size, true);
1246
1247 if (align)
1248 orig_size = round_up (orig_size, align);
1249
1250 if (!operand_equal_p (size, orig_size, 0)
1251 && !(TREE_CODE (size) == INTEGER_CST
1252 && TREE_CODE (orig_size) == INTEGER_CST
1253 && (TREE_OVERFLOW (size)
1254 || TREE_OVERFLOW (orig_size)
1255 || tree_int_cst_lt (size, orig_size))))
1256 {
1257 Node_Id gnat_error_node = Empty;
1258
1259 if (Is_Packed_Array_Type (gnat_entity))
1260 gnat_entity = Original_Array_Type (gnat_entity);
1261
1262 if ((Ekind (gnat_entity) == E_Component
1263 || Ekind (gnat_entity) == E_Discriminant)
1264 && Present (Component_Clause (gnat_entity)))
1265 gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
1266 else if (Present (Size_Clause (gnat_entity)))
1267 gnat_error_node = Expression (Size_Clause (gnat_entity));
1268
1269 /* Generate message only for entities that come from source, since
1270 if we have an entity created by expansion, the message will be
1271 generated for some other corresponding source entity. */
1272 if (Comes_From_Source (gnat_entity))
1273 {
1274 if (Present (gnat_error_node))
1275 post_error_ne_tree ("{^ }bits of & unused?",
1276 gnat_error_node, gnat_entity,
1277 size_diffop (size, orig_size));
1278 else if (is_component_type)
1279 post_error_ne_tree ("component of& padded{ by ^ bits}?",
1280 gnat_entity, gnat_entity,
1281 size_diffop (size, orig_size));
1282 }
1283 }
1284
1285 return record;
1286}
1287\f
1288/* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
1289 If this is a multi-dimensional array type, do this recursively.
1290
1291 OP may be
1292 - ALIAS_SET_COPY: the new set is made a copy of the old one.
1293 - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
1294 - ALIAS_SET_SUBSET: the new set is made a subset of the old one. */
1295
1296void
1297relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
1298{
1299 /* Remove any padding from GNU_OLD_TYPE. It doesn't matter in the case
1300 of a one-dimensional array, since the padding has the same alias set
1301 as the field type, but if it's a multi-dimensional array, we need to
1302 see the inner types. */
1303 while (TREE_CODE (gnu_old_type) == RECORD_TYPE
1304 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
1305 || TYPE_PADDING_P (gnu_old_type)))
1306 gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));
1307
1308 /* Unconstrained array types are deemed incomplete and would thus be given
1309 alias set 0. Retrieve the underlying array type. */
1310 if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
1311 gnu_old_type
1312 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
1313 if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
1314 gnu_new_type
1315 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));
1316
1317 if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
1318 && TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
1319 && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
1320 relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);
1321
1322 switch (op)
1323 {
1324 case ALIAS_SET_COPY:
1325 /* The alias set shouldn't be copied between array types with different
1326 aliasing settings because this can break the aliasing relationship
1327 between the array type and its element type. */
1328#ifndef ENABLE_CHECKING
1329 if (flag_strict_aliasing)
1330#endif
1331 gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
1332 && TREE_CODE (gnu_old_type) == ARRAY_TYPE
1333 && TYPE_NONALIASED_COMPONENT (gnu_new_type)
1334 != TYPE_NONALIASED_COMPONENT (gnu_old_type)));
1335
1336 TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
1337 break;
1338
1339 case ALIAS_SET_SUBSET:
1340 case ALIAS_SET_SUPERSET:
1341 {
1342 alias_set_type old_set = get_alias_set (gnu_old_type);
1343 alias_set_type new_set = get_alias_set (gnu_new_type);
1344
1345 /* Do nothing if the alias sets conflict. This ensures that we
1346 never call record_alias_subset several times for the same pair
1347 or at all for alias set 0. */
1348 if (!alias_sets_conflict_p (old_set, new_set))
1349 {
1350 if (op == ALIAS_SET_SUBSET)
1351 record_alias_subset (old_set, new_set);
1352 else
1353 record_alias_subset (new_set, old_set);
1354 }
1355 }
1356 break;
1357
1358 default:
1359 gcc_unreachable ();
1360 }
1361
1362 record_component_aliases (gnu_new_type);
1363}
1364\f
1365/* Record TYPE as a builtin type for Ada. NAME is the name of the type.
1366 ARTIFICIAL_P is true if it's a type that was generated by the compiler. */
1367
1368void
 1369record_builtin_type (const char *name, tree type, bool artificial_p)
 1370{
1371 tree type_decl = build_decl (input_location,
1372 TYPE_DECL, get_identifier (name), type);
 1373 DECL_ARTIFICIAL (type_decl) = artificial_p;
 1374 TYPE_ARTIFICIAL (type) = artificial_p;
 1375 gnat_pushdecl (type_decl, Empty);
 1376
1377 if (debug_hooks->type_decl)
1378 debug_hooks->type_decl (type_decl, false);
1379}
1380\f
1381/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1382 finish constructing the record type as a fat pointer type. */
1383
1384void
1385finish_fat_pointer_type (tree record_type, tree field_list)
1386{
1387 /* Make sure we can put it into a register. */
1388 if (STRICT_ALIGNMENT)
1389 TYPE_ALIGN (record_type) = MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE);
1390
1391 /* Show what it really is. */
1392 TYPE_FAT_POINTER_P (record_type) = 1;
1393
1394 /* Do not emit debug info for it since the types of its fields may still be
1395 incomplete at this point. */
1396 finish_record_type (record_type, field_list, 0, false);
1397
1398 /* Force type_contains_placeholder_p to return true on it. Although the
1399 PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
1400 type but the representation of the unconstrained array. */
1401 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
1402}
1403
 1404/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1405 finish constructing the record or union type. If REP_LEVEL is zero, this
1406 record has no representation clause and so will be entirely laid out here.
1407 If REP_LEVEL is one, this record has a representation clause and has been
1408 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
1409 this record is derived from a parent record and thus inherits its layout;
1410 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
1411 we need to write debug information about this type. */
1412
1413void
1414finish_record_type (tree record_type, tree field_list, int rep_level,
1415 bool debug_info_p)
1416{
1417 enum tree_code code = TREE_CODE (record_type);
1418 tree name = TYPE_NAME (record_type);
1419 tree ada_size = bitsize_zero_node;
1420 tree size = bitsize_zero_node;
1421 bool had_size = TYPE_SIZE (record_type) != 0;
1422 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
1423 bool had_align = TYPE_ALIGN (record_type) != 0;
1424 tree field;
1425
 1426 TYPE_FIELDS (record_type) = field_list;
 1427
1428 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
1429 generate debug info and have a parallel type. */
1430 if (name && TREE_CODE (name) == TYPE_DECL)
1431 name = DECL_NAME (name);
1432 TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);
1433
1434 /* Globally initialize the record first. If this is a rep'ed record,
1435 that just means some initializations; otherwise, layout the record. */
1436 if (rep_level > 0)
1437 {
1438 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
1439
1440 if (!had_size_unit)
1441 TYPE_SIZE_UNIT (record_type) = size_zero_node;
 1442
1443 if (!had_size)
1444 TYPE_SIZE (record_type) = bitsize_zero_node;
1445
1446 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
1447 out just like a UNION_TYPE, since the size will be fixed. */
1448 else if (code == QUAL_UNION_TYPE)
1449 code = UNION_TYPE;
1450 }
1451 else
1452 {
1453 /* Ensure there isn't a size already set. There can be in an error
1454 case where there is a rep clause but all fields have errors and
1455 no longer have a position. */
1456 TYPE_SIZE (record_type) = 0;
1457
1458 /* Ensure we use the traditional GCC layout for bitfields when we need
1459 to pack the record type or have a representation clause. The other
1460 possible layout (Microsoft C compiler), if available, would prevent
1461 efficient packing in almost all cases. */
1462#ifdef TARGET_MS_BITFIELD_LAYOUT
1463 if (TARGET_MS_BITFIELD_LAYOUT && TYPE_PACKED (record_type))
1464 decl_attributes (&record_type,
1465 tree_cons (get_identifier ("gcc_struct"),
1466 NULL_TREE, NULL_TREE),
1467 ATTR_FLAG_TYPE_IN_PLACE);
1468#endif
1469
a1ab4c31
AC
1470 layout_type (record_type);
1471 }
1472
1473 /* At this point, the position and size of each field is known. It was
1474 either set before entry by a rep clause, or by laying out the type above.
1475
1476 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
1477 to compute the Ada size; the GCC size and alignment (for rep'ed records
1478 that are not padding types); and the mode (for rep'ed records). We also
1479 clear the DECL_BIT_FIELD indication for the cases we know have not been
1480 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
1481
1482 if (code == QUAL_UNION_TYPE)
032d1b71 1483 field_list = nreverse (field_list);
a1ab4c31 1484
910ad8de 1485 for (field = field_list; field; field = DECL_CHAIN (field))
a1ab4c31
AC
1486 {
1487 tree type = TREE_TYPE (field);
1488 tree pos = bit_position (field);
1489 tree this_size = DECL_SIZE (field);
1490 tree this_ada_size;
1491
e1e5852c 1492 if (RECORD_OR_UNION_TYPE_P (type)
315cff15 1493 && !TYPE_FAT_POINTER_P (type)
a1ab4c31
AC
1494 && !TYPE_CONTAINS_TEMPLATE_P (type)
1495 && TYPE_ADA_SIZE (type))
1496 this_ada_size = TYPE_ADA_SIZE (type);
1497 else
1498 this_ada_size = this_size;
1499
1500 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
1501 if (DECL_BIT_FIELD (field)
1502 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
1503 {
1504 unsigned int align = TYPE_ALIGN (type);
1505
1506 /* In the general case, type alignment is required. */
1507 if (value_factor_p (pos, align))
1508 {
1509 /* The enclosing record type must be sufficiently aligned.
1510 Otherwise, if no alignment was specified for it and it
1511 has been laid out already, bump its alignment to the
1512 desired one if this is compatible with its size. */
1513 if (TYPE_ALIGN (record_type) >= align)
1514 {
1515 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
1516 DECL_BIT_FIELD (field) = 0;
1517 }
1518 else if (!had_align
1519 && rep_level == 0
1520 && value_factor_p (TYPE_SIZE (record_type), align))
1521 {
1522 TYPE_ALIGN (record_type) = align;
1523 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
1524 DECL_BIT_FIELD (field) = 0;
1525 }
1526 }
1527
1528 /* In the non-strict alignment case, only byte alignment is required. */
1529 if (!STRICT_ALIGNMENT
1530 && DECL_BIT_FIELD (field)
1531 && value_factor_p (pos, BITS_PER_UNIT))
1532 DECL_BIT_FIELD (field) = 0;
1533 }
1534
c1abd261
EB
1535 /* If we still have DECL_BIT_FIELD set at this point, we know that the
1536 field is technically not addressable. Except that it can actually
1537 be addressed if it is BLKmode and happens to be properly aligned. */
1538 if (DECL_BIT_FIELD (field)
1539 && !(DECL_MODE (field) == BLKmode
1540 && value_factor_p (pos, BITS_PER_UNIT)))
1541 DECL_NONADDRESSABLE_P (field) = 1;
a1ab4c31
AC
1542
1543 /* A type must be as aligned as its most aligned field that is not
1544 a bit-field. But this is already enforced by layout_type. */
1545 if (rep_level > 0 && !DECL_BIT_FIELD (field))
1546 TYPE_ALIGN (record_type)
1547 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
1548
1549 switch (code)
1550 {
1551 case UNION_TYPE:
1552 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
1553 size = size_binop (MAX_EXPR, size, this_size);
1554 break;
1555
1556 case QUAL_UNION_TYPE:
1557 ada_size
1558 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1559 this_ada_size, ada_size);
1560 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1561 this_size, size);
1562 break;
1563
1564 case RECORD_TYPE:
1565 /* Since we know here that all fields are sorted in order of
1566 increasing bit position, the size of the record is one
1567 higher than the ending bit of the last field processed
1568 unless we have a rep clause, since in that case we might
1569 have a field outside a QUAL_UNION_TYPE that has a higher ending
1570 position. So use a MAX in that case. Also, if this field is a
1571 QUAL_UNION_TYPE, we need to take into account the previous size in
1572 the case of empty variants. */
1573 ada_size
1574 = merge_sizes (ada_size, pos, this_ada_size,
1575 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1576 size
1577 = merge_sizes (size, pos, this_size,
1578 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1579 break;
1580
1581 default:
1582 gcc_unreachable ();
1583 }
1584 }
1585
1586 if (code == QUAL_UNION_TYPE)
032d1b71 1587 nreverse (field_list);
a1ab4c31
AC
1588
1589 if (rep_level < 2)
1590 {
1591 /* If this is a padding record, we never want to make the size smaller
1592 than what was specified in it, if any. */
315cff15 1593 if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
a1ab4c31
AC
1594 size = TYPE_SIZE (record_type);
1595
1596 /* Now set any of the values we've just computed that apply. */
315cff15 1597 if (!TYPE_FAT_POINTER_P (record_type)
a1ab4c31
AC
1598 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
1599 SET_TYPE_ADA_SIZE (record_type, ada_size);
1600
1601 if (rep_level > 0)
1602 {
1603 tree size_unit = had_size_unit
1604 ? TYPE_SIZE_UNIT (record_type)
1605 : convert (sizetype,
1606 size_binop (CEIL_DIV_EXPR, size,
1607 bitsize_unit_node));
1608 unsigned int align = TYPE_ALIGN (record_type);
1609
1610 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
1611 TYPE_SIZE_UNIT (record_type)
1612 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
1613
1614 compute_record_mode (record_type);
1615 }
1616 }
1617
032d1b71 1618 if (debug_info_p)
a1ab4c31
AC
1619 rest_of_record_type_compilation (record_type);
1620}
1621
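/* Illustrative sketch, not part of the original source: a hypothetical gigi
   caller with no representation clause might build a small record roughly
   as follows (identifiers are made up and the usual gigi context is
   assumed):

     tree rec = make_node (RECORD_TYPE);
     TYPE_NAME (rec) = get_identifier ("rec");
     tree f1 = create_field_decl (get_identifier ("f1"), integer_type_node,
                                  rec, NULL_TREE, NULL_TREE, 0, 0);
     tree f2 = create_field_decl (get_identifier ("f2"), integer_type_node,
                                  rec, NULL_TREE, NULL_TREE, 0, 0);
     DECL_CHAIN (f1) = f2;
     finish_record_type (rec, f1, 0, true);

   REP_LEVEL 0 requests a full layout here; with a representation clause the
   caller would set the position and size of each field beforehand and pass
   REP_LEVEL 1 so that only sizes, alignment and mode are finalized.  */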
a5695aa2
EB
1622/* Append PARALLEL_TYPE on the chain of parallel types of TYPE. */
1623
1624void
1625add_parallel_type (tree type, tree parallel_type)
1626{
1627 tree decl = TYPE_STUB_DECL (type);
1628
1629 while (DECL_PARALLEL_TYPE (decl))
1630 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
1631
1632 SET_DECL_PARALLEL_TYPE (decl, parallel_type);
1633}
1634
1635/* Return true if TYPE has a parallel type. */
1636
1637static bool
1638has_parallel_type (tree type)
1639{
1640 tree decl = TYPE_STUB_DECL (type);
1641
1642 return DECL_PARALLEL_TYPE (decl) != NULL_TREE;
1643}
1644
032d1b71
EB
1645/* Wrap up compilation of RECORD_TYPE, i.e. output all the debug information
1646 associated with it. It need not be invoked directly in most cases since
1647 finish_record_type takes care of doing so, but this can be necessary if
1648 a parallel type is to be attached to the record type. */
a1ab4c31
AC
1649
1650void
1651rest_of_record_type_compilation (tree record_type)
1652{
a1ab4c31 1653 bool var_size = false;
fb88e1dd 1654 tree field;
a1ab4c31 1655
fb88e1dd
EB
1656 /* If this is a padded type, the bulk of the debug info has already been
1657 generated for the field's type. */
1658 if (TYPE_IS_PADDING_P (record_type))
1659 return;
1660
a5695aa2
EB
1661 /* If the type already has a parallel type (XVS type), then we're done. */
1662 if (has_parallel_type (record_type))
1663 return;
1664
fb88e1dd 1665 for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
a1ab4c31
AC
1666 {
1667 /* We need to make an XVE/XVU record if any field has variable size,
1668 whether or not the record does. For example, if we have a union,
1669 it may be that all fields, rounded up to the alignment, have the
1670 same size, in which case we'll use that size. But the debug
1671 output routines (except Dwarf2) won't be able to output the fields,
1672 so we need to make the special record. */
1673 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
1674 /* If a field has a non-constant qualifier, the record will have
1675 variable size too. */
fb88e1dd 1676 || (TREE_CODE (record_type) == QUAL_UNION_TYPE
a1ab4c31
AC
1677 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
1678 {
1679 var_size = true;
1680 break;
1681 }
1682 }
1683
fb88e1dd
EB
1684 /* If this record type is of variable size, make a parallel record type that
1685 will tell the debugger how the former is laid out (see exp_dbug.ads). */
1686 if (var_size)
a1ab4c31
AC
1687 {
1688 tree new_record_type
1689 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
1690 ? UNION_TYPE : TREE_CODE (record_type));
0fb2335d 1691 tree orig_name = TYPE_NAME (record_type), new_name;
a1ab4c31 1692 tree last_pos = bitsize_zero_node;
0fb2335d 1693 tree old_field, prev_old_field = NULL_TREE;
a1ab4c31 1694
0fb2335d
EB
1695 if (TREE_CODE (orig_name) == TYPE_DECL)
1696 orig_name = DECL_NAME (orig_name);
1697
1698 new_name
1699 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
1700 ? "XVU" : "XVE");
1701 TYPE_NAME (new_record_type) = new_name;
a1ab4c31
AC
1702 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
1703 TYPE_STUB_DECL (new_record_type)
0fb2335d 1704 = create_type_stub_decl (new_name, new_record_type);
a1ab4c31
AC
1705 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
1706 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
1707 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
1708 TYPE_SIZE_UNIT (new_record_type)
1709 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
1710
a1ab4c31
AC
1711 /* Now scan all the fields, replacing each field with a new
1712 field corresponding to the new encoding. */
1713 for (old_field = TYPE_FIELDS (record_type); old_field;
910ad8de 1714 old_field = DECL_CHAIN (old_field))
a1ab4c31
AC
1715 {
1716 tree field_type = TREE_TYPE (old_field);
1717 tree field_name = DECL_NAME (old_field);
1718 tree new_field;
1719 tree curpos = bit_position (old_field);
1720 bool var = false;
1721 unsigned int align = 0;
1722 tree pos;
1723
1724 /* See how the position was modified from the last position.
1725
1726 There are two basic cases we support: a value was added
1727 to the last position or the last position was rounded to
1728 a boundary and then something was added. Check for the
1729 first case first. If not, see if there is any evidence
1730 of rounding. If so, round the last position and try
1731 again.
1732
1733 If this is a union, the position can be taken as zero. */
1734
1735 /* Some computations depend on the shape of the position expression,
1736 so strip conversions to make sure it's exposed. */
1737 curpos = remove_conversions (curpos, true);
1738
1739 if (TREE_CODE (new_record_type) == UNION_TYPE)
1740 pos = bitsize_zero_node, align = 0;
1741 else
1742 pos = compute_related_constant (curpos, last_pos);
1743
1744 if (!pos && TREE_CODE (curpos) == MULT_EXPR
1745 && host_integerp (TREE_OPERAND (curpos, 1), 1))
1746 {
1747 tree offset = TREE_OPERAND (curpos, 0);
1748 align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);
1749
f58e663b
EB
1750 /* An offset which is a bitwise AND with a mask increases the
1751 alignment according to the number of trailing zeros. */
a1ab4c31
AC
1752 offset = remove_conversions (offset, true);
1753 if (TREE_CODE (offset) == BIT_AND_EXPR
f58e663b 1754 && TREE_CODE (TREE_OPERAND (offset, 1)) == INTEGER_CST)
a1ab4c31 1755 {
f58e663b
EB
1756 unsigned HOST_WIDE_INT mask
1757 = TREE_INT_CST_LOW (TREE_OPERAND (offset, 1));
1758 unsigned int i;
1759
1760 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
1761 {
1762 if (mask & 1)
1763 break;
1764 mask >>= 1;
1765 align *= 2;
1766 }
a1ab4c31
AC
1767 }
1768
1769 pos = compute_related_constant (curpos,
1770 round_up (last_pos, align));
1771 }
1772 else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
1773 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
1774 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
1775 && host_integerp (TREE_OPERAND
1776 (TREE_OPERAND (curpos, 0), 1),
1777 1))
1778 {
1779 align
1780 = tree_low_cst
1781 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
1782 pos = compute_related_constant (curpos,
1783 round_up (last_pos, align));
1784 }
1785 else if (potential_alignment_gap (prev_old_field, old_field,
1786 pos))
1787 {
1788 align = TYPE_ALIGN (field_type);
1789 pos = compute_related_constant (curpos,
1790 round_up (last_pos, align));
1791 }
1792
1793 /* If we can't compute a position, set it to zero.
1794
1795 ??? We really should abort here, but it's too much work
1796 to get this correct for all cases. */
1797
1798 if (!pos)
1799 pos = bitsize_zero_node;
1800
1801 /* See if this type is variable-sized and make a pointer type
1802 and indicate the indirection if so. Beware that the debug
1803 back-end may adjust the position computed above according
1804 to the alignment of the field type, i.e. the pointer type
1805 in this case, if we don't preventively counter that. */
1806 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
1807 {
1808 field_type = build_pointer_type (field_type);
1809 if (align != 0 && TYPE_ALIGN (field_type) > align)
1810 {
1811 field_type = copy_node (field_type);
1812 TYPE_ALIGN (field_type) = align;
1813 }
1814 var = true;
1815 }
1816
1817 /* Make a new field name, if necessary. */
1818 if (var || align != 0)
1819 {
1820 char suffix[16];
1821
1822 if (align != 0)
1823 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
1824 align / BITS_PER_UNIT);
1825 else
1826 strcpy (suffix, "XVL");
1827
0fb2335d 1828 field_name = concat_name (field_name, suffix);
a1ab4c31
AC
1829 }
1830
da01bfee
EB
1831 new_field
1832 = create_field_decl (field_name, field_type, new_record_type,
1833 DECL_SIZE (old_field), pos, 0, 0);
910ad8de 1834 DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
a1ab4c31
AC
1835 TYPE_FIELDS (new_record_type) = new_field;
1836
1837 /* If the type of old_field is a QUAL_UNION_TYPE, take its size as being
1838 zero. The only time it's not the last field of the record
1839 is when there are other components at fixed positions after
1840 it (meaning there was a rep clause for every field) and we
1841 want to be able to encode them. */
1842 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
1843 (TREE_CODE (TREE_TYPE (old_field))
1844 == QUAL_UNION_TYPE)
1845 ? bitsize_zero_node
1846 : DECL_SIZE (old_field));
1847 prev_old_field = old_field;
1848 }
1849
fb88e1dd 1850 TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));
a1ab4c31 1851
a5695aa2 1852 add_parallel_type (record_type, new_record_type);
a1ab4c31 1853 }
a1ab4c31
AC
1854}
1855
a1ab4c31 1856/* Utility function of above to merge LAST_SIZE, the previous size of a record
1e17ef87
EB
1857 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
1858 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
1859 replace a value of zero with the old size. If HAS_REP is true, we take the
1860 MAX of the end position of this field with LAST_SIZE. In all other cases,
1861 we use FIRST_BIT plus SIZE. Return an expression for the size. */
a1ab4c31
AC
1862
1863static tree
1864merge_sizes (tree last_size, tree first_bit, tree size, bool special,
1865 bool has_rep)
1866{
1867 tree type = TREE_TYPE (last_size);
c6bd4220 1868 tree new_size;
a1ab4c31
AC
1869
1870 if (!special || TREE_CODE (size) != COND_EXPR)
1871 {
c6bd4220 1872 new_size = size_binop (PLUS_EXPR, first_bit, size);
a1ab4c31 1873 if (has_rep)
c6bd4220 1874 new_size = size_binop (MAX_EXPR, last_size, new_size);
a1ab4c31
AC
1875 }
1876
1877 else
c6bd4220
EB
1878 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
1879 integer_zerop (TREE_OPERAND (size, 1))
1880 ? last_size : merge_sizes (last_size, first_bit,
1881 TREE_OPERAND (size, 1),
1882 1, has_rep),
1883 integer_zerop (TREE_OPERAND (size, 2))
1884 ? last_size : merge_sizes (last_size, first_bit,
1885 TREE_OPERAND (size, 2),
1886 1, has_rep));
a1ab4c31
AC
1887
1888 /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
1889 when fed through substitute_in_expr) into thinking that a constant
1890 size is not constant. */
c6bd4220
EB
1891 while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
1892 new_size = TREE_OPERAND (new_size, 0);
a1ab4c31 1893
c6bd4220 1894 return new_size;
a1ab4c31
AC
1895}
1896
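/* Worked example for merge_sizes, illustrative only: in a RECORD_TYPE
   without rep clause, a field starting at bit 32 with a size of 16 bits
   gives

     merge_sizes (last_size, bitsize_int (32), bitsize_int (16),
                  false, false)  ==>  48 bits

   i.e. FIRST_BIT plus SIZE.  With HAS_REP true the result is instead
   MAX (LAST_SIZE, 48), so a field placed by a rep clause before the current
   end of the record cannot shrink the computed size.  */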
1897/* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1898 related by the addition of a constant. Return that constant if so. */
1899
1900static tree
1901compute_related_constant (tree op0, tree op1)
1902{
1903 tree op0_var, op1_var;
1904 tree op0_con = split_plus (op0, &op0_var);
1905 tree op1_con = split_plus (op1, &op1_var);
1906 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1907
1908 if (operand_equal_p (op0_var, op1_var, 0))
1909 return result;
1910 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1911 return result;
1912 else
1913 return 0;
1914}
1915
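/* Illustrative example, not from the original source: if OP0 is N + 96 and
   OP1 is N + 32 for the same variable bitsizetype expression N, then
   compute_related_constant returns the constant 64; if the variable parts
   differ, it returns 0 (NULL_TREE) to signal that no such constant exists.  */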
1916/* Utility function of above to split a tree OP which may be a sum, into a
1917 constant part, which is returned, and a variable part, which is stored
1918 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1919 bitsizetype. */
1920
1921static tree
1922split_plus (tree in, tree *pvar)
1923{
722356ce
EB
1924 /* Strip conversions in order to ease the tree traversal and maximize the
1925 potential for constant or plus/minus discovery. We need to be careful
a1ab4c31
AC
1926 to always return and set *pvar to bitsizetype trees, but it's worth
1927 the effort. */
722356ce 1928 in = remove_conversions (in, false);
a1ab4c31
AC
1929
1930 *pvar = convert (bitsizetype, in);
1931
1932 if (TREE_CODE (in) == INTEGER_CST)
1933 {
1934 *pvar = bitsize_zero_node;
1935 return convert (bitsizetype, in);
1936 }
1937 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1938 {
1939 tree lhs_var, rhs_var;
1940 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1941 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1942
1943 if (lhs_var == TREE_OPERAND (in, 0)
1944 && rhs_var == TREE_OPERAND (in, 1))
1945 return bitsize_zero_node;
1946
1947 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1948 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1949 }
1950 else
1951 return bitsize_zero_node;
1952}
1953\f
d47d0a8d
EB
1954/* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1955 subprogram. If it is VOID_TYPE, then we are dealing with a procedure,
1956 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1957 PARM_DECL nodes that are the subprogram parameters. CICO_LIST is the
1958 copy-in/copy-out list to be stored into the TYPE_CICO_LIST field.
1959 RETURN_UNCONSTRAINED_P is true if the function returns an unconstrained
1960 object. RETURN_BY_DIRECT_REF_P is true if the function returns by direct
1961 reference. RETURN_BY_INVISI_REF_P is true if the function returns by
1962 invisible reference. */
a1ab4c31
AC
1963
1964tree
1965create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
d47d0a8d
EB
1966 bool return_unconstrained_p, bool return_by_direct_ref_p,
1967 bool return_by_invisi_ref_p)
a1ab4c31 1968{
e5b00edf
NF
1969 /* A list of the data type nodes of the subprogram formal parameters.
1970 This list is generated by traversing the input list of PARM_DECL
1971 nodes. */
9771b263 1972 vec<tree, va_gc> *param_type_list = NULL;
d47d0a8d 1973 tree t, type;
a1ab4c31 1974
910ad8de 1975 for (t = param_decl_list; t; t = DECL_CHAIN (t))
9771b263 1976 vec_safe_push (param_type_list, TREE_TYPE (t));
a1ab4c31 1977
e5b00edf 1978 type = build_function_type_vec (return_type, param_type_list);
a1ab4c31 1979
d47d0a8d
EB
1980 /* TYPE may have been shared since GCC hashes types. If it has a different
1981 CICO_LIST, make a copy. Likewise for the various flags. */
523e82a7
EB
1982 if (!fntype_same_flags_p (type, cico_list, return_unconstrained_p,
1983 return_by_direct_ref_p, return_by_invisi_ref_p))
d47d0a8d
EB
1984 {
1985 type = copy_type (type);
1986 TYPE_CI_CO_LIST (type) = cico_list;
1987 TYPE_RETURN_UNCONSTRAINED_P (type) = return_unconstrained_p;
1988 TYPE_RETURN_BY_DIRECT_REF_P (type) = return_by_direct_ref_p;
1989 TREE_ADDRESSABLE (type) = return_by_invisi_ref_p;
1990 }
a1ab4c31 1991
a1ab4c31
AC
1992 return type;
1993}
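/* Illustrative sketch with hypothetical identifiers: building the type of a
   procedure taking one Integer parameter, with no copy-in/copy-out list and
   the default return convention:

     tree param = create_param_decl (get_identifier ("x"),
                                     integer_type_node, true);
     tree fntype = create_subprog_type (void_type_node, param, NULL_TREE,
                                        false, false, false);

   Because FUNCTION_TYPEs are hashed by GCC, the Ada-specific flags are only
   set on a fresh copy when they differ from those of the shared node.  */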
1994\f
1995/* Return a copy of TYPE but safe to modify in any way. */
1996
1997tree
1998copy_type (tree type)
1999{
c6bd4220 2000 tree new_type = copy_node (type);
a1ab4c31 2001
90dcfecb
EB
2002 /* Unshare the language-specific data. */
2003 if (TYPE_LANG_SPECIFIC (type))
2004 {
2005 TYPE_LANG_SPECIFIC (new_type) = NULL;
2006 SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
2007 }
2008
2009 /* And the contents of the language-specific slot if needed. */
2010 if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
2011 && TYPE_RM_VALUES (type))
2012 {
2013 TYPE_RM_VALUES (new_type) = NULL_TREE;
2014 SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
2015 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
2016 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
2017 }
2018
a1ab4c31
AC
2019 /* copy_node clears this field instead of copying it, because it is
2020 aliased with TREE_CHAIN. */
c6bd4220 2021 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
a1ab4c31 2022
c6bd4220
EB
2023 TYPE_POINTER_TO (new_type) = 0;
2024 TYPE_REFERENCE_TO (new_type) = 0;
2025 TYPE_MAIN_VARIANT (new_type) = new_type;
2026 TYPE_NEXT_VARIANT (new_type) = 0;
a1ab4c31 2027
c6bd4220 2028 return new_type;
a1ab4c31
AC
2029}
2030\f
c1abd261
EB
2031/* Return a subtype of sizetype with range MIN to MAX and whose
2032 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2033 of the associated TYPE_DECL. */
a1ab4c31
AC
2034
2035tree
2036create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
2037{
2038 /* First build a type for the desired range. */
523e82a7 2039 tree type = build_nonshared_range_type (sizetype, min, max);
a1ab4c31 2040
523e82a7 2041 /* Then set the index type. */
a1ab4c31 2042 SET_TYPE_INDEX_TYPE (type, index);
74746d49 2043 create_type_decl (NULL_TREE, type, true, false, gnat_node);
c1abd261 2044
a1ab4c31
AC
2045 return type;
2046}
84fb43a1
EB
2047
2048/* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2049 sizetype is used. */
2050
2051tree
2052create_range_type (tree type, tree min, tree max)
2053{
2054 tree range_type;
2055
2056 if (type == NULL_TREE)
2057 type = sizetype;
2058
2059 /* First build a type with the base range. */
523e82a7
EB
2060 range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
2061 TYPE_MAX_VALUE (type));
84fb43a1
EB
2062
2063 /* Then set the actual range. */
523e82a7
EB
2064 SET_TYPE_RM_MIN_VALUE (range_type, convert (type, min));
2065 SET_TYPE_RM_MAX_VALUE (range_type, convert (type, max));
84fb43a1
EB
2066
2067 return range_type;
2068}
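/* Illustrative example with hypothetical bounds: a subtype of sizetype
   holding the range 1 .. 10 can be obtained with

     tree t = create_range_type (NULL_TREE, size_int (1), size_int (10));

   The base range of T stays that of sizetype; only the RM bounds recorded
   by SET_TYPE_RM_MIN_VALUE and SET_TYPE_RM_MAX_VALUE are 1 and 10.  */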
a1ab4c31 2069\f
10069d53
EB
2070/* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
2071 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
2072 its data type. */
2073
2074tree
2075create_type_stub_decl (tree type_name, tree type)
2076{
2077 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
2078 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
2079 emitted in DWARF. */
c172df28
AH
2080 tree type_decl = build_decl (input_location,
2081 TYPE_DECL, type_name, type);
10069d53 2082 DECL_ARTIFICIAL (type_decl) = 1;
bc712852 2083 TYPE_ARTIFICIAL (type) = 1;
10069d53
EB
2084 return type_decl;
2085}
2086
2087/* Return a TYPE_DECL node. TYPE_NAME gives the name of the type and TYPE
2088 is a ..._TYPE node giving its data type. ARTIFICIAL_P is true if this
2089 is a declaration that was generated by the compiler. DEBUG_INFO_P is
2090 true if we need to write debug information about this type. GNAT_NODE
2091 is used for the position of the decl. */
a1ab4c31
AC
2092
2093tree
74746d49
EB
2094create_type_decl (tree type_name, tree type, bool artificial_p,
2095 bool debug_info_p, Node_Id gnat_node)
a1ab4c31 2096{
a1ab4c31 2097 enum tree_code code = TREE_CODE (type);
10069d53
EB
2098 bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
2099 tree type_decl;
a1ab4c31 2100
10069d53
EB
2101 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
2102 gcc_assert (!TYPE_IS_DUMMY_P (type));
a1ab4c31 2103
10069d53
EB
2104 /* If the type hasn't been named yet, we're naming it; preserve an existing
2105 TYPE_STUB_DECL that has been attached to it for some purpose. */
2106 if (!named && TYPE_STUB_DECL (type))
2107 {
2108 type_decl = TYPE_STUB_DECL (type);
2109 DECL_NAME (type_decl) = type_name;
2110 }
2111 else
74746d49 2112 type_decl = build_decl (input_location, TYPE_DECL, type_name, type);
a1ab4c31 2113
10069d53 2114 DECL_ARTIFICIAL (type_decl) = artificial_p;
bc712852 2115 TYPE_ARTIFICIAL (type) = artificial_p;
58c8f770
EB
2116
2117 /* Add this decl to the current binding level. */
10069d53 2118 gnat_pushdecl (type_decl, gnat_node);
58c8f770 2119
10069d53
EB
2120 /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
2121 This causes the name to be also viewed as a "tag" by the debug
2122 back-end, with the advantage that no DW_TAG_typedef is emitted
2123 for artificial "tagged" types in DWARF. */
2124 if (!named)
2125 TYPE_STUB_DECL (type) = type_decl;
2126
50741117
EB
2127 /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE, which the
2128 back-end doesn't support, and for other types if we don't need to. */
a1ab4c31
AC
2129 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
2130 DECL_IGNORED_P (type_decl) = 1;
a1ab4c31
AC
2131
2132 return type_decl;
2133}
10069d53 2134\f
a1ab4c31
AC
2135/* Return a VAR_DECL or CONST_DECL node.
2136
2137 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
2138 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
2139 the GCC tree for an optional initial expression; NULL_TREE if none.
2140
2141 CONST_FLAG is true if this variable is constant, in which case we might
2142 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
2143
2144 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
2145 definition to be made visible outside of the current compilation unit, for
2146 instance variable definitions in a package specification.
2147
1e17ef87 2148 EXTERN_FLAG is true when processing an external variable declaration (as
a1ab4c31
AC
2149 opposed to a definition: no storage is to be allocated for the variable).
2150
2151 STATIC_FLAG is only relevant when not at top level. In that case
2152 it indicates whether to always allocate storage to the variable.
2153
2154 GNAT_NODE is used for the position of the decl. */
2155
2156tree
2157create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
2158 bool const_flag, bool public_flag, bool extern_flag,
2159 bool static_flag, bool const_decl_allowed_p,
2160 struct attrib *attr_list, Node_Id gnat_node)
2161{
50179d58
EB
2162 /* Whether the initializer is a constant initializer. At the global level
2163 or for an external object or an object to be allocated in static memory,
2164 we check that it is a valid constant expression for use in initializing
2165 a static variable; otherwise, we only check that it is constant. */
a1ab4c31
AC
2166 bool init_const
2167 = (var_init != 0
2168 && gnat_types_compatible_p (type, TREE_TYPE (var_init))
50179d58 2169 && (global_bindings_p () || extern_flag || static_flag
a1ab4c31
AC
2170 ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
2171 : TREE_CONSTANT (var_init)));
2172
2173 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
2174 case the initializer may be used in-lieu of the DECL node (as done in
2175 Identifier_to_gnu). This is useful to prevent the need of elaboration
2176 code when an identifier for which such a decl is made is in turn used as
2177 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
2178 but extra constraints apply to this choice (see below) and are not
2179 relevant to the distinction we wish to make. */
2180 bool constant_p = const_flag && init_const;
2181
2182 /* The actual DECL node. CONST_DECL was initially intended for enumerals
2183 and may be used for scalars in general but not for aggregates. */
2184 tree var_decl
c172df28
AH
2185 = build_decl (input_location,
2186 (constant_p && const_decl_allowed_p
a1ab4c31
AC
2187 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
2188 var_name, type);
2189
2190 /* If this is external, throw away any initializations (they will be done
2191 elsewhere) unless this is a constant for which we would like to remain
2192 able to get the initializer. If we are defining a global here, leave a
2193 constant initialization and save any variable elaborations for the
2194 elaboration routine. If we are just annotating types, throw away the
2195 initialization if it isn't a constant. */
2196 if ((extern_flag && !constant_p)
2197 || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
2198 var_init = NULL_TREE;
2199
2200 /* At the global level, an initializer requiring code to be generated
2201 produces elaboration statements. Check that such statements are allowed,
2202 that is, not violating a No_Elaboration_Code restriction. */
3b9e8343 2203 if (global_bindings_p () && var_init != 0 && !init_const)
a1ab4c31 2204 Check_Elaboration_Code_Allowed (gnat_node);
3b9e8343 2205
8b7b0c36
JH
2206 DECL_INITIAL (var_decl) = var_init;
2207 TREE_READONLY (var_decl) = const_flag;
2208 DECL_EXTERNAL (var_decl) = extern_flag;
2209 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
2210 TREE_CONSTANT (var_decl) = constant_p;
2211 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
2212 = TYPE_VOLATILE (type);
a1ab4c31
AC
2213
2214 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
2215 try to fiddle with DECL_COMMON. However, on platforms that don't
2216 support global BSS sections, uninitialized global variables would
2217 go in DATA instead, thus increasing the size of the executable. */
2218 if (!flag_no_common
2219 && TREE_CODE (var_decl) == VAR_DECL
3b9e8343 2220 && TREE_PUBLIC (var_decl)
a1ab4c31
AC
2221 && !have_global_bss_p ())
2222 DECL_COMMON (var_decl) = 1;
a1ab4c31 2223
2231f17f
EB
2224 /* At the global binding level, we need to allocate static storage for the
2225 variable if it isn't external. Otherwise, we allocate automatic storage
2226 unless requested not to. */
a1ab4c31 2227 TREE_STATIC (var_decl)
2231f17f 2228 = !extern_flag && (static_flag || global_bindings_p ());
a1ab4c31 2229
5225a138
EB
2230 /* For an external constant whose initializer is not absolute, do not emit
2231 debug info. In DWARF this would mean a global relocation in a read-only
c01fe451 2232 section which runs afoul of the PE-COFF run-time relocation mechanism. */
5225a138
EB
2233 if (extern_flag
2234 && constant_p
50179d58 2235 && var_init
5225a138
EB
2236 && initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
2237 != null_pointer_node)
2238 DECL_IGNORED_P (var_decl) = 1;
2239
a1ab4c31
AC
2240 if (TREE_SIDE_EFFECTS (var_decl))
2241 TREE_ADDRESSABLE (var_decl) = 1;
2242
74746d49
EB
2243 /* ??? Some attributes cannot be applied to CONST_DECLs. */
2244 if (TREE_CODE (var_decl) == VAR_DECL)
2245 process_attributes (&var_decl, &attr_list, true, gnat_node);
2246
2247 /* Add this decl to the current binding level. */
2248 gnat_pushdecl (var_decl, gnat_node);
2249
58c8f770 2250 if (TREE_CODE (var_decl) == VAR_DECL)
a1ab4c31 2251 {
58c8f770
EB
2252 if (asm_name)
2253 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
74746d49 2254
a1ab4c31
AC
2255 if (global_bindings_p ())
2256 rest_of_decl_compilation (var_decl, true, 0);
2257 }
a1ab4c31
AC
2258
2259 return var_decl;
2260}
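/* Illustrative sketch with hypothetical identifiers: declaring a public,
   initialized integer variable could look like

     tree v = create_var_decl_1 (get_identifier ("counter"), NULL_TREE,
                                 integer_type_node, integer_zero_node,
                                 false, true, false, false, true,
                                 NULL, gnat_node);

   where the booleans are CONST_FLAG, PUBLIC_FLAG, EXTERN_FLAG, STATIC_FLAG
   and CONST_DECL_ALLOWED_P in that order.  At the global binding level
   TREE_STATIC is set and static storage is allocated; a local declaration
   gets automatic storage unless STATIC_FLAG is true.  */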
2261\f
2262/* Return true if TYPE, an aggregate type, contains (or is) an array. */
2263
2264static bool
2265aggregate_type_contains_array_p (tree type)
2266{
2267 switch (TREE_CODE (type))
2268 {
2269 case RECORD_TYPE:
2270 case UNION_TYPE:
2271 case QUAL_UNION_TYPE:
2272 {
2273 tree field;
910ad8de 2274 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
a1ab4c31
AC
2275 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
2276 && aggregate_type_contains_array_p (TREE_TYPE (field)))
2277 return true;
2278 return false;
2279 }
2280
2281 case ARRAY_TYPE:
2282 return true;
2283
2284 default:
2285 gcc_unreachable ();
2286 }
2287}
2288
62f9f3ce 2289/* Return a FIELD_DECL node. FIELD_NAME is the field's name, FIELD_TYPE is
da01bfee
EB
2290 its type and RECORD_TYPE is the type of the enclosing record. If SIZE is
2291 nonzero, it is the specified size of the field. If POS is nonzero, it is
2292 the bit position. PACKED is 1 if the enclosing record is packed, -1 if it
2293 has Component_Alignment of Storage_Unit. If ADDRESSABLE is nonzero, it
62f9f3ce
EB
2294 means we are allowed to take the address of the field; if it is negative,
2295 we should not make a bitfield, which is used by make_aligning_type. */
a1ab4c31
AC
2296
2297tree
2298create_field_decl (tree field_name, tree field_type, tree record_type,
da01bfee 2299 tree size, tree pos, int packed, int addressable)
a1ab4c31 2300{
c172df28
AH
2301 tree field_decl = build_decl (input_location,
2302 FIELD_DECL, field_name, field_type);
a1ab4c31
AC
2303
2304 DECL_CONTEXT (field_decl) = record_type;
2305 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
2306
2307 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
2308 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
2309 Likewise for an aggregate without specified position that contains an
2310 array, because in this case slices of variable length of this array
2311 must be handled by GCC and variable-sized objects need to be aligned
2312 to at least a byte boundary. */
2313 if (packed && (TYPE_MODE (field_type) == BLKmode
2314 || (!pos
2315 && AGGREGATE_TYPE_P (field_type)
2316 && aggregate_type_contains_array_p (field_type))))
2317 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
2318
2319 /* If a size is specified, use it. Otherwise, if the record type is packed,
2320 compute a size to use, which may differ from the object's natural size.
2321 We always set a size in this case to trigger the checks for bitfield
2322 creation below, which is typically required when no position has been
2323 specified. */
2324 if (size)
2325 size = convert (bitsizetype, size);
2326 else if (packed == 1)
2327 {
2328 size = rm_size (field_type);
62f9f3ce
EB
2329 if (TYPE_MODE (field_type) == BLKmode)
2330 size = round_up (size, BITS_PER_UNIT);
a1ab4c31
AC
2331 }
2332
2333 /* If ADDRESSABLE permits it, make a bitfield when a size is specified,
2334 for two reasons: first, if the size differs from the natural size;
2335 second, if the alignment is insufficient. There are a number of
2336 ways the latter can be true.
2337
2338 We never make a bitfield if the type of the field has a nonconstant size,
2339 because no such entity requiring bitfield operations should reach here.
2340
2341 We do *preventively* make a bitfield when there might be the need for it
2342 but we don't have all the necessary information to decide, as is the case
2343 of a field with no specified position in a packed record.
2344
2345 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
2346 in layout_decl or finish_record_type to clear the bit_field indication if
2347 it is in fact not needed. */
2348 if (addressable >= 0
2349 && size
2350 && TREE_CODE (size) == INTEGER_CST
2351 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
2352 && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
2353 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
2354 || packed
2355 || (TYPE_ALIGN (record_type) != 0
2356 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
2357 {
2358 DECL_BIT_FIELD (field_decl) = 1;
2359 DECL_SIZE (field_decl) = size;
2360 if (!packed && !pos)
feec4372
EB
2361 {
2362 if (TYPE_ALIGN (record_type) != 0
2363 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))
2364 DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
2365 else
2366 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
2367 }
a1ab4c31
AC
2368 }
2369
2370 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
2371
2372 /* Bump the alignment if need be, either for bitfield/packing purposes or
2373 to satisfy the type requirements if no such consideration applies. When
2374 we get the alignment from the type, indicate if this is from an explicit
2375 user request, which prevents stor-layout from lowering it later on. */
2376 {
d9223014 2377 unsigned int bit_align
a1ab4c31
AC
2378 = (DECL_BIT_FIELD (field_decl) ? 1
2379 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);
2380
2381 if (bit_align > DECL_ALIGN (field_decl))
2382 DECL_ALIGN (field_decl) = bit_align;
2383 else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
2384 {
2385 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
2386 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
2387 }
2388 }
2389
2390 if (pos)
2391 {
2392 /* We need to pass in the alignment the DECL is known to have.
2393 This is the lowest-order bit set in POS, but no more than
2394 the alignment of the record, if one is specified. Note
2395 that an alignment of 0 is taken as infinite. */
2396 unsigned int known_align;
2397
2398 if (host_integerp (pos, 1))
2399 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
2400 else
2401 known_align = BITS_PER_UNIT;
2402
2403 if (TYPE_ALIGN (record_type)
2404 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
2405 known_align = TYPE_ALIGN (record_type);
2406
2407 layout_decl (field_decl, known_align);
2408 SET_DECL_OFFSET_ALIGN (field_decl,
2409 host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
2410 : BITS_PER_UNIT);
2411 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
2412 &DECL_FIELD_BIT_OFFSET (field_decl),
2413 DECL_OFFSET_ALIGN (field_decl), pos);
a1ab4c31
AC
2414 }
2415
2416 /* In addition to what our caller says, claim the field is addressable if we
2417 know that its type is not suitable.
2418
2419 The field may also be "technically" nonaddressable, meaning that even if
2420 we attempt to take the field's address we will actually get the address
2421 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
2422 value we have at this point is not accurate enough, so we don't account
2423 for this here and let finish_record_type decide. */
4c5a0615 2424 if (!addressable && !type_for_nonaliased_component_p (field_type))
a1ab4c31
AC
2425 addressable = 1;
2426
2427 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
2428
2429 return field_decl;
2430}
2431\f
a8e05f92
EB
2432/* Return a PARM_DECL node. PARAM_NAME is the name of the parameter and
2433 PARAM_TYPE is its type. READONLY is true if the parameter is readonly
2434 (either an In parameter or an address of a pass-by-ref parameter). */
a1ab4c31
AC
2435
2436tree
2437create_param_decl (tree param_name, tree param_type, bool readonly)
2438{
c172df28
AH
2439 tree param_decl = build_decl (input_location,
2440 PARM_DECL, param_name, param_type);
a1ab4c31 2441
a8e05f92
EB
2442 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
2443 can lead to various ABI violations. */
2444 if (targetm.calls.promote_prototypes (NULL_TREE)
2445 && INTEGRAL_TYPE_P (param_type)
a1ab4c31
AC
2446 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
2447 {
2448 /* We have to be careful about biased types here. Make a subtype
2449 of integer_type_node with the proper biasing. */
2450 if (TREE_CODE (param_type) == INTEGER_TYPE
2451 && TYPE_BIASED_REPRESENTATION_P (param_type))
2452 {
84fb43a1
EB
2453 tree subtype
2454 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
c1abd261
EB
2455 TREE_TYPE (subtype) = integer_type_node;
2456 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
84fb43a1
EB
2457 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (param_type));
2458 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (param_type));
c1abd261 2459 param_type = subtype;
a1ab4c31
AC
2460 }
2461 else
2462 param_type = integer_type_node;
2463 }
2464
2465 DECL_ARG_TYPE (param_decl) = param_type;
2466 TREE_READONLY (param_decl) = readonly;
2467 return param_decl;
2468}
2469\f
74746d49
EB
2470/* Process the attributes in ATTR_LIST for NODE, which is either a DECL or
2471 a TYPE. If IN_PLACE is true, the tree pointed to by NODE should not be
2472 changed. GNAT_NODE is used for the position of error messages. */
a1ab4c31 2473
74746d49
EB
2474void
2475process_attributes (tree *node, struct attrib **attr_list, bool in_place,
2476 Node_Id gnat_node)
a1ab4c31 2477{
74746d49
EB
2478 struct attrib *attr;
2479
2480 for (attr = *attr_list; attr; attr = attr->next)
2481 switch (attr->type)
a1ab4c31
AC
2482 {
2483 case ATTR_MACHINE_ATTRIBUTE:
74746d49
EB
2484 Sloc_to_locus (Sloc (gnat_node), &input_location);
2485 decl_attributes (node, tree_cons (attr->name, attr->args, NULL_TREE),
2486 in_place ? ATTR_FLAG_TYPE_IN_PLACE : 0);
a1ab4c31
AC
2487 break;
2488
2489 case ATTR_LINK_ALIAS:
74746d49 2490 if (!DECL_EXTERNAL (*node))
a1ab4c31 2491 {
74746d49
EB
2492 TREE_STATIC (*node) = 1;
2493 assemble_alias (*node, attr->name);
a1ab4c31
AC
2494 }
2495 break;
2496
2497 case ATTR_WEAK_EXTERNAL:
2498 if (SUPPORTS_WEAK)
74746d49 2499 declare_weak (*node);
a1ab4c31
AC
2500 else
2501 post_error ("?weak declarations not supported on this target",
74746d49 2502 attr->error_point);
a1ab4c31
AC
2503 break;
2504
2505 case ATTR_LINK_SECTION:
677f3fa8 2506 if (targetm_common.have_named_sections)
a1ab4c31 2507 {
74746d49
EB
2508 DECL_SECTION_NAME (*node)
2509 = build_string (IDENTIFIER_LENGTH (attr->name),
2510 IDENTIFIER_POINTER (attr->name));
2511 DECL_COMMON (*node) = 0;
a1ab4c31
AC
2512 }
2513 else
2514 post_error ("?section attributes are not supported for this target",
74746d49 2515 attr->error_point);
a1ab4c31
AC
2516 break;
2517
2518 case ATTR_LINK_CONSTRUCTOR:
74746d49
EB
2519 DECL_STATIC_CONSTRUCTOR (*node) = 1;
2520 TREE_USED (*node) = 1;
a1ab4c31
AC
2521 break;
2522
2523 case ATTR_LINK_DESTRUCTOR:
74746d49
EB
2524 DECL_STATIC_DESTRUCTOR (*node) = 1;
2525 TREE_USED (*node) = 1;
a1ab4c31 2526 break;
40a14772
TG
2527
2528 case ATTR_THREAD_LOCAL_STORAGE:
74746d49
EB
2529 DECL_TLS_MODEL (*node) = decl_default_tls_model (*node);
2530 DECL_COMMON (*node) = 0;
40a14772 2531 break;
a1ab4c31 2532 }
74746d49
EB
2533
2534 *attr_list = NULL;
a1ab4c31
AC
2535}
2536\f
feec4372 2537/* Record DECL as a global renaming pointer. */
a1ab4c31
AC
2538
2539void
2540record_global_renaming_pointer (tree decl)
2541{
15bf7d19 2542 gcc_assert (!DECL_LOOP_PARM_P (decl) && DECL_RENAMED_OBJECT (decl));
9771b263 2543 vec_safe_push (global_renaming_pointers, decl);
a1ab4c31
AC
2544}
2545
2546/* Invalidate the global renaming pointers. */
2547
2548void
2549invalidate_global_renaming_pointers (void)
2550{
2551 unsigned int i;
2552 tree iter;
2553
9771b263
DN
2554 if (global_renaming_pointers == NULL)
2555 return;
2556
2557 FOR_EACH_VEC_ELT (*global_renaming_pointers, i, iter)
a1ab4c31
AC
2558 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
2559
9771b263 2560 vec_free (global_renaming_pointers);
a1ab4c31
AC
2561}
2562
2563/* Return true if VALUE is a known to be a multiple of FACTOR, which must be
2564 a power of 2. */
2565
2566bool
2567value_factor_p (tree value, HOST_WIDE_INT factor)
2568{
2569 if (host_integerp (value, 1))
2570 return tree_low_cst (value, 1) % factor == 0;
2571
2572 if (TREE_CODE (value) == MULT_EXPR)
2573 return (value_factor_p (TREE_OPERAND (value, 0), factor)
2574 || value_factor_p (TREE_OPERAND (value, 1), factor));
2575
2576 return false;
2577}
2578
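/* Illustrative examples, not from the original source:

     value_factor_p (bitsize_int (24), 8)  ==>  true, since 24 % 8 == 0
     value_factor_p (bitsize_int (12), 8)  ==>  false

   For a MULT_EXPR such as N * 16, it is enough that one operand be a
   multiple of FACTOR, so the result is true for FACTOR 8 whatever N is.  */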
7d7fcb08 2579/* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
a1ab4c31
AC
2580 unless we can prove these 2 fields are laid out in such a way that no gap
2581 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
2582 is the distance in bits between the end of PREV_FIELD and the starting
2583 position of CURR_FIELD. It is ignored if null. */
2584
2585static bool
2586potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
2587{
2588 /* If this is the first field of the record, there cannot be any gap. */
2589 if (!prev_field)
2590 return false;
2591
78df6221 2592 /* If the previous field is a qualified union type, then return false: the only
a1ab4c31
AC
2593 time when such a field is not the last field of the record is when
2594 there are other components at fixed positions after it (meaning there
2595 was a rep clause for every field), in which case we don't want the
2596 alignment constraint to override them. */
2597 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
2598 return false;
2599
2600 /* If the distance between the end of prev_field and the beginning of
2601 curr_field is constant, then there is a gap if the value of this
2602 constant is not null. */
2603 if (offset && host_integerp (offset, 1))
2604 return !integer_zerop (offset);
2605
2606 /* If the size and position of the previous field are constant,
2607 then check the sum of this size and position. There will be a gap
2608 iff it is not multiple of the current field alignment. */
2609 if (host_integerp (DECL_SIZE (prev_field), 1)
2610 && host_integerp (bit_position (prev_field), 1))
2611 return ((tree_low_cst (bit_position (prev_field), 1)
2612 + tree_low_cst (DECL_SIZE (prev_field), 1))
2613 % DECL_ALIGN (curr_field) != 0);
2614
2615 /* If both the position and size of the previous field are multiples
2616 of the current field alignment, there cannot be any gap. */
2617 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
2618 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
2619 return false;
2620
2621 /* Fallback: return that there may be a potential gap. */
2622 return true;
2623}
2624
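/* Worked example with a hypothetical layout: if PREV_FIELD is a 24-bit
   field at bit position 0 and CURR_FIELD has a 32-bit alignment, then
   (0 + 24) % 32 != 0 and potential_alignment_gap returns true, so the
   caller must assume padding may have been inserted.  If PREV_FIELD were
   32 bits wide instead, 0 + 32 would be a multiple of the alignment and
   the function would return false.  */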
88a94e2b
EB
2625/* Return a LABEL_DECL with LABEL_NAME. GNAT_NODE is used for the position
2626 of the decl. */
a1ab4c31
AC
2627
2628tree
88a94e2b 2629create_label_decl (tree label_name, Node_Id gnat_node)
a1ab4c31 2630{
88a94e2b
EB
2631 tree label_decl
2632 = build_decl (input_location, LABEL_DECL, label_name, void_type_node);
a1ab4c31 2633
88a94e2b
EB
2634 DECL_MODE (label_decl) = VOIDmode;
2635
2636 /* Add this decl to the current binding level. */
2637 gnat_pushdecl (label_decl, gnat_node);
a1ab4c31
AC
2638
2639 return label_decl;
2640}
2641\f
7d7fcb08 2642/* Return a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
a1ab4c31
AC
2643 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
2644 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
7d76717d 2645 PARM_DECL nodes chained through the DECL_CHAIN field).
a1ab4c31 2646
0e24192c 2647 INLINE_STATUS, PUBLIC_FLAG, EXTERN_FLAG, ARTIFICIAL_FLAG and ATTR_LIST are
7d7fcb08
EB
2648 used to set the appropriate fields in the FUNCTION_DECL. GNAT_NODE is
2649 used for the position of the decl. */
a1ab4c31
AC
2650
2651tree
7d7fcb08 2652create_subprog_decl (tree subprog_name, tree asm_name, tree subprog_type,
0e24192c
EB
2653 tree param_decl_list, enum inline_status_t inline_status,
2654 bool public_flag, bool extern_flag, bool artificial_flag,
7d7fcb08 2655 struct attrib *attr_list, Node_Id gnat_node)
a1ab4c31 2656{
d47d0a8d
EB
2657 tree subprog_decl = build_decl (input_location, FUNCTION_DECL, subprog_name,
2658 subprog_type);
2659 tree result_decl = build_decl (input_location, RESULT_DECL, NULL_TREE,
2660 TREE_TYPE (subprog_type));
7d7fcb08 2661 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
a1ab4c31 2662
d84b344a
EB
2663 /* If this is a non-inline function nested inside an inlined external
2664 function, we cannot honor both requests without cloning the nested
2665 function in the current unit since it is private to the other unit.
2666 We could inline the nested function as well but it's probably better
2667 to err on the side of too little inlining. */
0e24192c 2668 if (inline_status != is_enabled
5daed84a 2669 && !public_flag
d84b344a
EB
2670 && current_function_decl
2671 && DECL_DECLARED_INLINE_P (current_function_decl)
a1ab4c31 2672 && DECL_EXTERNAL (current_function_decl))
d84b344a 2673 DECL_DECLARED_INLINE_P (current_function_decl) = 0;
a1ab4c31 2674
7d7fcb08
EB
2675 DECL_ARTIFICIAL (subprog_decl) = artificial_flag;
2676 DECL_EXTERNAL (subprog_decl) = extern_flag;
0e24192c
EB
2677
2678 switch (inline_status)
2679 {
2680 case is_suppressed:
2681 DECL_UNINLINABLE (subprog_decl) = 1;
2682 break;
2683
2684 case is_disabled:
2685 break;
2686
2687 case is_enabled:
2688 DECL_DECLARED_INLINE_P (subprog_decl) = 1;
2689 DECL_NO_INLINE_WARNING_P (subprog_decl) = artificial_flag;
2690 break;
2691
2692 default:
2693 gcc_unreachable ();
2694 }
7d7fcb08
EB
2695
2696 TREE_PUBLIC (subprog_decl) = public_flag;
2697 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
a1ab4c31
AC
2698 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
2699 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
a1ab4c31 2700
d47d0a8d
EB
2701 DECL_ARTIFICIAL (result_decl) = 1;
2702 DECL_IGNORED_P (result_decl) = 1;
2703 DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (subprog_type);
2704 DECL_RESULT (subprog_decl) = result_decl;
a1ab4c31 2705
a1ab4c31
AC
2706 if (asm_name)
2707 {
2708 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
2709
2710 /* The expand_main_function circuitry expects "main_identifier_node" to
2711 designate the DECL_NAME of the 'main' entry point, in turn expected
2712 to be declared as the "main" function literally by default. Ada
2713 program entry points are typically declared with a different name
2714 within the binder generated file, exported as 'main' to satisfy the
cfbb663c 2715 system expectations. Force main_identifier_node in this case. */
a1ab4c31 2716 if (asm_name == main_identifier_node)
cfbb663c 2717 DECL_NAME (subprog_decl) = main_identifier_node;
a1ab4c31
AC
2718 }
2719
74746d49
EB
2720 process_attributes (&subprog_decl, &attr_list, true, gnat_node);
2721
a1ab4c31
AC
2722 /* Add this decl to the current binding level. */
2723 gnat_pushdecl (subprog_decl, gnat_node);
2724
2725 /* Output the assembler code and/or RTL for the declaration. */
2726 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
2727
2728 return subprog_decl;
2729}
2730\f
2731/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
2732 body. This routine needs to be invoked before processing the declarations
2733 appearing in the subprogram. */
2734
2735void
2736begin_subprog_body (tree subprog_decl)
2737{
2738 tree param_decl;
2739
a1ab4c31
AC
2740 announce_function (subprog_decl);
2741
0ae44446
JR
2742 /* This function is being defined. */
2743 TREE_STATIC (subprog_decl) = 1;
2744
58c8f770
EB
2745 current_function_decl = subprog_decl;
2746
a1ab4c31
AC
2747 /* Enter a new binding level and show that all the parameters belong to
2748 this function. */
2749 gnat_pushlevel ();
a09d56d8 2750
a1ab4c31 2751 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
910ad8de 2752 param_decl = DECL_CHAIN (param_decl))
a1ab4c31
AC
2753 DECL_CONTEXT (param_decl) = subprog_decl;
2754
2755 make_decl_rtl (subprog_decl);
a1ab4c31
AC
2756}
2757
71196d4e 2758/* Finish translating the current subprogram and set its BODY. */
a1ab4c31
AC
2759
2760void
a406865a 2761end_subprog_body (tree body)
a1ab4c31
AC
2762{
2763 tree fndecl = current_function_decl;
2764
bd9c7fb9 2765 /* Attach the BLOCK for this level to the function and pop the level. */
a1ab4c31
AC
2766 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
2767 DECL_INITIAL (fndecl) = current_binding_level->block;
2768 gnat_poplevel ();
2769
a1ab4c31
AC
2770 /* Mark the RESULT_DECL as being in this subprogram. */
2771 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
2772
a963da4d
EB
2773 /* The body should be a BIND_EXPR whose BLOCK is the top-level one. */
2774 if (TREE_CODE (body) == BIND_EXPR)
2775 {
2776 BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
2777 DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
2778 }
2779
a1ab4c31
AC
2780 DECL_SAVED_TREE (fndecl) = body;
2781
228ee426 2782 current_function_decl = decl_function_context (fndecl);
71196d4e
EB
2783}
2784
2785/* Wrap up compilation of SUBPROG_DECL, a subprogram body. */
a1ab4c31 2786
71196d4e
EB
2787void
2788rest_of_subprog_body_compilation (tree subprog_decl)
2789{
a1ab4c31
AC
2790 /* We cannot track the location of errors past this point. */
2791 error_gnat_node = Empty;
2792
2793 /* If we're only annotating types, don't actually compile this function. */
2794 if (type_annotate_only)
2795 return;
2796
a406865a 2797 /* Dump functions before gimplification. */
71196d4e 2798 dump_function (TDI_original, subprog_decl);
a406865a 2799
228ee426 2800 if (!decl_function_context (subprog_decl))
71196d4e 2801 cgraph_finalize_function (subprog_decl, false);
a1ab4c31
AC
2802 else
2803 /* Register this function with cgraph just far enough to get it
2804 added to our parent's nested function list. */
71196d4e 2805 (void) cgraph_get_create_node (subprog_decl);
a1ab4c31
AC
2806}
2807
a1ab4c31
AC
2808tree
2809gnat_builtin_function (tree decl)
2810{
2811 gnat_pushdecl (decl, Empty);
2812 return decl;
2813}
2814
2815/* Return an integer type with the number of bits of precision given by
2816 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2817 it is a signed type. */
2818
2819tree
2820gnat_type_for_size (unsigned precision, int unsignedp)
2821{
2822 tree t;
2823 char type_name[20];
2824
2825 if (precision <= 2 * MAX_BITS_PER_WORD
2826 && signed_and_unsigned_types[precision][unsignedp])
2827 return signed_and_unsigned_types[precision][unsignedp];
2828
2829 if (unsignedp)
2830 t = make_unsigned_type (precision);
2831 else
2832 t = make_signed_type (precision);
2833
2834 if (precision <= 2 * MAX_BITS_PER_WORD)
2835 signed_and_unsigned_types[precision][unsignedp] = t;
2836
2837 if (!TYPE_NAME (t))
2838 {
c3d79c60 2839 sprintf (type_name, "%sSIGNED_%u", unsignedp ? "UN" : "", precision);
a1ab4c31
AC
2840 TYPE_NAME (t) = get_identifier (type_name);
2841 }
2842
2843 return t;
2844}
2845
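/* Illustrative examples: gnat_type_for_size (8, 1) returns an unsigned
   8-bit integer type named "UNSIGNED_8", while gnat_type_for_size (32, 0)
   returns a signed 32-bit type named "SIGNED_32".  Both are cached in
   signed_and_unsigned_types so later requests for the same precision and
   signedness return the same node.  */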
2846/* Likewise for floating-point types. */
2847
2848static tree
2849float_type_for_precision (int precision, enum machine_mode mode)
2850{
2851 tree t;
2852 char type_name[20];
2853
2854 if (float_types[(int) mode])
2855 return float_types[(int) mode];
2856
2857 float_types[(int) mode] = t = make_node (REAL_TYPE);
2858 TYPE_PRECISION (t) = precision;
2859 layout_type (t);
2860
2861 gcc_assert (TYPE_MODE (t) == mode);
2862 if (!TYPE_NAME (t))
2863 {
2864 sprintf (type_name, "FLOAT_%d", precision);
2865 TYPE_NAME (t) = get_identifier (type_name);
2866 }
2867
2868 return t;
2869}
2870
2871/* Return a data type that has machine mode MODE. UNSIGNEDP selects
2872 an unsigned type; otherwise a signed type is returned. */
2873
2874tree
2875gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2876{
2877 if (mode == BLKmode)
2878 return NULL_TREE;
2799d18c
EB
2879
2880 if (mode == VOIDmode)
a1ab4c31 2881 return void_type_node;
2799d18c
EB
2882
2883 if (COMPLEX_MODE_P (mode))
a1ab4c31 2884 return NULL_TREE;
2799d18c
EB
2885
2886 if (SCALAR_FLOAT_MODE_P (mode))
a1ab4c31 2887 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2799d18c
EB
2888
2889 if (SCALAR_INT_MODE_P (mode))
a1ab4c31 2890 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2799d18c
EB
2891
2892 if (VECTOR_MODE_P (mode))
2893 {
2894 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2895 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
2896 if (inner_type)
2897 return build_vector_type_for_mode (inner_type, mode);
2898 }
2899
2900 return NULL_TREE;
a1ab4c31
AC
2901}
2902
2903/* Return the unsigned version of a TYPE_NODE, a scalar type. */
2904
2905tree
2906gnat_unsigned_type (tree type_node)
2907{
2908 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2909
2910 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2911 {
2912 type = copy_node (type);
2913 TREE_TYPE (type) = type_node;
2914 }
2915 else if (TREE_TYPE (type_node)
2916 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2917 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2918 {
2919 type = copy_node (type);
2920 TREE_TYPE (type) = TREE_TYPE (type_node);
2921 }
2922
2923 return type;
2924}
2925
2926/* Return the signed version of a TYPE_NODE, a scalar type. */
2927
2928tree
2929gnat_signed_type (tree type_node)
2930{
2931 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2932
2933 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2934 {
2935 type = copy_node (type);
2936 TREE_TYPE (type) = type_node;
2937 }
2938 else if (TREE_TYPE (type_node)
2939 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2940 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2941 {
2942 type = copy_node (type);
2943 TREE_TYPE (type) = TREE_TYPE (type_node);
2944 }
2945
2946 return type;
2947}
2948
2949/* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2950 transparently converted to each other. */
2951
2952int
2953gnat_types_compatible_p (tree t1, tree t2)
2954{
2955 enum tree_code code;
2956
2957 /* This is the default criterion. */
2958 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
2959 return 1;
2960
2961 /* We only check structural equivalence here. */
2962 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
2963 return 0;
2964
7948ae37
OH
2965 /* Vector types are also compatible if they have the same number of subparts
2966 and the same form of (scalar) element type. */
2967 if (code == VECTOR_TYPE
2968 && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
2969 && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
2970 && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
2971 return 1;
2972
cfa0bd19 2973 /* Array types are also compatible if they are constrained and have the same
96bba5e6 2974 domain(s) and the same component type. */
a1ab4c31 2975 if (code == ARRAY_TYPE
0adef32b
JJ
2976 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
2977 || (TYPE_DOMAIN (t1)
b4680ca1 2978 && TYPE_DOMAIN (t2)
0adef32b
JJ
2979 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
2980 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
2981 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
cfa0bd19 2982 TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
96bba5e6
EB
2983 && (TREE_TYPE (t1) == TREE_TYPE (t2)
2984 || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
2985 && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))))
a1ab4c31
AC
2986 return 1;
2987
a1ab4c31
AC
2988 return 0;
2989}
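/* A small sketch of the array case handled above: two independently built
   constrained array types with the same bounds and the same component type
   are deemed compatible.  Hypothetical helper, not compiled.  */
#if 0
static void
example_array_compatibility (void)
{
  tree idx1 = build_index_type (size_int (9));
  tree idx2 = build_index_type (size_int (9));
  tree a1 = build_array_type (integer_type_node, idx1);
  tree a2 = build_array_type (integer_type_node, idx2);

  /* Same domain bounds and same component type => compatible.  */
  gcc_assert (gnat_types_compatible_p (a1, a2));
}
#endif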
523e82a7 2990
71196d4e
EB
2991/* Return true if EXPR is a useless type conversion. */
2992
2993bool
2994gnat_useless_type_conversion (tree expr)
2995{
2996 if (CONVERT_EXPR_P (expr)
2997 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
2998 || TREE_CODE (expr) == NON_LVALUE_EXPR)
2999 return gnat_types_compatible_p (TREE_TYPE (expr),
3000 TREE_TYPE (TREE_OPERAND (expr, 0)));
3001
3002 return false;
3003}
3004
523e82a7
EB
3005/* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
3006
3007bool
3008fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
3009 bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
3010{
3011 return TYPE_CI_CO_LIST (t) == cico_list
3012 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
3013 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
3014 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
3015}
a1ab4c31
AC
3016\f
3017/* EXP is an expression for the size of an object. If this size contains
3018 discriminant references, replace them with the maximum (if MAX_P) or
3019 minimum (if !MAX_P) possible value of the discriminant. */
3020
3021tree
3022max_size (tree exp, bool max_p)
3023{
3024 enum tree_code code = TREE_CODE (exp);
3025 tree type = TREE_TYPE (exp);
3026
3027 switch (TREE_CODE_CLASS (code))
3028 {
3029 case tcc_declaration:
3030 case tcc_constant:
3031 return exp;
3032
3033 case tcc_vl_exp:
3034 if (code == CALL_EXPR)
3035 {
f82a627c
EB
3036 tree t, *argarray;
3037 int n, i;
3038
3039 t = maybe_inline_call_in_expr (exp);
3040 if (t)
3041 return max_size (t, max_p);
a1ab4c31 3042
f82a627c
EB
3043 n = call_expr_nargs (exp);
3044 gcc_assert (n > 0);
2bb1fc26 3045 argarray = XALLOCAVEC (tree, n);
a1ab4c31
AC
3046 for (i = 0; i < n; i++)
3047 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
3048 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
3049 }
3050 break;
3051
3052 case tcc_reference:
3053 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
3054 modify. Otherwise, we treat it like a variable. */
3055 if (!CONTAINS_PLACEHOLDER_P (exp))
3056 return exp;
3057
3058 type = TREE_TYPE (TREE_OPERAND (exp, 1));
3059 return
3060 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);
3061
3062 case tcc_comparison:
3063 return max_p ? size_one_node : size_zero_node;
3064
3065 case tcc_unary:
ce3da0d0
EB
3066 if (code == NON_LVALUE_EXPR)
3067 return max_size (TREE_OPERAND (exp, 0), max_p);
6625d7bc 3068
ce3da0d0
EB
3069 return fold_build1 (code, type,
3070 max_size (TREE_OPERAND (exp, 0),
3071 code == NEGATE_EXPR ? !max_p : max_p));
3072
a1ab4c31 3073 case tcc_binary:
ce3da0d0
EB
3074 {
3075 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
3076 tree rhs = max_size (TREE_OPERAND (exp, 1),
3077 code == MINUS_EXPR ? !max_p : max_p);
3078
3079 /* Special-case wanting the maximum value of a MIN_EXPR.
3080 In that case, if one side overflows, return the other. */
3081 if (max_p && code == MIN_EXPR)
3082 {
3083 if (TREE_CODE (rhs) == INTEGER_CST && TREE_OVERFLOW (rhs))
3084 return lhs;
3085
3086 if (TREE_CODE (lhs) == INTEGER_CST && TREE_OVERFLOW (lhs))
3087 return rhs;
3088 }
3089
3090 /* Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
3091 overflowing and the RHS a variable. */
3092 if ((code == MINUS_EXPR || code == PLUS_EXPR)
3093 && TREE_CODE (lhs) == INTEGER_CST
3094 && TREE_OVERFLOW (lhs)
3095 && !TREE_CONSTANT (rhs))
3096 return lhs;
3097
3098 return size_binop (code, lhs, rhs);
3099 }
3100
a1ab4c31
AC
3101 case tcc_expression:
3102 switch (TREE_CODE_LENGTH (code))
3103 {
3104 case 1:
722356ce
EB
3105 if (code == SAVE_EXPR)
3106 return exp;
ce3da0d0
EB
3107
3108 return fold_build1 (code, type,
3109 max_size (TREE_OPERAND (exp, 0), max_p));
a1ab4c31
AC
3110
3111 case 2:
3112 if (code == COMPOUND_EXPR)
3113 return max_size (TREE_OPERAND (exp, 1), max_p);
3114
ce3da0d0
EB
3115 return fold_build2 (code, type,
3116 max_size (TREE_OPERAND (exp, 0), max_p),
3117 max_size (TREE_OPERAND (exp, 1), max_p));
a1ab4c31
AC
3118
3119 case 3:
722356ce 3120 if (code == COND_EXPR)
a1ab4c31
AC
3121 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
3122 max_size (TREE_OPERAND (exp, 1), max_p),
3123 max_size (TREE_OPERAND (exp, 2), max_p));
ce3da0d0
EB
3124
3125 default:
3126 break;
a1ab4c31
AC
3127 }
3128
3129 /* Other tree classes cannot happen. */
3130 default:
3131 break;
3132 }
3133
3134 gcc_unreachable ();
3135}
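/* A sketch of a typical query, not part of the original file: the maximum of
   a conditional size expression is the MAX of the maxima of its arms, as
   computed by max_size above.  The helper is hypothetical and not compiled.  */
#if 0
static tree
example_max_of_conditional_size (tree cond, tree size_if_true, tree size_if_false)
{
  tree size = build3 (COND_EXPR, sizetype, cond, size_if_true, size_if_false);
  return max_size (size, true);
}
#endif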
3136\f
3137/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
3138 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
3139 Return a constructor for the template. */
3140
3141tree
3142build_template (tree template_type, tree array_type, tree expr)
3143{
9771b263 3144 vec<constructor_elt, va_gc> *template_elts = NULL;
a1ab4c31
AC
3145 tree bound_list = NULL_TREE;
3146 tree field;
3147
3148 while (TREE_CODE (array_type) == RECORD_TYPE
315cff15 3149 && (TYPE_PADDING_P (array_type)
a1ab4c31
AC
3150 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
3151 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
3152
3153 if (TREE_CODE (array_type) == ARRAY_TYPE
3154 || (TREE_CODE (array_type) == INTEGER_TYPE
3155 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
3156 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
3157
3158 /* First make the list for a CONSTRUCTOR for the template. Go down the
3159 field list of the template instead of the type chain because this
3160 array might be an Ada array of arrays and we can't tell where the
3161 nested arrays stop being the underlying object. */
3162
3163 for (field = TYPE_FIELDS (template_type); field;
3164 (bound_list
3165 ? (bound_list = TREE_CHAIN (bound_list))
3166 : (array_type = TREE_TYPE (array_type))),
910ad8de 3167 field = DECL_CHAIN (DECL_CHAIN (field)))
a1ab4c31
AC
3168 {
3169 tree bounds, min, max;
3170
3171 /* If we have a bound list, get the bounds from there. Likewise
3172 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
3173 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
3174 This will give us a maximum range. */
3175 if (bound_list)
3176 bounds = TREE_VALUE (bound_list);
3177 else if (TREE_CODE (array_type) == ARRAY_TYPE)
3178 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
3179 else if (expr && TREE_CODE (expr) == PARM_DECL
3180 && DECL_BY_COMPONENT_PTR_P (expr))
3181 bounds = TREE_TYPE (field);
3182 else
3183 gcc_unreachable ();
3184
3185 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
910ad8de 3186 max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));
a1ab4c31
AC
3187
3188 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
3189 substitute it from OBJECT. */
3190 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
3191 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
3192
0e228dd9 3193 CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
910ad8de 3194 CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
a1ab4c31
AC
3195 }
3196
0e228dd9 3197 return gnat_build_constructor (template_type, template_elts);
a1ab4c31
AC
3198}
3199\f
31a5a547
EB
3200/* Helper routine to make a descriptor field. FIELD_LIST is the list of decls
3201 being built; the new decl is chained on to the front of the list. */
3202
3203static tree
3204make_descriptor_field (const char *name, tree type, tree rec_type,
3205 tree initial, tree field_list)
3206{
3207 tree field
3208 = create_field_decl (get_identifier (name), type, rec_type, NULL_TREE,
3209 NULL_TREE, 0, 0);
3210
3211 DECL_INITIAL (field) = initial;
3212 DECL_CHAIN (field) = field_list;
3213 return field;
3214}
3215
58c8f770
EB
3216/* Build a 32-bit VMS descriptor from a Mechanism_Type, which must specify a
3217 descriptor type, and the GCC type of an object. Each FIELD_DECL in the
3218 type contains in its DECL_INITIAL the expression to use when a constructor
3219 is made for the type. GNAT_ENTITY is an entity used to print out an error
3220 message if the mechanism cannot be applied to an object of that type and
3221 also for the name. */
a1ab4c31
AC
3222
3223tree
d628c015 3224build_vms_descriptor32 (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
a1ab4c31
AC
3225{
3226 tree record_type = make_node (RECORD_TYPE);
31a5a547 3227 tree pointer32_type, pointer64_type;
788e5046 3228 tree field_list = NULL_TREE;
31a5a547
EB
3229 int klass, ndim, i, dtype = 0;
3230 tree inner_type, tem;
a1ab4c31 3231 tree *idx_arr;
a1ab4c31
AC
3232
3233 /* If TYPE is an unconstrained array, use the underlying array type. */
3234 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3235 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
3236
3237 /* If this is an array, compute the number of dimensions in the array,
3238 get the index types, and point to the inner type. */
3239 if (TREE_CODE (type) != ARRAY_TYPE)
3240 ndim = 0;
3241 else
3242 for (ndim = 1, inner_type = type;
3243 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
3244 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
3245 ndim++, inner_type = TREE_TYPE (inner_type))
3246 ;
3247
2bb1fc26 3248 idx_arr = XALLOCAVEC (tree, ndim);
a1ab4c31 3249
d628c015 3250 if (mech != By_Descriptor_NCA && mech != By_Short_Descriptor_NCA
a1ab4c31
AC
3251 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
3252 for (i = ndim - 1, inner_type = type;
3253 i >= 0;
3254 i--, inner_type = TREE_TYPE (inner_type))
3255 idx_arr[i] = TYPE_DOMAIN (inner_type);
3256 else
3257 for (i = 0, inner_type = type;
3258 i < ndim;
3259 i++, inner_type = TREE_TYPE (inner_type))
3260 idx_arr[i] = TYPE_DOMAIN (inner_type);
3261
3262 /* Now get the DTYPE value. */
3263 switch (TREE_CODE (type))
3264 {
3265 case INTEGER_TYPE:
3266 case ENUMERAL_TYPE:
01ddebf2 3267 case BOOLEAN_TYPE:
a1ab4c31
AC
3268 if (TYPE_VAX_FLOATING_POINT_P (type))
3269 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
3270 {
3271 case 6:
3272 dtype = 10;
3273 break;
3274 case 9:
3275 dtype = 11;
3276 break;
3277 case 15:
3278 dtype = 27;
3279 break;
3280 }
3281 else
3282 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
3283 {
3284 case 8:
3285 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
3286 break;
3287 case 16:
3288 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
3289 break;
3290 case 32:
3291 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
3292 break;
3293 case 64:
3294 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
3295 break;
3296 case 128:
3297 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
3298 break;
3299 }
3300 break;
3301
3302 case REAL_TYPE:
3303 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
3304 break;
3305
3306 case COMPLEX_TYPE:
3307 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
3308 && TYPE_VAX_FLOATING_POINT_P (type))
3309 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
3310 {
3311 case 6:
3312 dtype = 12;
3313 break;
3314 case 9:
3315 dtype = 13;
3316 break;
3317 case 15:
3318 dtype = 29;
3319 }
3320 else
 3321 	dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54 : 55;
3322 break;
3323
3324 case ARRAY_TYPE:
3325 dtype = 14;
3326 break;
3327
3328 default:
3329 break;
3330 }
3331
3332 /* Get the CLASS value. */
3333 switch (mech)
3334 {
3335 case By_Descriptor_A:
d628c015 3336 case By_Short_Descriptor_A:
c6bd4220 3337 klass = 4;
a1ab4c31
AC
3338 break;
3339 case By_Descriptor_NCA:
d628c015 3340 case By_Short_Descriptor_NCA:
c6bd4220 3341 klass = 10;
a1ab4c31
AC
3342 break;
3343 case By_Descriptor_SB:
d628c015 3344 case By_Short_Descriptor_SB:
c6bd4220 3345 klass = 15;
a1ab4c31
AC
3346 break;
3347 case By_Descriptor:
d628c015 3348 case By_Short_Descriptor:
a1ab4c31 3349 case By_Descriptor_S:
d628c015 3350 case By_Short_Descriptor_S:
a1ab4c31 3351 default:
c6bd4220 3352 klass = 1;
a1ab4c31
AC
3353 break;
3354 }
3355
58c8f770
EB
3356 /* Make the type for a descriptor for VMS. The first four fields are the
3357 same for all types. */
82b481ed
EB
3358 field_list
3359 = make_descriptor_field ("LENGTH", gnat_type_for_size (16, 1), record_type,
3360 size_in_bytes ((mech == By_Descriptor_A
3361 || mech == By_Short_Descriptor_A)
3362 ? inner_type : type),
3363 field_list);
3364 field_list
3365 = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1), record_type,
3366 size_int (dtype), field_list);
3367 field_list
3368 = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1), record_type,
3369 size_int (klass), field_list);
a1ab4c31 3370
a1ab4c31 3371 pointer32_type = build_pointer_type_for_mode (type, SImode, false);
31a5a547
EB
3372 pointer64_type = build_pointer_type_for_mode (type, DImode, false);
3373
3374 /* Ensure that only 32-bit pointers are passed in 32-bit descriptors. Note
3375 that we cannot build a template call to the CE routine as it would get a
3376 wrong source location; instead we use a second placeholder for it. */
3377 tem = build_unary_op (ADDR_EXPR, pointer64_type,
3378 build0 (PLACEHOLDER_EXPR, type));
3379 tem = build3 (COND_EXPR, pointer32_type,
f542c405
EB
3380 Pmode != SImode
3381 ? build_binary_op (GE_EXPR, boolean_type_node, tem,
3382 build_int_cstu (pointer64_type, 0x80000000))
3383 : boolean_false_node,
31a5a547
EB
3384 build0 (PLACEHOLDER_EXPR, void_type_node),
3385 convert (pointer32_type, tem));
a1ab4c31 3386
82b481ed 3387 field_list
31a5a547 3388 = make_descriptor_field ("POINTER", pointer32_type, record_type, tem,
82b481ed 3389 field_list);
a1ab4c31
AC
3390
3391 switch (mech)
3392 {
3393 case By_Descriptor:
d628c015 3394 case By_Short_Descriptor:
a1ab4c31 3395 case By_Descriptor_S:
d628c015 3396 case By_Short_Descriptor_S:
a1ab4c31
AC
3397 break;
3398
3399 case By_Descriptor_SB:
d628c015 3400 case By_Short_Descriptor_SB:
82b481ed
EB
3401 field_list
3402 = make_descriptor_field ("SB_L1", gnat_type_for_size (32, 1),
3403 record_type,
3404 (TREE_CODE (type) == ARRAY_TYPE
3405 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type))
3406 : size_zero_node),
3407 field_list);
3408 field_list
3409 = make_descriptor_field ("SB_U1", gnat_type_for_size (32, 1),
3410 record_type,
3411 (TREE_CODE (type) == ARRAY_TYPE
3412 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type))
3413 : size_zero_node),
3414 field_list);
a1ab4c31
AC
3415 break;
3416
3417 case By_Descriptor_A:
d628c015 3418 case By_Short_Descriptor_A:
a1ab4c31 3419 case By_Descriptor_NCA:
d628c015 3420 case By_Short_Descriptor_NCA:
82b481ed
EB
3421 field_list
3422 = make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
3423 record_type, size_zero_node, field_list);
3424
3425 field_list
3426 = make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
3427 record_type, size_zero_node, field_list);
3428
82b481ed
EB
3429 field_list
3430 = make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
3431 record_type,
3432 size_int ((mech == By_Descriptor_NCA
3433 || mech == By_Short_Descriptor_NCA)
3434 ? 0
3435 /* Set FL_COLUMN, FL_COEFF, and
3436 FL_BOUNDS. */
3437 : (TREE_CODE (type) == ARRAY_TYPE
3438 && TYPE_CONVENTION_FORTRAN_P
3439 (type)
3440 ? 224 : 192)),
3441 field_list);
3442
3443 field_list
3444 = make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
3445 record_type, size_int (ndim), field_list);
3446
3447 field_list
3448 = make_descriptor_field ("ARSIZE", gnat_type_for_size (32, 1),
3449 record_type, size_in_bytes (type),
3450 field_list);
a1ab4c31
AC
3451
3452 /* Now build a pointer to the 0,0,0... element. */
3453 tem = build0 (PLACEHOLDER_EXPR, type);
3454 for (i = 0, inner_type = type; i < ndim;
3455 i++, inner_type = TREE_TYPE (inner_type))
3456 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
3457 convert (TYPE_DOMAIN (inner_type), size_zero_node),
3458 NULL_TREE, NULL_TREE);
3459
82b481ed
EB
3460 field_list
3461 = make_descriptor_field ("A0", pointer32_type, record_type,
3462 build1 (ADDR_EXPR, pointer32_type, tem),
3463 field_list);
a1ab4c31
AC
3464
3465 /* Next come the addressing coefficients. */
3466 tem = size_one_node;
3467 for (i = 0; i < ndim; i++)
3468 {
3469 char fname[3];
3470 tree idx_length
3471 = size_binop (MULT_EXPR, tem,
3472 size_binop (PLUS_EXPR,
3473 size_binop (MINUS_EXPR,
3474 TYPE_MAX_VALUE (idx_arr[i]),
3475 TYPE_MIN_VALUE (idx_arr[i])),
3476 size_int (1)));
3477
d628c015
DR
 3478 	    fname[0] = ((mech == By_Descriptor_NCA
 3479 			 || mech == By_Short_Descriptor_NCA) ? 'S' : 'M');
a1ab4c31 3480 fname[1] = '0' + i, fname[2] = 0;
82b481ed
EB
3481 field_list
3482 = make_descriptor_field (fname, gnat_type_for_size (32, 1),
3483 record_type, idx_length, field_list);
a1ab4c31 3484
d628c015 3485 if (mech == By_Descriptor_NCA || mech == By_Short_Descriptor_NCA)
a1ab4c31
AC
3486 tem = idx_length;
3487 }
3488
3489 /* Finally here are the bounds. */
3490 for (i = 0; i < ndim; i++)
3491 {
3492 char fname[3];
3493
3494 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
82b481ed
EB
3495 field_list
3496 = make_descriptor_field (fname, gnat_type_for_size (32, 1),
3497 record_type, TYPE_MIN_VALUE (idx_arr[i]),
3498 field_list);
a1ab4c31
AC
3499
3500 fname[0] = 'U';
82b481ed
EB
3501 field_list
3502 = make_descriptor_field (fname, gnat_type_for_size (32, 1),
3503 record_type, TYPE_MAX_VALUE (idx_arr[i]),
3504 field_list);
a1ab4c31
AC
3505 }
3506 break;
3507
3508 default:
3509 post_error ("unsupported descriptor type for &", gnat_entity);
3510 }
3511
10069d53 3512 TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC");
788e5046 3513 finish_record_type (record_type, nreverse (field_list), 0, false);
a1ab4c31
AC
3514 return record_type;
3515}
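/* Illustrative only, not part of the original file: the fixed prologue of the
   32-bit descriptor built above, viewed as a plain C structure with made-up
   names.  The field order matches the FIELD_DECLs created above (the list is
   chained in reverse and passed through nreverse to finish_record_type); the
   class-specific fields follow this prologue.  */
#if 0
struct vms_descriptor32_prologue
{
  unsigned short length;   /* LENGTH: size in bytes of the element or object */
  unsigned char dtype;     /* DTYPE: data type code */
  unsigned char klass;     /* CLASS: 1 (S), 4 (A), 10 (NCA) or 15 (SB) */
  unsigned int pointer;    /* POINTER: 32-bit address of the data */
};
#endif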
3516
58c8f770
EB
3517/* Build a 64-bit VMS descriptor from a Mechanism_Type, which must specify a
3518 descriptor type, and the GCC type of an object. Each FIELD_DECL in the
3519 type contains in its DECL_INITIAL the expression to use when a constructor
3520 is made for the type. GNAT_ENTITY is an entity used to print out an error
3521 message if the mechanism cannot be applied to an object of that type and
3522 also for the name. */
6ca2b0a0
DR
3523
3524tree
d628c015 3525build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
6ca2b0a0 3526{
31a5a547 3527 tree record_type = make_node (RECORD_TYPE);
6ca2b0a0 3528 tree pointer64_type;
31a5a547
EB
3529 tree field_list = NULL_TREE;
3530 int klass, ndim, i, dtype = 0;
3531 tree inner_type, tem;
6ca2b0a0 3532 tree *idx_arr;
6ca2b0a0
DR
3533
3534 /* If TYPE is an unconstrained array, use the underlying array type. */
3535 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3536 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
3537
3538 /* If this is an array, compute the number of dimensions in the array,
3539 get the index types, and point to the inner type. */
3540 if (TREE_CODE (type) != ARRAY_TYPE)
3541 ndim = 0;
3542 else
3543 for (ndim = 1, inner_type = type;
3544 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
3545 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
3546 ndim++, inner_type = TREE_TYPE (inner_type))
3547 ;
3548
2bb1fc26 3549 idx_arr = XALLOCAVEC (tree, ndim);
6ca2b0a0
DR
3550
3551 if (mech != By_Descriptor_NCA
3552 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
3553 for (i = ndim - 1, inner_type = type;
3554 i >= 0;
3555 i--, inner_type = TREE_TYPE (inner_type))
3556 idx_arr[i] = TYPE_DOMAIN (inner_type);
3557 else
3558 for (i = 0, inner_type = type;
3559 i < ndim;
3560 i++, inner_type = TREE_TYPE (inner_type))
3561 idx_arr[i] = TYPE_DOMAIN (inner_type);
3562
3563 /* Now get the DTYPE value. */
3564 switch (TREE_CODE (type))
3565 {
3566 case INTEGER_TYPE:
3567 case ENUMERAL_TYPE:
01ddebf2 3568 case BOOLEAN_TYPE:
6ca2b0a0
DR
3569 if (TYPE_VAX_FLOATING_POINT_P (type))
3570 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
3571 {
3572 case 6:
3573 dtype = 10;
3574 break;
3575 case 9:
3576 dtype = 11;
3577 break;
3578 case 15:
3579 dtype = 27;
3580 break;
3581 }
3582 else
3583 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
3584 {
3585 case 8:
3586 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
3587 break;
3588 case 16:
3589 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
3590 break;
3591 case 32:
3592 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
3593 break;
3594 case 64:
3595 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
3596 break;
3597 case 128:
3598 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
3599 break;
3600 }
3601 break;
3602
3603 case REAL_TYPE:
3604 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
3605 break;
3606
3607 case COMPLEX_TYPE:
3608 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
3609 && TYPE_VAX_FLOATING_POINT_P (type))
3610 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
3611 {
3612 case 6:
3613 dtype = 12;
3614 break;
3615 case 9:
3616 dtype = 13;
3617 break;
3618 case 15:
3619 dtype = 29;
3620 }
3621 else
 3622 	dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54 : 55;
3623 break;
3624
3625 case ARRAY_TYPE:
3626 dtype = 14;
3627 break;
3628
3629 default:
3630 break;
3631 }
3632
3633 /* Get the CLASS value. */
3634 switch (mech)
3635 {
3636 case By_Descriptor_A:
c6bd4220 3637 klass = 4;
6ca2b0a0
DR
3638 break;
3639 case By_Descriptor_NCA:
c6bd4220 3640 klass = 10;
6ca2b0a0
DR
3641 break;
3642 case By_Descriptor_SB:
c6bd4220 3643 klass = 15;
6ca2b0a0
DR
3644 break;
3645 case By_Descriptor:
3646 case By_Descriptor_S:
3647 default:
c6bd4220 3648 klass = 1;
6ca2b0a0
DR
3649 break;
3650 }
3651
58c8f770 3652 /* Make the type for a 64-bit descriptor for VMS. The first six fields
6ca2b0a0 3653 are the same for all types. */
31a5a547 3654 field_list
788e5046 3655 = make_descriptor_field ("MBO", gnat_type_for_size (16, 1),
31a5a547
EB
3656 record_type, size_int (1), field_list);
3657 field_list
788e5046 3658 = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1),
31a5a547
EB
3659 record_type, size_int (dtype), field_list);
3660 field_list
788e5046 3661 = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1),
31a5a547
EB
3662 record_type, size_int (klass), field_list);
3663 field_list
788e5046 3664 = make_descriptor_field ("MBMO", gnat_type_for_size (32, 1),
f54ee980 3665 record_type, size_int (-1), field_list);
31a5a547 3666 field_list
788e5046 3667 = make_descriptor_field ("LENGTH", gnat_type_for_size (64, 1),
31a5a547 3668 record_type,
788e5046 3669 size_in_bytes (mech == By_Descriptor_A
82b481ed 3670 ? inner_type : type),
31a5a547 3671 field_list);
6ca2b0a0
DR
3672
3673 pointer64_type = build_pointer_type_for_mode (type, DImode, false);
3674
31a5a547
EB
3675 field_list
3676 = make_descriptor_field ("POINTER", pointer64_type, record_type,
788e5046 3677 build_unary_op (ADDR_EXPR, pointer64_type,
82b481ed 3678 build0 (PLACEHOLDER_EXPR, type)),
31a5a547 3679 field_list);
6ca2b0a0
DR
3680
3681 switch (mech)
3682 {
3683 case By_Descriptor:
3684 case By_Descriptor_S:
3685 break;
3686
3687 case By_Descriptor_SB:
31a5a547 3688 field_list
788e5046 3689 = make_descriptor_field ("SB_L1", gnat_type_for_size (64, 1),
31a5a547 3690 record_type,
788e5046
NF
3691 (TREE_CODE (type) == ARRAY_TYPE
3692 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type))
82b481ed 3693 : size_zero_node),
31a5a547
EB
3694 field_list);
3695 field_list
788e5046 3696 = make_descriptor_field ("SB_U1", gnat_type_for_size (64, 1),
31a5a547 3697 record_type,
788e5046
NF
3698 (TREE_CODE (type) == ARRAY_TYPE
3699 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type))
82b481ed 3700 : size_zero_node),
31a5a547 3701 field_list);
6ca2b0a0
DR
3702 break;
3703
3704 case By_Descriptor_A:
3705 case By_Descriptor_NCA:
31a5a547 3706 field_list
788e5046 3707 = make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
31a5a547 3708 record_type, size_zero_node, field_list);
788e5046 3709
31a5a547 3710 field_list
788e5046 3711 = make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
31a5a547 3712 record_type, size_zero_node, field_list);
788e5046
NF
3713
3714 dtype = (mech == By_Descriptor_NCA
3715 ? 0
3716 /* Set FL_COLUMN, FL_COEFF, and
3717 FL_BOUNDS. */
3718 : (TREE_CODE (type) == ARRAY_TYPE
3719 && TYPE_CONVENTION_FORTRAN_P (type)
3720 ? 224 : 192));
31a5a547 3721 field_list
788e5046 3722 = make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
31a5a547
EB
3723 record_type, size_int (dtype),
3724 field_list);
6ca2b0a0 3725
31a5a547 3726 field_list
788e5046 3727 = make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
31a5a547 3728 record_type, size_int (ndim), field_list);
788e5046 3729
31a5a547 3730 field_list
788e5046 3731 = make_descriptor_field ("MBZ", gnat_type_for_size (32, 1),
31a5a547
EB
3732 record_type, size_int (0), field_list);
3733 field_list
788e5046 3734 = make_descriptor_field ("ARSIZE", gnat_type_for_size (64, 1),
31a5a547
EB
3735 record_type, size_in_bytes (type),
3736 field_list);
6ca2b0a0
DR
3737
3738 /* Now build a pointer to the 0,0,0... element. */
3739 tem = build0 (PLACEHOLDER_EXPR, type);
3740 for (i = 0, inner_type = type; i < ndim;
3741 i++, inner_type = TREE_TYPE (inner_type))
3742 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
3743 convert (TYPE_DOMAIN (inner_type), size_zero_node),
3744 NULL_TREE, NULL_TREE);
3745
31a5a547
EB
3746 field_list
3747 = make_descriptor_field ("A0", pointer64_type, record_type,
788e5046 3748 build1 (ADDR_EXPR, pointer64_type, tem),
31a5a547 3749 field_list);
6ca2b0a0
DR
3750
3751 /* Next come the addressing coefficients. */
3752 tem = size_one_node;
3753 for (i = 0; i < ndim; i++)
3754 {
3755 char fname[3];
3756 tree idx_length
3757 = size_binop (MULT_EXPR, tem,
3758 size_binop (PLUS_EXPR,
3759 size_binop (MINUS_EXPR,
3760 TYPE_MAX_VALUE (idx_arr[i]),
3761 TYPE_MIN_VALUE (idx_arr[i])),
3762 size_int (1)));
3763
3764 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
3765 fname[1] = '0' + i, fname[2] = 0;
31a5a547 3766 field_list
788e5046 3767 = make_descriptor_field (fname, gnat_type_for_size (64, 1),
31a5a547 3768 record_type, idx_length, field_list);
6ca2b0a0
DR
3769
3770 if (mech == By_Descriptor_NCA)
3771 tem = idx_length;
3772 }
3773
3774 /* Finally here are the bounds. */
3775 for (i = 0; i < ndim; i++)
3776 {
3777 char fname[3];
3778
3779 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
31a5a547 3780 field_list
788e5046 3781 = make_descriptor_field (fname, gnat_type_for_size (64, 1),
31a5a547
EB
3782 record_type,
3783 TYPE_MIN_VALUE (idx_arr[i]), field_list);
6ca2b0a0
DR
3784
3785 fname[0] = 'U';
31a5a547 3786 field_list
788e5046 3787 = make_descriptor_field (fname, gnat_type_for_size (64, 1),
31a5a547
EB
3788 record_type,
3789 TYPE_MAX_VALUE (idx_arr[i]), field_list);
6ca2b0a0
DR
3790 }
3791 break;
3792
3793 default:
3794 post_error ("unsupported descriptor type for &", gnat_entity);
3795 }
3796
31a5a547
EB
3797 TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC64");
3798 finish_record_type (record_type, nreverse (field_list), 0, false);
3799 return record_type;
6ca2b0a0
DR
3800}
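/* Illustrative only, not part of the original file: the fixed prologue of the
   64-bit descriptor built above, as a plain C structure with made-up names.
   MBO and MBMO carry the fixed values 1 and -1 that convert_vms_descriptor
   tests below to tell a 64-bit descriptor from a 32-bit one.  */
#if 0
struct vms_descriptor64_prologue
{
  unsigned short mbo;          /* MBO: must be one */
  unsigned char dtype;         /* DTYPE: data type code */
  unsigned char klass;         /* CLASS: 1 (S), 4 (A), 10 (NCA) or 15 (SB) */
  int mbmo;                    /* MBMO: must be minus one */
  unsigned long long length;   /* LENGTH: size in bytes of the element or object */
  unsigned long long pointer;  /* POINTER: 64-bit address of the data */
};
#endif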
3801
31a5a547
EB
3802/* Fill in a VMS descriptor of GNU_TYPE for GNU_EXPR and return the result.
3803 GNAT_ACTUAL is the actual parameter for which the descriptor is built. */
a1ab4c31 3804
31a5a547
EB
3805tree
3806fill_vms_descriptor (tree gnu_type, tree gnu_expr, Node_Id gnat_actual)
a1ab4c31 3807{
9771b263 3808 vec<constructor_elt, va_gc> *v = NULL;
31a5a547 3809 tree field;
a1ab4c31 3810
31a5a547
EB
3811 gnu_expr = maybe_unconstrained_array (gnu_expr);
3812 gnu_expr = gnat_protect_expr (gnu_expr);
3813 gnat_mark_addressable (gnu_expr);
3814
3815 /* We may need to substitute both GNU_EXPR and a CALL_EXPR to the raise CE
3816 routine in case we have a 32-bit descriptor. */
3817 gnu_expr = build2 (COMPOUND_EXPR, void_type_node,
3818 build_call_raise (CE_Range_Check_Failed, gnat_actual,
3819 N_Raise_Constraint_Error),
3820 gnu_expr);
3821
3822 for (field = TYPE_FIELDS (gnu_type); field; field = DECL_CHAIN (field))
3823 {
3824 tree value
3825 = convert (TREE_TYPE (field),
3826 SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_INITIAL (field),
3827 gnu_expr));
3828 CONSTRUCTOR_APPEND_ELT (v, field, value);
3829 }
3830
3831 return gnat_build_constructor (gnu_type, v);
a1ab4c31
AC
3832}
3833
d628c015
DR
3834/* Convert GNU_EXPR, a pointer to a 64-bit VMS descriptor, to GNU_TYPE, a
3835 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3836 which the VMS descriptor is passed. */
a1ab4c31
AC
3837
3838static tree
d628c015
DR
3839convert_vms_descriptor64 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3840{
3841 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3842 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3843 /* The CLASS field is the 3rd field in the descriptor. */
910ad8de 3844 tree klass = DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type)));
d628c015 3845 /* The POINTER field is the 6th field in the descriptor. */
910ad8de 3846 tree pointer = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (klass)));
d628c015
DR
3847
3848 /* Retrieve the value of the POINTER field. */
3849 tree gnu_expr64
58c8f770 3850 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
d628c015
DR
3851
3852 if (POINTER_TYPE_P (gnu_type))
3853 return convert (gnu_type, gnu_expr64);
3854
315cff15 3855 else if (TYPE_IS_FAT_POINTER_P (gnu_type))
d628c015
DR
3856 {
3857 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
7d76717d 3858 tree p_bounds_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (gnu_type)));
d628c015
DR
3859 tree template_type = TREE_TYPE (p_bounds_type);
3860 tree min_field = TYPE_FIELDS (template_type);
7d76717d 3861 tree max_field = DECL_CHAIN (TYPE_FIELDS (template_type));
6bf68a93 3862 tree template_tree, template_addr, aflags, dimct, t, u;
d628c015 3863 /* See the head comment of build_vms_descriptor. */
c6bd4220 3864 int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
d628c015 3865 tree lfield, ufield;
9771b263 3866 vec<constructor_elt, va_gc> *v;
d628c015 3867
86060344 3868 /* Convert POINTER to the pointer-to-array type. */
d628c015
DR
3869 gnu_expr64 = convert (p_array_type, gnu_expr64);
3870
c6bd4220 3871 switch (iklass)
d628c015
DR
3872 {
3873 case 1: /* Class S */
3874 case 15: /* Class SB */
3875 /* Build {1, LENGTH} template; LENGTH64 is the 5th field. */
9771b263 3876 vec_alloc (v, 2);
910ad8de 3877 t = DECL_CHAIN (DECL_CHAIN (klass));
d628c015 3878 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
0e228dd9
NF
3879 CONSTRUCTOR_APPEND_ELT (v, min_field,
3880 convert (TREE_TYPE (min_field),
3881 integer_one_node));
3882 CONSTRUCTOR_APPEND_ELT (v, max_field,
3883 convert (TREE_TYPE (max_field), t));
3884 template_tree = gnat_build_constructor (template_type, v);
6bf68a93 3885 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
d628c015
DR
3886
3887 /* For class S, we are done. */
c6bd4220 3888 if (iklass == 1)
d628c015
DR
3889 break;
3890
3891 /* Test that we really have a SB descriptor, like DEC Ada. */
c6bd4220
EB
3892 t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
3893 u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
1139f2e8 3894 u = build_binary_op (EQ_EXPR, boolean_type_node, t, u);
d628c015
DR
3895 /* If so, there is already a template in the descriptor and
3896 it is located right after the POINTER field. The fields are
 3897 	     64 bits so they must be repacked. */
7d76717d 3898 t = DECL_CHAIN (pointer);
d628c015
DR
3899 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3900 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3901
7d76717d 3902 t = DECL_CHAIN (t);
d628c015
DR
3903 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3904 ufield = convert
910ad8de 3905 (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type))), ufield);
d628c015
DR
3906
3907 /* Build the template in the form of a constructor. */
9771b263 3908 vec_alloc (v, 2);
0e228dd9 3909 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (template_type), lfield);
7d76717d 3910 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (template_type)),
0e228dd9
NF
3911 ufield);
3912 template_tree = gnat_build_constructor (template_type, v);
d628c015
DR
3913
3914 /* Otherwise use the {1, LENGTH} template we build above. */
3915 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3916 build_unary_op (ADDR_EXPR, p_bounds_type,
6bf68a93 3917 template_tree),
d628c015
DR
3918 template_addr);
3919 break;
3920
3921 case 4: /* Class A */
3922 /* The AFLAGS field is the 3rd field after the pointer in the
3923 descriptor. */
910ad8de 3924 t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer)));
d628c015
DR
3925 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3926 /* The DIMCT field is the next field in the descriptor after
3927 aflags. */
7d76717d 3928 t = DECL_CHAIN (t);
d628c015
DR
3929 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3930 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3931 or FL_COEFF or FL_BOUNDS not set. */
3932 u = build_int_cst (TREE_TYPE (aflags), 192);
1139f2e8
EB
3933 u = build_binary_op (TRUTH_OR_EXPR, boolean_type_node,
3934 build_binary_op (NE_EXPR, boolean_type_node,
d628c015
DR
3935 dimct,
3936 convert (TREE_TYPE (dimct),
3937 size_one_node)),
1139f2e8 3938 build_binary_op (NE_EXPR, boolean_type_node,
d628c015
DR
3939 build2 (BIT_AND_EXPR,
3940 TREE_TYPE (aflags),
3941 aflags, u),
3942 u));
3943 /* There is already a template in the descriptor and it is located
 3944 	     in block 3. The fields are 64 bits so they must be repacked. */
910ad8de 3945 t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN
d628c015
DR
3946 (t)))));
3947 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3948 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3949
7d76717d 3950 t = DECL_CHAIN (t);
d628c015
DR
3951 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3952 ufield = convert
910ad8de 3953 (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type))), ufield);
d628c015
DR
3954
3955 /* Build the template in the form of a constructor. */
9771b263 3956 vec_alloc (v, 2);
0e228dd9 3957 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (template_type), lfield);
910ad8de 3958 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (template_type)),
0e228dd9
NF
3959 ufield);
3960 template_tree = gnat_build_constructor (template_type, v);
f76d6e6f 3961 template_tree = build3 (COND_EXPR, template_type, u,
d628c015
DR
3962 build_call_raise (CE_Length_Check_Failed, Empty,
3963 N_Raise_Constraint_Error),
6bf68a93 3964 template_tree);
c6bd4220
EB
3965 template_addr
3966 = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
d628c015
DR
3967 break;
3968
3969 case 10: /* Class NCA */
3970 default:
3971 post_error ("unsupported descriptor type for &", gnat_subprog);
3972 template_addr = integer_zero_node;
3973 break;
3974 }
3975
3976 /* Build the fat pointer in the form of a constructor. */
9771b263 3977 vec_alloc (v, 2);
0e228dd9 3978 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (gnu_type), gnu_expr64);
910ad8de 3979 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (gnu_type)),
0e228dd9
NF
3980 template_addr);
3981 return gnat_build_constructor (gnu_type, v);
d628c015
DR
3982 }
3983
3984 else
3985 gcc_unreachable ();
3986}
3987
3988/* Convert GNU_EXPR, a pointer to a 32-bit VMS descriptor, to GNU_TYPE, a
3989 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3990 which the VMS descriptor is passed. */
3991
3992static tree
3993convert_vms_descriptor32 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
a1ab4c31
AC
3994{
3995 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3996 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3997 /* The CLASS field is the 3rd field in the descriptor. */
910ad8de 3998 tree klass = DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type)));
a1ab4c31 3999 /* The POINTER field is the 4th field in the descriptor. */
910ad8de 4000 tree pointer = DECL_CHAIN (klass);
a1ab4c31
AC
4001
4002 /* Retrieve the value of the POINTER field. */
d628c015 4003 tree gnu_expr32
a1ab4c31
AC
4004 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
4005
4006 if (POINTER_TYPE_P (gnu_type))
d628c015 4007 return convert (gnu_type, gnu_expr32);
a1ab4c31 4008
315cff15 4009 else if (TYPE_IS_FAT_POINTER_P (gnu_type))
a1ab4c31
AC
4010 {
4011 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
7d76717d 4012 tree p_bounds_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (gnu_type)));
a1ab4c31
AC
4013 tree template_type = TREE_TYPE (p_bounds_type);
4014 tree min_field = TYPE_FIELDS (template_type);
7d76717d 4015 tree max_field = DECL_CHAIN (TYPE_FIELDS (template_type));
6bf68a93 4016 tree template_tree, template_addr, aflags, dimct, t, u;
a1ab4c31 4017 /* See the head comment of build_vms_descriptor. */
c6bd4220 4018 int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
9771b263 4019 vec<constructor_elt, va_gc> *v;
a1ab4c31 4020
86060344 4021 /* Convert POINTER to the pointer-to-array type. */
d628c015 4022 gnu_expr32 = convert (p_array_type, gnu_expr32);
a1ab4c31 4023
c6bd4220 4024 switch (iklass)
a1ab4c31
AC
4025 {
4026 case 1: /* Class S */
4027 case 15: /* Class SB */
4028 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
9771b263 4029 vec_alloc (v, 2);
a1ab4c31
AC
4030 t = TYPE_FIELDS (desc_type);
4031 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
0e228dd9
NF
4032 CONSTRUCTOR_APPEND_ELT (v, min_field,
4033 convert (TREE_TYPE (min_field),
4034 integer_one_node));
4035 CONSTRUCTOR_APPEND_ELT (v, max_field,
4036 convert (TREE_TYPE (max_field), t));
4037 template_tree = gnat_build_constructor (template_type, v);
6bf68a93 4038 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
a1ab4c31
AC
4039
4040 /* For class S, we are done. */
c6bd4220 4041 if (iklass == 1)
a1ab4c31
AC
4042 break;
4043
4044 /* Test that we really have a SB descriptor, like DEC Ada. */
c6bd4220
EB
4045 t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
4046 u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
1139f2e8 4047 u = build_binary_op (EQ_EXPR, boolean_type_node, t, u);
a1ab4c31
AC
4048 /* If so, there is already a template in the descriptor and
4049 it is located right after the POINTER field. */
7d76717d 4050 t = DECL_CHAIN (pointer);
c6bd4220
EB
4051 template_tree
4052 = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
a1ab4c31
AC
4053 /* Otherwise use the {1, LENGTH} template we build above. */
4054 template_addr = build3 (COND_EXPR, p_bounds_type, u,
4055 build_unary_op (ADDR_EXPR, p_bounds_type,
6bf68a93 4056 template_tree),
a1ab4c31
AC
4057 template_addr);
4058 break;
4059
4060 case 4: /* Class A */
4061 /* The AFLAGS field is the 7th field in the descriptor. */
910ad8de 4062 t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer)));
a1ab4c31
AC
4063 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
4064 /* The DIMCT field is the 8th field in the descriptor. */
7d76717d 4065 t = DECL_CHAIN (t);
a1ab4c31
AC
4066 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
4067 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
4068 or FL_COEFF or FL_BOUNDS not set. */
4069 u = build_int_cst (TREE_TYPE (aflags), 192);
1139f2e8
EB
4070 u = build_binary_op (TRUTH_OR_EXPR, boolean_type_node,
4071 build_binary_op (NE_EXPR, boolean_type_node,
a1ab4c31
AC
4072 dimct,
4073 convert (TREE_TYPE (dimct),
4074 size_one_node)),
1139f2e8 4075 build_binary_op (NE_EXPR, boolean_type_node,
a1ab4c31
AC
4076 build2 (BIT_AND_EXPR,
4077 TREE_TYPE (aflags),
4078 aflags, u),
4079 u));
a1ab4c31
AC
4080 /* There is already a template in the descriptor and it is
4081 located at the start of block 3 (12th field). */
910ad8de 4082 t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (t))));
c6bd4220
EB
4083 template_tree
4084 = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
f76d6e6f 4085 template_tree = build3 (COND_EXPR, TREE_TYPE (t), u,
d628c015
DR
4086 build_call_raise (CE_Length_Check_Failed, Empty,
4087 N_Raise_Constraint_Error),
6bf68a93 4088 template_tree);
c6bd4220
EB
4089 template_addr
4090 = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
a1ab4c31
AC
4091 break;
4092
4093 case 10: /* Class NCA */
4094 default:
4095 post_error ("unsupported descriptor type for &", gnat_subprog);
4096 template_addr = integer_zero_node;
4097 break;
4098 }
4099
4100 /* Build the fat pointer in the form of a constructor. */
9771b263 4101 vec_alloc (v, 2);
0e228dd9 4102 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (gnu_type), gnu_expr32);
910ad8de 4103 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (gnu_type)),
0e228dd9 4104 template_addr);
d628c015 4105
0e228dd9 4106 return gnat_build_constructor (gnu_type, v);
a1ab4c31
AC
4107 }
4108
4109 else
4110 gcc_unreachable ();
4111}
4112
a981c964
EB
4113/* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
4114 pointer or fat pointer type. GNU_EXPR_ALT_TYPE is the alternate (32-bit)
491f54a7
EB
4115 pointer type of GNU_EXPR. GNAT_SUBPROG is the subprogram to which the
4116 descriptor is passed. */
d628c015 4117
f3d34576 4118tree
a981c964 4119convert_vms_descriptor (tree gnu_type, tree gnu_expr, tree gnu_expr_alt_type,
491f54a7 4120 Entity_Id gnat_subprog)
d628c015
DR
4121{
4122 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
4123 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
4124 tree mbo = TYPE_FIELDS (desc_type);
4125 const char *mbostr = IDENTIFIER_POINTER (DECL_NAME (mbo));
910ad8de 4126 tree mbmo = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (mbo)));
491f54a7 4127 tree is64bit, gnu_expr32, gnu_expr64;
d628c015 4128
a981c964
EB
 4129   /* If the field name is not MBO, this is a 32-bit descriptor and there is
 4130      no alternate. Otherwise, the primary is 64-bit and the alternate 32-bit. */
d628c015 4131 if (strcmp (mbostr, "MBO") != 0)
0c700259 4132 {
491f54a7 4133 tree ret = convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
0c700259
EB
4134 return ret;
4135 }
d628c015 4136
a981c964 4137 /* Build the test for 64-bit descriptor. */
d628c015
DR
4138 mbo = build3 (COMPONENT_REF, TREE_TYPE (mbo), desc, mbo, NULL_TREE);
4139 mbmo = build3 (COMPONENT_REF, TREE_TYPE (mbmo), desc, mbmo, NULL_TREE);
a981c964 4140 is64bit
1139f2e8
EB
4141 = build_binary_op (TRUTH_ANDIF_EXPR, boolean_type_node,
4142 build_binary_op (EQ_EXPR, boolean_type_node,
a981c964
EB
4143 convert (integer_type_node, mbo),
4144 integer_one_node),
1139f2e8 4145 build_binary_op (EQ_EXPR, boolean_type_node,
a981c964
EB
4146 convert (integer_type_node, mbmo),
4147 integer_minus_one_node));
4148
4149 /* Build the 2 possible end results. */
491f54a7 4150 gnu_expr64 = convert_vms_descriptor64 (gnu_type, gnu_expr, gnat_subprog);
a981c964 4151 gnu_expr = fold_convert (gnu_expr_alt_type, gnu_expr);
491f54a7 4152 gnu_expr32 = convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
a981c964 4153 return build3 (COND_EXPR, gnu_type, is64bit, gnu_expr64, gnu_expr32);
d628c015 4154}
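/* The run-time discrimination implemented just above, restated as plain C over
   the prologue fields for clarity; the helper is hypothetical and the block is
   not compiled.  */
#if 0
static int
example_descriptor_is_64bit (unsigned short mbo, int mbmo)
{
  /* A 64-bit descriptor has MBO == 1 and MBMO == -1; anything else is
     handled as a 32-bit descriptor.  */
  return mbo == 1 && mbmo == -1;
}
#endif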
a1ab4c31 4155\f
928dfa4b
EB
4156/* Build a type to be used to represent an aliased object whose nominal type
4157 is an unconstrained array. This consists of a RECORD_TYPE containing a
4158 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
 4159    If OBJECT_TYPE is that of an unconstrained array, this is used to represent
4160 an arbitrary unconstrained object. Use NAME as the name of the record.
4161 DEBUG_INFO_P is true if we need to write debug information for the type. */
a1ab4c31
AC
4162
4163tree
928dfa4b
EB
4164build_unc_object_type (tree template_type, tree object_type, tree name,
4165 bool debug_info_p)
a1ab4c31
AC
4166{
4167 tree type = make_node (RECORD_TYPE);
da01bfee
EB
4168 tree template_field
4169 = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
4170 NULL_TREE, NULL_TREE, 0, 1);
4171 tree array_field
4172 = create_field_decl (get_identifier ("ARRAY"), object_type, type,
4173 NULL_TREE, NULL_TREE, 0, 1);
a1ab4c31
AC
4174
4175 TYPE_NAME (type) = name;
4176 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
910ad8de 4177 DECL_CHAIN (template_field) = array_field;
928dfa4b
EB
4178 finish_record_type (type, template_field, 0, true);
4179
4180 /* Declare it now since it will never be declared otherwise. This is
4181 necessary to ensure that its subtrees are properly marked. */
74746d49 4182 create_type_decl (name, type, true, debug_info_p, Empty);
a1ab4c31
AC
4183
4184 return type;
4185}
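/* Illustrative only, not part of the original file: the conceptual shape of
   the record built above for a one-dimensional aliased unconstrained array of
   integers; the struct name and bound types are made up for the example.  */
#if 0
struct unc_object_example
{
  struct { int lb0, ub0; } bounds;  /* BOUNDS: the template (one bound pair) */
  int array[];                      /* ARRAY: ub0 - lb0 + 1 elements of data */
};
#endif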
4186
4187/* Same, taking a thin or fat pointer type instead of a template type. */
4188
4189tree
4190build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
928dfa4b 4191 tree name, bool debug_info_p)
a1ab4c31
AC
4192{
4193 tree template_type;
4194
315cff15 4195 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
a1ab4c31
AC
4196
4197 template_type
315cff15 4198 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
910ad8de 4199 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
a1ab4c31 4200 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
928dfa4b
EB
4201
4202 return
4203 build_unc_object_type (template_type, object_type, name, debug_info_p);
a1ab4c31 4204}
a1ab4c31 4205\f
229077b0
EB
4206/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
4207 In the normal case this is just two adjustments, but we have more to
4208 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
a1ab4c31
AC
4209
4210void
4211update_pointer_to (tree old_type, tree new_type)
4212{
4213 tree ptr = TYPE_POINTER_TO (old_type);
4214 tree ref = TYPE_REFERENCE_TO (old_type);
aeecf17c 4215 tree t;
a1ab4c31
AC
4216
4217 /* If this is the main variant, process all the other variants first. */
4218 if (TYPE_MAIN_VARIANT (old_type) == old_type)
aeecf17c
EB
4219 for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
4220 update_pointer_to (t, new_type);
a1ab4c31 4221
229077b0 4222 /* If no pointers and no references, we are done. */
a1ab4c31
AC
4223 if (!ptr && !ref)
4224 return;
4225
 4226   /* Merge the old type qualifiers into the new type.
4227
4228 Each old variant has qualifiers for specific reasons, and the new
229077b0 4229 designated type as well. Each set of qualifiers represents useful
a1ab4c31
AC
4230 information grabbed at some point, and merging the two simply unifies
4231 these inputs into the final type description.
4232
4233 Consider for instance a volatile type frozen after an access to constant
229077b0
EB
4234 type designating it; after the designated type's freeze, we get here with
4235 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
4236 when the access type was processed. We will make a volatile and readonly
a1ab4c31
AC
4237 designated type, because that's what it really is.
4238
229077b0
EB
4239 We might also get here for a non-dummy OLD_TYPE variant with different
4240 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
a1ab4c31 4241 to private record type elaboration (see the comments around the call to
229077b0
EB
4242 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
4243 the qualifiers in those cases too, to avoid accidentally discarding the
4244 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
4245 new_type
4246 = build_qualified_type (new_type,
4247 TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
4248
4249 /* If old type and new type are identical, there is nothing to do. */
a1ab4c31
AC
4250 if (old_type == new_type)
4251 return;
4252
4253 /* Otherwise, first handle the simple case. */
4254 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
4255 {
aeecf17c
EB
4256 tree new_ptr, new_ref;
4257
4258 /* If pointer or reference already points to new type, nothing to do.
4259 This can happen as update_pointer_to can be invoked multiple times
4260 on the same couple of types because of the type variants. */
4261 if ((ptr && TREE_TYPE (ptr) == new_type)
4262 || (ref && TREE_TYPE (ref) == new_type))
4263 return;
4264
4265 /* Chain PTR and its variants at the end. */
4266 new_ptr = TYPE_POINTER_TO (new_type);
4267 if (new_ptr)
4268 {
4269 while (TYPE_NEXT_PTR_TO (new_ptr))
4270 new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
4271 TYPE_NEXT_PTR_TO (new_ptr) = ptr;
4272 }
4273 else
4274 TYPE_POINTER_TO (new_type) = ptr;
a1ab4c31 4275
aeecf17c 4276 /* Now adjust them. */
a1ab4c31 4277 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
aeecf17c 4278 for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
50179d58
EB
4279 {
4280 TREE_TYPE (t) = new_type;
4281 if (TYPE_NULL_BOUNDS (t))
4282 TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
4283 }
de9528f0 4284
aeecf17c
EB
4285 /* Chain REF and its variants at the end. */
4286 new_ref = TYPE_REFERENCE_TO (new_type);
4287 if (new_ref)
4288 {
4289 while (TYPE_NEXT_REF_TO (new_ref))
4290 new_ref = TYPE_NEXT_REF_TO (new_ref);
4291 TYPE_NEXT_REF_TO (new_ref) = ref;
4292 }
4293 else
4294 TYPE_REFERENCE_TO (new_type) = ref;
4295
4296 /* Now adjust them. */
a1ab4c31 4297 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
aeecf17c
EB
4298 for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
4299 TREE_TYPE (t) = new_type;
de9528f0
EB
4300
4301 TYPE_POINTER_TO (old_type) = NULL_TREE;
3bd6ca3f 4302 TYPE_REFERENCE_TO (old_type) = NULL_TREE;
a1ab4c31
AC
4303 }
4304
aeecf17c
EB
4305 /* Now deal with the unconstrained array case. In this case the pointer
4306 is actually a record where both fields are pointers to dummy nodes.
e3edbd56
EB
4307 Turn them into pointers to the correct types using update_pointer_to.
4308 Likewise for the pointer to the object record (thin pointer). */
a1ab4c31
AC
4309 else
4310 {
e3edbd56 4311 tree new_ptr = TYPE_POINTER_TO (new_type);
aeecf17c
EB
4312
4313 gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));
4314
e3edbd56 4315 /* If PTR already points to NEW_TYPE, nothing to do. This can happen
aeecf17c
EB
4316 since update_pointer_to can be invoked multiple times on the same
4317 couple of types because of the type variants. */
4318 if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
4319 return;
4320
a1ab4c31 4321 update_pointer_to
e3edbd56
EB
4322 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
4323 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));
a1ab4c31 4324
a1ab4c31 4325 update_pointer_to
e3edbd56
EB
4326 (TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
4327 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));
aeecf17c 4328
e3edbd56
EB
4329 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
4330 TYPE_OBJECT_RECORD_TYPE (new_type));
a1ab4c31 4331
e3edbd56 4332 TYPE_POINTER_TO (old_type) = NULL_TREE;
a1ab4c31
AC
4333 }
4334}
4335\f
8df2e902
EB
4336/* Convert EXPR, a pointer to a constrained array, into a pointer to an
4337 unconstrained one. This involves making or finding a template. */
a1ab4c31
AC
4338
4339static tree
4340convert_to_fat_pointer (tree type, tree expr)
4341{
910ad8de 4342 tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
8df2e902 4343 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
a1ab4c31 4344 tree etype = TREE_TYPE (expr);
6bf68a93 4345 tree template_tree;
9771b263
DN
4346 vec<constructor_elt, va_gc> *v;
4347 vec_alloc (v, 2);
a1ab4c31 4348
50179d58
EB
4349 /* If EXPR is null, make a fat pointer that contains a null pointer to the
4350 array (compare_fat_pointers ensures that this is the full discriminant)
4351 and a valid pointer to the bounds. This latter property is necessary
4352 since the compiler can hoist the load of the bounds done through it. */
a1ab4c31 4353 if (integer_zerop (expr))
0e228dd9 4354 {
50179d58
EB
4355 tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4356 tree null_bounds, t;
4357
4358 if (TYPE_NULL_BOUNDS (ptr_template_type))
4359 null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
4360 else
4361 {
4362 /* The template type can still be dummy at this point so we build an
4363 empty constructor. The middle-end will fill it in with zeros. */
9771b263
DN
4364 t = build_constructor (template_type,
4365 NULL);
50179d58
EB
4366 TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
4367 null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
4368 SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
4369 }
4370
0e228dd9 4371 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
50179d58
EB
4372 fold_convert (p_array_type, null_pointer_node));
4373 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
4374 t = build_constructor (type, v);
4375 /* Do not set TREE_CONSTANT so as to force T to static memory. */
4376 TREE_CONSTANT (t) = 0;
4377 TREE_STATIC (t) = 1;
4378
4379 return t;
0e228dd9 4380 }
a1ab4c31 4381
0d7de0e1
EB
4382 /* If EXPR is a thin pointer, make template and data from the record. */
4383 if (TYPE_IS_THIN_POINTER_P (etype))
a1ab4c31 4384 {
0d7de0e1 4385 tree field = TYPE_FIELDS (TREE_TYPE (etype));
a1ab4c31 4386
7d7a1fe8 4387 expr = gnat_protect_expr (expr);
a1ab4c31
AC
4388 if (TREE_CODE (expr) == ADDR_EXPR)
4389 expr = TREE_OPERAND (expr, 0);
4390 else
2b45154d
EB
4391 {
4392 /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
4393 the thin pointer value has been shifted so we first need to shift
4394 it back to get the template address. */
4395 if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)))
4396 expr
4397 = build_binary_op (POINTER_PLUS_EXPR, etype, expr,
4398 fold_build1 (NEGATE_EXPR, sizetype,
4399 byte_position
4400 (DECL_CHAIN (field))));
4401 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
4402 }
a1ab4c31 4403
0d7de0e1 4404 template_tree = build_component_ref (expr, NULL_TREE, field, false);
a1ab4c31
AC
4405 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
4406 build_component_ref (expr, NULL_TREE,
0d7de0e1 4407 DECL_CHAIN (field), false));
a1ab4c31 4408 }
8df2e902
EB
4409
4410 /* Otherwise, build the constructor for the template. */
a1ab4c31 4411 else
6bf68a93 4412 template_tree = build_template (template_type, TREE_TYPE (etype), expr);
a1ab4c31 4413
8df2e902 4414 /* The final result is a constructor for the fat pointer.
a1ab4c31 4415
8df2e902
EB
4416 If EXPR is an argument of a foreign convention subprogram, the type it
4417 points to is directly the component type. In this case, the expression
a1ab4c31 4418 type may not match the corresponding FIELD_DECL type at this point, so we
8df2e902 4419 call "convert" here to fix that up if necessary. This type consistency is
a1ab4c31 4420 required, for instance because it ensures that possible later folding of
8df2e902 4421 COMPONENT_REFs against this constructor always yields something of the
a1ab4c31
AC
4422 same type as the initial reference.
4423
8df2e902
EB
4424 Note that the call to "build_template" above is still fine because it
4425 will only refer to the provided TEMPLATE_TYPE in this case. */
0e228dd9
NF
4426 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4427 convert (p_array_type, expr));
910ad8de 4428 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
0e228dd9
NF
4429 build_unary_op (ADDR_EXPR, NULL_TREE,
4430 template_tree));
4431 return gnat_build_constructor (type, v);
a1ab4c31
AC
4432}
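/* Illustrative only, not part of the original file: the two-field shape of the
   fat pointer filled in by the constructor above, written as a plain C
   structure with made-up member types.  */
#if 0
struct fat_pointer_example
{
  void *p_array;   /* points to the array data */
  void *p_bounds;  /* points to the bounds template */
};
#endif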
4433\f
a1ab4c31
AC
4434/* Create an expression whose value is that of EXPR,
4435 converted to type TYPE. The TREE_TYPE of the value
4436 is always TYPE. This function implements all reasonable
4437 conversions; callers should filter out those that are
4438 not permitted by the language being compiled. */
4439
4440tree
4441convert (tree type, tree expr)
4442{
a1ab4c31
AC
4443 tree etype = TREE_TYPE (expr);
4444 enum tree_code ecode = TREE_CODE (etype);
c34f3839 4445 enum tree_code code = TREE_CODE (type);
a1ab4c31 4446
c34f3839
EB
4447 /* If the expression is already of the right type, we are done. */
4448 if (etype == type)
a1ab4c31
AC
4449 return expr;
4450
4451 /* If both input and output have padding and are of variable size, do this
4452 as an unchecked conversion. Likewise if one is a mere variant of the
4453 other, so we avoid a pointless unpad/repad sequence. */
4454 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
315cff15 4455 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
a1ab4c31
AC
4456 && (!TREE_CONSTANT (TYPE_SIZE (type))
4457 || !TREE_CONSTANT (TYPE_SIZE (etype))
842d4ee2 4458 || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
a1ab4c31
AC
4459 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
4460 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
4461 ;
4462
f88facfe
EB
4463 /* If the output type has padding, convert to the inner type and make a
4464 constructor to build the record, unless a variable size is involved. */
315cff15 4465 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
a1ab4c31 4466 {
9771b263 4467 vec<constructor_elt, va_gc> *v;
0e228dd9 4468
a1ab4c31
AC
4469 /* If we previously converted from another type and our type is
4470 of variable size, remove the conversion to avoid the need for
f88facfe 4471 variable-sized temporaries. Likewise for a conversion between
a1ab4c31
AC
4472 original and packable version. */
4473 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4474 && (!TREE_CONSTANT (TYPE_SIZE (type))
4475 || (ecode == RECORD_TYPE
4476 && TYPE_NAME (etype)
4477 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
4478 expr = TREE_OPERAND (expr, 0);
4479
4480 /* If we are just removing the padding from expr, convert the original
4481 object if we have variable size in order to avoid the need for some
f88facfe 4482 variable-sized temporaries. Likewise if the padding is a variant
a1ab4c31
AC
4483 of the other, so we avoid a pointless unpad/repad sequence. */
4484 if (TREE_CODE (expr) == COMPONENT_REF
a1ab4c31
AC
4485 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
4486 && (!TREE_CONSTANT (TYPE_SIZE (type))
842d4ee2
EB
4487 || TYPE_MAIN_VARIANT (type)
4488 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))
a1ab4c31
AC
4489 || (ecode == RECORD_TYPE
4490 && TYPE_NAME (etype)
4491 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
4492 return convert (type, TREE_OPERAND (expr, 0));
4493
431cfac1
EB
4494 /* If the inner type is of self-referential size and the expression type
4495 is a record, do this as an unchecked conversion. But first pad the
4496 expression if possible to have the same size on both sides. */
c34f3839 4497 if (ecode == RECORD_TYPE
f88facfe 4498 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
431cfac1 4499 {
980a0501 4500 if (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST)
431cfac1 4501 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
980a0501
EB
4502 false, false, false, true),
4503 expr);
431cfac1
EB
4504 return unchecked_convert (type, expr, false);
4505 }
a1ab4c31 4506
f88facfe
EB
4507 /* If we are converting between array types with variable size, do the
4508 final conversion as an unchecked conversion, again to avoid the need
4509 for some variable-sized temporaries. If valid, this conversion is
4510 very likely purely technical and without real effects. */
c34f3839 4511 if (ecode == ARRAY_TYPE
f88facfe
EB
4512 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
4513 && !TREE_CONSTANT (TYPE_SIZE (etype))
4514 && !TREE_CONSTANT (TYPE_SIZE (type)))
4515 return unchecked_convert (type,
4516 convert (TREE_TYPE (TYPE_FIELDS (type)),
4517 expr),
4518 false);
4519
9771b263 4520 vec_alloc (v, 1);
0e228dd9
NF
4521 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4522 convert (TREE_TYPE (TYPE_FIELDS (type)), expr));
4523 return gnat_build_constructor (type, v);
a1ab4c31
AC
4524 }
4525
4526 /* If the input type has padding, remove it and convert to the output type.
4527    The ordering of the conditions ensures that the output type is not
4528    a padding type here, as it is not clear whether the conversion would
4529    always be correct if this were to happen. */
315cff15 4530 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
a1ab4c31
AC
4531 {
4532 tree unpadded;
4533
4534 /* If we have just converted to this padded type, just get the
4535 inner expression. */
4536 if (TREE_CODE (expr) == CONSTRUCTOR
9771b263
DN
4537 && !vec_safe_is_empty (CONSTRUCTOR_ELTS (expr))
4538 && (*CONSTRUCTOR_ELTS (expr))[0].index == TYPE_FIELDS (etype))
4539 unpadded = (*CONSTRUCTOR_ELTS (expr))[0].value;
a1ab4c31
AC
4540
4541 /* Otherwise, build an explicit component reference. */
4542 else
4543 unpadded
4544 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
4545
4546 return convert (type, unpadded);
4547 }
4548
4549 /* If the input is a biased type, adjust first. */
4550 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4551 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
4552 fold_convert (TREE_TYPE (etype),
4553 expr),
4554 TYPE_MIN_VALUE (etype)));
4555
4556 /* If the input is a justified modular type, we need to extract the actual
4557 object before converting it to any other type with the exceptions of an
4558 unconstrained array or of a mere type variant. It is useful to avoid the
4559 extraction and conversion in the type variant case because it could end
4560    up replacing a VAR_DECL expr by a constructor and we might be about to
4561    take the address of the result. */
4562 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
4563 && code != UNCONSTRAINED_ARRAY_TYPE
4564 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
4565 return convert (type, build_component_ref (expr, NULL_TREE,
4566 TYPE_FIELDS (etype), false));
4567
4568 /* If converting to a type that contains a template, convert to the data
4569 type and then build the template. */
4570 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
4571 {
910ad8de 4572 tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
9771b263
DN
4573 vec<constructor_elt, va_gc> *v;
4574 vec_alloc (v, 2);
a1ab4c31
AC
4575
4576 /* If the source already has a template, get a reference to the
4577 associated array only, as we are going to rebuild a template
4578 for the target type anyway. */
4579 expr = maybe_unconstrained_array (expr);
4580
0e228dd9
NF
4581 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4582 build_template (TREE_TYPE (TYPE_FIELDS (type)),
4583 obj_type, NULL_TREE));
910ad8de 4584 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
0e228dd9
NF
4585 convert (obj_type, expr));
4586 return gnat_build_constructor (type, v);
a1ab4c31
AC
4587 }
4588
a1c7d797 4589 /* There are some cases of expressions that we process specially. */
a1ab4c31
AC
4590 switch (TREE_CODE (expr))
4591 {
4592 case ERROR_MARK:
4593 return expr;
4594
4595 case NULL_EXPR:
4596 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4597 conversion in gnat_expand_expr. NULL_EXPR does not represent
4598      an actual value, so no conversion is needed. */
4599 expr = copy_node (expr);
4600 TREE_TYPE (expr) = type;
4601 return expr;
4602
4603 case STRING_CST:
4604 /* If we are converting a STRING_CST to another constrained array type,
4605 just make a new one in the proper type. */
4606 if (code == ecode && AGGREGATE_TYPE_P (etype)
4607 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
4608 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
4609 {
4610 expr = copy_node (expr);
4611 TREE_TYPE (expr) = type;
4612 return expr;
4613 }
4614 break;
4615
7948ae37
OH
4616 case VECTOR_CST:
4617 /* If we are converting a VECTOR_CST to a mere variant type, just make
4618 a new one in the proper type. */
4619 if (code == ecode && gnat_types_compatible_p (type, etype))
4620 {
4621 expr = copy_node (expr);
4622 TREE_TYPE (expr) = type;
4623 return expr;
4624 }
4625
a1ab4c31
AC
4626 case CONSTRUCTOR:
4627 /* If we are converting a CONSTRUCTOR to a mere variant type, just make
4628 a new one in the proper type. */
4629 if (code == ecode && gnat_types_compatible_p (type, etype))
4630 {
4631 expr = copy_node (expr);
4632 TREE_TYPE (expr) = type;
9771b263 4633 CONSTRUCTOR_ELTS (expr) = vec_safe_copy (CONSTRUCTOR_ELTS (expr));
a1ab4c31
AC
4634 return expr;
4635 }
4636
cb3d597d
EB
4637 /* Likewise for a conversion between original and packable version, or
4638 conversion between types of the same size and with the same list of
4639 fields, but we have to work harder to preserve type consistency. */
a1ab4c31
AC
4640 if (code == ecode
4641 && code == RECORD_TYPE
cb3d597d
EB
4642 && (TYPE_NAME (type) == TYPE_NAME (etype)
4643 || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
4644
a1ab4c31 4645 {
9771b263
DN
4646 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4647 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4648 vec<constructor_elt, va_gc> *v;
4649 vec_alloc (v, len);
a1ab4c31
AC
4650 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4651 unsigned HOST_WIDE_INT idx;
4652 tree index, value;
4653
db868e1e
OH
4654 /* Whether we need to clear TREE_CONSTANT et al. on the output
4655 constructor when we convert in place. */
4656 bool clear_constant = false;
4657
a1ab4c31
AC
4658 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
4659 {
cb3d597d
EB
4660 /* We expect only simple constructors. */
4661 if (!SAME_FIELD_P (index, efield))
4662 break;
4663 /* The field must be the same. */
4664 if (!SAME_FIELD_P (efield, field))
a1ab4c31 4665 break;
f32682ca 4666 constructor_elt elt = {field, convert (TREE_TYPE (field), value)};
9771b263 4667 v->quick_push (elt);
db868e1e
OH
4668
4669 /* If packing has made this field a bitfield and the input
4670 value couldn't be emitted statically any more, we need to
4671 clear TREE_CONSTANT on our output. */
ced57283
EB
4672 if (!clear_constant
4673 && TREE_CONSTANT (expr)
db868e1e
OH
4674 && !CONSTRUCTOR_BITFIELD_P (efield)
4675 && CONSTRUCTOR_BITFIELD_P (field)
4676 && !initializer_constant_valid_for_bitfield_p (value))
4677 clear_constant = true;
4678
910ad8de
NF
4679 efield = DECL_CHAIN (efield);
4680 field = DECL_CHAIN (field);
a1ab4c31
AC
4681 }
4682
db868e1e
OH
4683 /* If we have been able to match and convert all the input fields
4684 to their output type, convert in place now. We'll fallback to a
4685 view conversion downstream otherwise. */
a1ab4c31
AC
4686 if (idx == len)
4687 {
4688 expr = copy_node (expr);
4689 TREE_TYPE (expr) = type;
4690 CONSTRUCTOR_ELTS (expr) = v;
db868e1e 4691 if (clear_constant)
ced57283 4692 TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
a1ab4c31
AC
4693 return expr;
4694 }
4695 }
7948ae37
OH
4696
4697 /* Likewise for a conversion between array type and vector type with a
4698 compatible representative array. */
4699 else if (code == VECTOR_TYPE
4700 && ecode == ARRAY_TYPE
4701 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4702 etype))
4703 {
9771b263
DN
4704 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4705 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4706 vec<constructor_elt, va_gc> *v;
7948ae37
OH
4707 unsigned HOST_WIDE_INT ix;
4708 tree value;
4709
4710 /* Build a VECTOR_CST from a *constant* array constructor. */
4711 if (TREE_CONSTANT (expr))
4712 {
4713 bool constant_p = true;
4714
4715 /* Iterate through elements and check if all constructor
4716 elements are *_CSTs. */
4717 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4718 if (!CONSTANT_CLASS_P (value))
4719 {
4720 constant_p = false;
4721 break;
4722 }
4723
4724 if (constant_p)
4725 return build_vector_from_ctor (type,
4726 CONSTRUCTOR_ELTS (expr));
4727 }
4728
4729 /* Otherwise, build a regular vector constructor. */
9771b263 4730 vec_alloc (v, len);
7948ae37
OH
4731 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4732 {
f32682ca 4733 constructor_elt elt = {NULL_TREE, value};
9771b263 4734 v->quick_push (elt);
7948ae37
OH
4735 }
4736 expr = copy_node (expr);
4737 TREE_TYPE (expr) = type;
4738 CONSTRUCTOR_ELTS (expr) = v;
4739 return expr;
4740 }
a1ab4c31
AC
4741 break;
4742
4743 case UNCONSTRAINED_ARRAY_REF:
a1c7d797
EB
4744 /* First retrieve the underlying array. */
4745 expr = maybe_unconstrained_array (expr);
4746 etype = TREE_TYPE (expr);
4747 ecode = TREE_CODE (etype);
4748 break;
a1ab4c31
AC
4749
4750 case VIEW_CONVERT_EXPR:
4751 {
4752 /* GCC 4.x is very sensitive to type consistency overall, and view
4753 conversions thus are very frequent. Even though just "convert"ing
4754 the inner operand to the output type is fine in most cases, it
4755 might expose unexpected input/output type mismatches in special
4756 circumstances so we avoid such recursive calls when we can. */
4757 tree op0 = TREE_OPERAND (expr, 0);
4758
4759 /* If we are converting back to the original type, we can just
4760 lift the input conversion. This is a common occurrence with
4761 switches back-and-forth amongst type variants. */
4762 if (type == TREE_TYPE (op0))
4763 return op0;
4764
7948ae37
OH
4765 /* Otherwise, if we're converting between two aggregate or vector
4766 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4767 target type in place or to just convert the inner expression. */
4768 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4769 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
a1ab4c31
AC
4770 {
4771 /* If we are converting between mere variants, we can just
4772 substitute the VIEW_CONVERT_EXPR in place. */
4773 if (gnat_types_compatible_p (type, etype))
4774 return build1 (VIEW_CONVERT_EXPR, type, op0);
4775
4776 /* Otherwise, we may just bypass the input view conversion unless
4777 one of the types is a fat pointer, which is handled by
4778 specialized code below which relies on exact type matching. */
315cff15
EB
4779 else if (!TYPE_IS_FAT_POINTER_P (type)
4780 && !TYPE_IS_FAT_POINTER_P (etype))
a1ab4c31
AC
4781 return convert (type, op0);
4782 }
ad1d36ba
EB
4783
4784 break;
a1ab4c31 4785 }
a1ab4c31 4786
a1ab4c31
AC
4787 default:
4788 break;
4789 }
4790
4791 /* Check for converting to a pointer to an unconstrained array. */
315cff15 4792 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
a1ab4c31
AC
4793 return convert_to_fat_pointer (type, expr);
4794
7948ae37
OH
4795 /* If we are converting between two aggregate or vector types that are mere
4796 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4797 to a vector type from its representative array type. */
4798 else if ((code == ecode
4799 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4800 && gnat_types_compatible_p (type, etype))
4801 || (code == VECTOR_TYPE
4802 && ecode == ARRAY_TYPE
4803 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4804 etype)))
a1ab4c31
AC
4805 return build1 (VIEW_CONVERT_EXPR, type, expr);
4806
76af763d
EB
4807 /* If we are converting between tagged types, try to upcast properly. */
4808 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4809 && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type))
4810 {
4811 tree child_etype = etype;
4812 do {
4813 tree field = TYPE_FIELDS (child_etype);
4814 if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
4815 return build_component_ref (expr, NULL_TREE, field, false);
4816 child_etype = TREE_TYPE (field);
4817 } while (TREE_CODE (child_etype) == RECORD_TYPE);
4818 }
4819
bb1f7929
EB
4820 /* If we are converting from a smaller form of record type back to it, just
4821 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4822 size on both sides. */
4823 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4824 && smaller_form_type_p (etype, type))
4825 {
4826 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4827 false, false, false, true),
4828 expr);
4829 return build1 (VIEW_CONVERT_EXPR, type, expr);
4830 }
4831
a1ab4c31 4832 /* In all other cases of related types, make a NOP_EXPR. */
86060344 4833 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
a1ab4c31
AC
4834 return fold_convert (type, expr);
4835
4836 switch (code)
4837 {
4838 case VOID_TYPE:
4839 return fold_build1 (CONVERT_EXPR, type, expr);
4840
a1ab4c31
AC
4841 case INTEGER_TYPE:
4842 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4843 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4844 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4845 return unchecked_convert (type, expr, false);
4846 else if (TYPE_BIASED_REPRESENTATION_P (type))
4847 return fold_convert (type,
4848 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4849 convert (TREE_TYPE (type), expr),
4850 TYPE_MIN_VALUE (type)));
4851
4852 /* ... fall through ... */
4853
4854 case ENUMERAL_TYPE:
01ddebf2 4855 case BOOLEAN_TYPE:
a1ab4c31
AC
4856 /* If we are converting an additive expression to an integer type
4857 with lower precision, be wary of the optimization that can be
4858 applied by convert_to_integer. There are 2 problematic cases:
4859 - if the first operand was originally of a biased type,
4860 because we could be recursively called to convert it
4861 to an intermediate type and thus rematerialize the
4862 additive operator endlessly,
4863 - if the expression contains a placeholder, because an
4864 intermediate conversion that changes the sign could
4865 be inserted and thus introduce an artificial overflow
4866 at compile time when the placeholder is substituted. */
4867 if (code == INTEGER_TYPE
4868 && ecode == INTEGER_TYPE
4869 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4870 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4871 {
4872 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4873
4874 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4875 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4876 || CONTAINS_PLACEHOLDER_P (expr))
4877 return build1 (NOP_EXPR, type, expr);
4878 }
4879
4880 return fold (convert_to_integer (type, expr));
4881
4882 case POINTER_TYPE:
4883 case REFERENCE_TYPE:
0d7de0e1 4884 /* If converting between two thin pointers, adjust if needed to account
2b45154d
EB
4885 for differing offsets from the base pointer, depending on whether
4886 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type. */
315cff15 4887 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
a1ab4c31 4888 {
2b45154d
EB
4889 tree etype_pos
4890 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)) != NULL_TREE
4891 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype))))
4892 : size_zero_node;
4893 tree type_pos
4894 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != NULL_TREE
4895 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type))))
4896 : size_zero_node;
4897 tree byte_diff = size_diffop (type_pos, etype_pos);
0d7de0e1 4898
a1ab4c31 4899 expr = build1 (NOP_EXPR, type, expr);
a1ab4c31
AC
4900 if (integer_zerop (byte_diff))
4901 return expr;
4902
4903 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
0d7de0e1 4904 fold_convert (sizetype, byte_diff));
a1ab4c31
AC
4905 }
4906
0d7de0e1
EB
4907 /* If converting fat pointer to normal or thin pointer, get the pointer
4908 to the array and then convert it. */
4909 if (TYPE_IS_FAT_POINTER_P (etype))
86060344
EB
4910 expr
4911 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
a1ab4c31
AC
4912
4913 return fold (convert_to_pointer (type, expr));
4914
4915 case REAL_TYPE:
4916 return fold (convert_to_real (type, expr));
4917
4918 case RECORD_TYPE:
4919 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
0e228dd9 4920 {
9771b263
DN
4921 vec<constructor_elt, va_gc> *v;
4922 vec_alloc (v, 1);
0e228dd9
NF
4923
4924 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4925 convert (TREE_TYPE (TYPE_FIELDS (type)),
4926 expr));
4927 return gnat_build_constructor (type, v);
4928 }
a1ab4c31
AC
4929
4930 /* ... fall through ... */
4931
4932 case ARRAY_TYPE:
4933 /* In these cases, assume the front-end has validated the conversion.
4934 If the conversion is valid, it will be a bit-wise conversion, so
4935 it can be viewed as an unchecked conversion. */
4936 return unchecked_convert (type, expr, false);
4937
4938 case UNION_TYPE:
4939      /* This is either a conversion between a tagged type and some
4940	 subtype, which we have to mark as a UNION_TYPE because of
4941	 overlapping fields, or a conversion of an Unchecked_Union. */
4942 return unchecked_convert (type, expr, false);
4943
4944 case UNCONSTRAINED_ARRAY_TYPE:
7948ae37
OH
4945 /* If the input is a VECTOR_TYPE, convert to the representative
4946 array type first. */
4947 if (ecode == VECTOR_TYPE)
4948 {
4949 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
4950 etype = TREE_TYPE (expr);
4951 ecode = TREE_CODE (etype);
4952 }
4953
a1ab4c31
AC
4954 /* If EXPR is a constrained array, take its address, convert it to a
4955 fat pointer, and then dereference it. Likewise if EXPR is a
4956 record containing both a template and a constrained array.
4957 Note that a record representing a justified modular type
4958 always represents a packed constrained array. */
4959 if (ecode == ARRAY_TYPE
4960 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4961 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4962 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4963 return
4964 build_unary_op
4965 (INDIRECT_REF, NULL_TREE,
4966 convert_to_fat_pointer (TREE_TYPE (type),
4967 build_unary_op (ADDR_EXPR,
4968 NULL_TREE, expr)));
4969
4970 /* Do something very similar for converting one unconstrained
4971 array to another. */
4972 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4973 return
4974 build_unary_op (INDIRECT_REF, NULL_TREE,
4975 convert (TREE_TYPE (type),
4976 build_unary_op (ADDR_EXPR,
4977 NULL_TREE, expr)));
4978 else
4979 gcc_unreachable ();
4980
4981 case COMPLEX_TYPE:
4982 return fold (convert_to_complex (type, expr));
4983
4984 default:
4985 gcc_unreachable ();
4986 }
4987}
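/* A minimal illustrative sketch, not part of the GNAT sources: scalar
   arithmetic equivalent to the two biased-representation adjustments made
   by "convert" above, assuming a hypothetical biased type with range
   100 .. 115 (bias 100) stored in 4 bits.  The helper names and the bias
   are made up for the example.  */
#if 0
static unsigned biased_encode (int value) { return (unsigned) (value - 100); }
static int      biased_decode (unsigned bits) { return (int) bits + 100; }
/* Converting *from* the biased type adds TYPE_MIN_VALUE, as in the
   "input is a biased type" case above: biased_decode (7) == 107.
   Converting *to* the biased type subtracts TYPE_MIN_VALUE, as in the
   INTEGER_TYPE arm of the switch: biased_encode (107) == 7.  */
#endif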
15bf7d19
EB
4988
4989/* Create an expression whose value is that of EXPR converted to the common
4990 index type, which is sizetype. EXPR is supposed to be in the base type
4991 of the GNAT index type. Calling it is equivalent to doing
4992
4993 convert (sizetype, expr)
4994
4995 but we try to distribute the type conversion with the knowledge that EXPR
4996 cannot overflow in its type. This is a best-effort approach and we fall
4997 back to the above expression as soon as difficulties are encountered.
4998
4999 This is necessary to overcome issues that arise when the GNAT base index
5000 type and the GCC common index type (sizetype) don't have the same size,
5001 which is quite frequent on 64-bit architectures. In this case, and if
5002 the GNAT base index type is signed but the iteration type of the loop has
5003 been forced to unsigned, the loop scalar evolution engine cannot compute
5004 a simple evolution for the general induction variables associated with the
5005 array indices, because it will preserve the wrap-around semantics in the
5006 unsigned type of their "inner" part. As a result, many loop optimizations
5007 are blocked.
5008
5009 The solution is to use a special (basic) induction variable that is at
5010 least as large as sizetype, and to express the aforementioned general
5011 induction variables in terms of this induction variable, eliminating
5012 the problematic intermediate truncation to the GNAT base index type.
5013 This is possible as long as the original expression doesn't overflow
5014 and if the middle-end hasn't introduced artificial overflows in the
5015   course of the various simplifications it can make to the expression. */
5016
5017tree
5018convert_to_index_type (tree expr)
5019{
5020 enum tree_code code = TREE_CODE (expr);
5021 tree type = TREE_TYPE (expr);
5022
5023 /* If the type is unsigned, overflow is allowed so we cannot be sure that
5024 EXPR doesn't overflow. Keep it simple if optimization is disabled. */
5025 if (TYPE_UNSIGNED (type) || !optimize)
5026 return convert (sizetype, expr);
5027
5028 switch (code)
5029 {
5030 case VAR_DECL:
5031 /* The main effect of the function: replace a loop parameter with its
5032 associated special induction variable. */
5033 if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
5034 expr = DECL_INDUCTION_VAR (expr);
5035 break;
5036
5037 CASE_CONVERT:
5038 {
5039 tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
5040 /* Bail out as soon as we suspect some sort of type frobbing. */
5041 if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
5042 || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
5043 break;
5044 }
5045
5046 /* ... fall through ... */
5047
5048 case NON_LVALUE_EXPR:
5049 return fold_build1 (code, sizetype,
5050 convert_to_index_type (TREE_OPERAND (expr, 0)));
5051
5052 case PLUS_EXPR:
5053 case MINUS_EXPR:
5054 case MULT_EXPR:
5055 return fold_build2 (code, sizetype,
5056 convert_to_index_type (TREE_OPERAND (expr, 0)),
5057 convert_to_index_type (TREE_OPERAND (expr, 1)));
5058
5059 case COMPOUND_EXPR:
5060 return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
5061 convert_to_index_type (TREE_OPERAND (expr, 1)));
5062
5063 case COND_EXPR:
5064 return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
5065 convert_to_index_type (TREE_OPERAND (expr, 1)),
5066 convert_to_index_type (TREE_OPERAND (expr, 2)));
5067
5068 default:
5069 break;
5070 }
5071
5072 return convert (sizetype, expr);
5073}
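/* A usage sketch under stated assumptions (hypothetical names): I is a loop
   parameter of a signed type with DECL_LOOP_PARM_P set, IV is its
   DECL_INDUCTION_VAR, and the index expression being converted is I + 1.
   Then, with optimization enabled,

     convert_to_index_type (build2 (PLUS_EXPR, itype, I,
                                    build_int_cst (itype, 1)))

   distributes the conversion through the PLUS_EXPR and yields
   (sizetype) IV + (sizetype) 1 rather than (sizetype) (I + 1), which lets
   the scalar evolution engine see a simple affine evolution in sizetype.  */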
a1ab4c31
AC
5074\f
5075/* Remove all conversions that are done in EXP. This includes converting
5076 from a padded type or to a justified modular type. If TRUE_ADDRESS
5077 is true, always return the address of the containing object even if
5078 the address is not bit-aligned. */
5079
5080tree
5081remove_conversions (tree exp, bool true_address)
5082{
5083 switch (TREE_CODE (exp))
5084 {
5085 case CONSTRUCTOR:
5086 if (true_address
5087 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
5088 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
5089 return
9771b263 5090 remove_conversions ((*CONSTRUCTOR_ELTS (exp))[0].value, true);
a1ab4c31
AC
5091 break;
5092
5093 case COMPONENT_REF:
315cff15 5094 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
a1ab4c31
AC
5095 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
5096 break;
5097
a1ab4c31 5098 CASE_CONVERT:
722356ce
EB
5099 case VIEW_CONVERT_EXPR:
5100 case NON_LVALUE_EXPR:
a1ab4c31
AC
5101 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
5102
5103 default:
5104 break;
5105 }
5106
5107 return exp;
5108}
5109\f
5110/* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
86060344 5111 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
a1ab4c31
AC
5112 likewise return an expression pointing to the underlying array. */
5113
5114tree
5115maybe_unconstrained_array (tree exp)
5116{
5117 enum tree_code code = TREE_CODE (exp);
1aa291f7 5118 tree type = TREE_TYPE (exp);
a1ab4c31 5119
1aa291f7 5120 switch (TREE_CODE (type))
a1ab4c31
AC
5121 {
5122 case UNCONSTRAINED_ARRAY_TYPE:
5123 if (code == UNCONSTRAINED_ARRAY_REF)
5124 {
7e169899 5125 const bool read_only = TREE_READONLY (exp);
a1c7d797
EB
5126 const bool no_trap = TREE_THIS_NOTRAP (exp);
5127
7e169899 5128 exp = TREE_OPERAND (exp, 0);
1aa291f7
EB
5129 type = TREE_TYPE (exp);
5130
7e169899
EB
5131 if (TREE_CODE (exp) == COND_EXPR)
5132 {
5133 tree op1
5134 = build_unary_op (INDIRECT_REF, NULL_TREE,
5135 build_component_ref (TREE_OPERAND (exp, 1),
5136 NULL_TREE,
1aa291f7 5137 TYPE_FIELDS (type),
7e169899
EB
5138 false));
5139 tree op2
5140 = build_unary_op (INDIRECT_REF, NULL_TREE,
5141 build_component_ref (TREE_OPERAND (exp, 2),
5142 NULL_TREE,
1aa291f7 5143 TYPE_FIELDS (type),
7e169899
EB
5144 false));
5145
5146 exp = build3 (COND_EXPR,
1aa291f7 5147 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
7e169899
EB
5148 TREE_OPERAND (exp, 0), op1, op2);
5149 }
5150 else
a1c7d797
EB
5151 {
5152 exp = build_unary_op (INDIRECT_REF, NULL_TREE,
5153 build_component_ref (exp, NULL_TREE,
1aa291f7 5154 TYPE_FIELDS (type),
a1c7d797
EB
5155 false));
5156 TREE_READONLY (exp) = read_only;
5157 TREE_THIS_NOTRAP (exp) = no_trap;
5158 }
a1ab4c31
AC
5159 }
5160
5161 else if (code == NULL_EXPR)
1aa291f7
EB
5162 exp = build1 (NULL_EXPR,
5163 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
5164 TREE_OPERAND (exp, 0));
5165 break;
a1ab4c31
AC
5166
5167 case RECORD_TYPE:
1aa291f7
EB
5168 /* If this is a padded type and it contains a template, convert to the
5169 unpadded type first. */
5170 if (TYPE_PADDING_P (type)
5171 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
5172 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
a1ab4c31 5173 {
1aa291f7
EB
5174 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
5175 type = TREE_TYPE (exp);
5176 }
5177
5178 if (TYPE_CONTAINS_TEMPLATE_P (type))
5179 {
5180 exp = build_component_ref (exp, NULL_TREE,
5181 DECL_CHAIN (TYPE_FIELDS (type)),
5182 false);
5183 type = TREE_TYPE (exp);
5184
5185 /* If the array type is padded, convert to the unpadded type. */
5186 if (TYPE_IS_PADDING_P (type))
5187 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
a1ab4c31 5188 }
a1ab4c31
AC
5189 break;
5190
5191 default:
5192 break;
5193 }
5194
5195 return exp;
5196}
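/* Illustrative note, not from the original comments: for EXP built as an
   UNCONSTRAINED_ARRAY_REF from a fat pointer P, the first case above
   rebuilds the reference as a dereference of the array-pointer field of P
   (the first field of the fat pointer record, conventionally P_ARRAY in
   gigi), while carrying over the TREE_READONLY and TREE_THIS_NOTRAP flags
   of the original reference.  */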
5197\f
afcea859 5198/* Return true if EXPR is an expression that can be folded as an operand
84fb43a1 5199 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
afcea859
EB
5200
5201static bool
5202can_fold_for_view_convert_p (tree expr)
5203{
5204 tree t1, t2;
5205
5206 /* The folder will fold NOP_EXPRs between integral types with the same
5207 precision (in the middle-end's sense). We cannot allow it if the
5208 types don't have the same precision in the Ada sense as well. */
5209 if (TREE_CODE (expr) != NOP_EXPR)
5210 return true;
5211
5212 t1 = TREE_TYPE (expr);
5213 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
5214
5215 /* Defer to the folder for non-integral conversions. */
5216 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
5217 return true;
5218
5219 /* Only fold conversions that preserve both precisions. */
5220 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
5221 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
5222 return true;
5223
5224 return false;
5225}
5226
a1ab4c31 5227/* Return an expression that does an unchecked conversion of EXPR to TYPE.
afcea859
EB
5228 If NOTRUNC_P is true, truncation operations should be suppressed.
5229
5230 Special care is required with (source or target) integral types whose
5231 precision is not equal to their size, to make sure we fetch or assign
5232 the value bits whose location might depend on the endianness, e.g.
5233
5234 Rmsize : constant := 8;
5235 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
5236
5237 type Bit_Array is array (1 .. Rmsize) of Boolean;
5238 pragma Pack (Bit_Array);
5239
5240 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
5241
5242 Value : Int := 2#1000_0001#;
5243 Vbits : Bit_Array := To_Bit_Array (Value);
5244
5245 we expect the 8 bits at Vbits'Address to always contain Value, while
5246 their original location depends on the endianness, at Value'Address
84fb43a1 5247 on a little-endian architecture but not on a big-endian one. */
a1ab4c31
AC
5248
5249tree
5250unchecked_convert (tree type, tree expr, bool notrunc_p)
5251{
5252 tree etype = TREE_TYPE (expr);
c34f3839
EB
5253 enum tree_code ecode = TREE_CODE (etype);
5254 enum tree_code code = TREE_CODE (type);
980a0501 5255 int c;
a1ab4c31 5256
c34f3839 5257 /* If the expression is already of the right type, we are done. */
a1ab4c31
AC
5258 if (etype == type)
5259 return expr;
5260
5261   /* If both types are integral, just do a normal conversion.
5262 Likewise for a conversion to an unconstrained array. */
5263 if ((((INTEGRAL_TYPE_P (type)
c34f3839 5264 && !(code == INTEGER_TYPE && TYPE_VAX_FLOATING_POINT_P (type)))
0d7de0e1 5265 || (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
c34f3839 5266 || (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
a1ab4c31 5267 && ((INTEGRAL_TYPE_P (etype)
c34f3839 5268 && !(ecode == INTEGER_TYPE && TYPE_VAX_FLOATING_POINT_P (etype)))
315cff15 5269 || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
c34f3839
EB
5270 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
5271 || code == UNCONSTRAINED_ARRAY_TYPE)
a1ab4c31 5272 {
c34f3839 5273 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
a1ab4c31
AC
5274 {
5275 tree ntype = copy_type (etype);
a1ab4c31
AC
5276 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
5277 TYPE_MAIN_VARIANT (ntype) = ntype;
5278 expr = build1 (NOP_EXPR, ntype, expr);
5279 }
5280
c34f3839 5281 if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
a1ab4c31 5282 {
afcea859 5283 tree rtype = copy_type (type);
a1ab4c31
AC
5284 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
5285 TYPE_MAIN_VARIANT (rtype) = rtype;
afcea859
EB
5286 expr = convert (rtype, expr);
5287 expr = build1 (NOP_EXPR, type, expr);
a1ab4c31 5288 }
afcea859
EB
5289 else
5290 expr = convert (type, expr);
a1ab4c31
AC
5291 }
5292
afcea859 5293 /* If we are converting to an integral type whose precision is not equal
416de7d5
EB
5294    to its size, first unchecked convert to a record type that contains a
5295 field of the given precision. Then extract the field. */
980a0501
EB
5296 else if (INTEGRAL_TYPE_P (type)
5297 && TYPE_RM_SIZE (type)
a1ab4c31
AC
5298 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
5299 GET_MODE_BITSIZE (TYPE_MODE (type))))
5300 {
5301 tree rec_type = make_node (RECORD_TYPE);
416de7d5
EB
5302 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
5303 tree field_type, field;
5304
5305 if (TYPE_UNSIGNED (type))
5306 field_type = make_unsigned_type (prec);
5307 else
5308 field_type = make_signed_type (prec);
5309 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));
5310
5311 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
5312 NULL_TREE, NULL_TREE, 1, 0);
a1ab4c31
AC
5313
5314 TYPE_FIELDS (rec_type) = field;
5315 layout_type (rec_type);
5316
5317 expr = unchecked_convert (rec_type, expr, notrunc_p);
3cd64bab 5318 expr = build_component_ref (expr, NULL_TREE, field, false);
416de7d5 5319 expr = fold_build1 (NOP_EXPR, type, expr);
a1ab4c31
AC
5320 }
5321
416de7d5
EB
5322 /* Similarly if we are converting from an integral type whose precision is
5323 not equal to its size, first copy into a field of the given precision
5324 and unchecked convert the record type. */
980a0501
EB
5325 else if (INTEGRAL_TYPE_P (etype)
5326 && TYPE_RM_SIZE (etype)
5327 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
5328 GET_MODE_BITSIZE (TYPE_MODE (etype))))
a1ab4c31
AC
5329 {
5330 tree rec_type = make_node (RECORD_TYPE);
416de7d5 5331 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
9771b263
DN
5332 vec<constructor_elt, va_gc> *v;
5333 vec_alloc (v, 1);
416de7d5
EB
5334 tree field_type, field;
5335
5336 if (TYPE_UNSIGNED (etype))
5337 field_type = make_unsigned_type (prec);
5338 else
5339 field_type = make_signed_type (prec);
5340 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));
5341
5342 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
5343 NULL_TREE, NULL_TREE, 1, 0);
a1ab4c31
AC
5344
5345 TYPE_FIELDS (rec_type) = field;
5346 layout_type (rec_type);
5347
416de7d5 5348 expr = fold_build1 (NOP_EXPR, field_type, expr);
0e228dd9
NF
5349 CONSTRUCTOR_APPEND_ELT (v, field, expr);
5350 expr = gnat_build_constructor (rec_type, v);
a1ab4c31
AC
5351 expr = unchecked_convert (type, expr, notrunc_p);
5352 }
5353
980a0501
EB
5354 /* If we are converting from a scalar type to a type with a different size,
5355 we need to pad to have the same size on both sides.
5356
5357 ??? We cannot do it unconditionally because unchecked conversions are
5358 used liberally by the front-end to implement polymorphism, e.g. in:
5359
5360 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
5361 return p___size__4 (p__object!(S191s.all));
5362
5363 so we skip all expressions that are references. */
5364 else if (!REFERENCE_CLASS_P (expr)
5365 && !AGGREGATE_TYPE_P (etype)
5366 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5367 && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
5368 {
5369 if (c < 0)
5370 {
5371 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
5372 false, false, false, true),
5373 expr);
5374 expr = unchecked_convert (type, expr, notrunc_p);
5375 }
5376 else
5377 {
5378 tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
5379 false, false, false, true);
5380 expr = unchecked_convert (rec_type, expr, notrunc_p);
5381 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (rec_type),
5382 false);
5383 }
5384 }
5385
7948ae37
OH
5386 /* We have a special case when we are converting between two unconstrained
5387 array types. In that case, take the address, convert the fat pointer
5388 types, and dereference. */
c34f3839 5389 else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
a1ab4c31
AC
5390 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
5391 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
5392 build_unary_op (ADDR_EXPR, NULL_TREE,
5393 expr)));
7948ae37
OH
5394
5395 /* Another special case is when we are converting to a vector type from its
5396 representative array type; this a regular conversion. */
c34f3839
EB
5397 else if (code == VECTOR_TYPE
5398 && ecode == ARRAY_TYPE
7948ae37
OH
5399 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
5400 etype))
5401 expr = convert (type, expr);
5402
a1ab4c31
AC
5403 else
5404 {
5405 expr = maybe_unconstrained_array (expr);
5406 etype = TREE_TYPE (expr);
c34f3839 5407 ecode = TREE_CODE (etype);
afcea859
EB
5408 if (can_fold_for_view_convert_p (expr))
5409 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
5410 else
5411 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
a1ab4c31
AC
5412 }
5413
afcea859
EB
5414 /* If the result is an integral type whose precision is not equal to its
5415 size, sign- or zero-extend the result. We need not do this if the input
5416 is an integral type of the same precision and signedness or if the output
a1ab4c31
AC
5417 is a biased type or if both the input and output are unsigned. */
5418 if (!notrunc_p
5419 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
c34f3839 5420 && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
a1ab4c31
AC
5421 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
5422 GET_MODE_BITSIZE (TYPE_MODE (type)))
5423 && !(INTEGRAL_TYPE_P (etype)
5424 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
5425 && operand_equal_p (TYPE_RM_SIZE (type),
5426 (TYPE_RM_SIZE (etype) != 0
5427 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
5428 0))
5429 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
5430 {
c34f3839
EB
5431 tree base_type
5432 = gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
a1ab4c31
AC
5433 tree shift_expr
5434 = convert (base_type,
5435 size_binop (MINUS_EXPR,
5436 bitsize_int
5437 (GET_MODE_BITSIZE (TYPE_MODE (type))),
5438 TYPE_RM_SIZE (type)));
5439 expr
5440 = convert (type,
5441 build_binary_op (RSHIFT_EXPR, base_type,
5442 build_binary_op (LSHIFT_EXPR, base_type,
5443 convert (base_type, expr),
5444 shift_expr),
5445 shift_expr));
5446 }
5447
5448 /* An unchecked conversion should never raise Constraint_Error. The code
5449 below assumes that GCC's conversion routines overflow the same way that
5450 the underlying hardware does. This is probably true. In the rare case
5451 when it is false, we can rely on the fact that such conversions are
5452 erroneous anyway. */
5453 if (TREE_CODE (expr) == INTEGER_CST)
5454 TREE_OVERFLOW (expr) = 0;
5455
5456   /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
5457      mark the expression as no longer constant. */
5458 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
5459 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
5460 OEP_ONLY_CONST))
5461 TREE_CONSTANT (expr) = 0;
5462
5463 return expr;
5464}
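/* A minimal sketch, illustrative only, of the extension performed above when
   the RM size is smaller than the mode size, e.g. a 5-bit signed value
   carried in an 8-bit mode.  It assumes the usual two's-complement narrowing
   and an arithmetic right shift, which is what the LSHIFT_EXPR/RSHIFT_EXPR
   pair on a signed base type expresses at the tree level.  */
#if 0
static int extend_5bit (unsigned char bits)
{
  signed char v = (signed char) (bits << 3);  /* shift the value bits up into the sign bit */
  return v >> 3;                              /* shift back down, replicating the sign bit */
}
/* extend_5bit (0x1f) == -1 and extend_5bit (0x0f) == 15.  */
#endif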
5465\f
feec4372 5466/* Return the appropriate GCC tree code for the specified GNAT_TYPE,
a1ab4c31
AC
5467 the latter being a record type as predicated by Is_Record_Type. */
5468
5469enum tree_code
5470tree_code_for_record_type (Entity_Id gnat_type)
5471{
b1a785fb 5472 Node_Id component_list, component;
a1ab4c31 5473
b1a785fb
EB
5474 /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5475 fields are all in the variant part. Otherwise, return RECORD_TYPE. */
a1ab4c31
AC
5476 if (!Is_Unchecked_Union (gnat_type))
5477 return RECORD_TYPE;
5478
b1a785fb
EB
5479 gnat_type = Implementation_Base_Type (gnat_type);
5480 component_list
5481 = Component_List (Type_Definition (Declaration_Node (gnat_type)));
5482
a1ab4c31
AC
5483 for (component = First_Non_Pragma (Component_Items (component_list));
5484 Present (component);
5485 component = Next_Non_Pragma (component))
5486 if (Ekind (Defining_Entity (component)) == E_Component)
5487 return RECORD_TYPE;
5488
5489 return UNION_TYPE;
5490}
5491
caa9d12a
EB
5492/* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5493 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5494 according to the presence of an alignment clause on the type or, if it
5495 is an array, on the component type. */
5496
5497bool
5498is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
5499{
5500 gnat_type = Underlying_Type (gnat_type);
5501
5502 *align_clause = Present (Alignment_Clause (gnat_type));
5503
5504 if (Is_Array_Type (gnat_type))
5505 {
5506 gnat_type = Underlying_Type (Component_Type (gnat_type));
5507 if (Present (Alignment_Clause (gnat_type)))
5508 *align_clause = true;
5509 }
5510
5511 if (!Is_Floating_Point_Type (gnat_type))
5512 return false;
5513
5514 if (UI_To_Int (Esize (gnat_type)) != 64)
5515 return false;
5516
5517 return true;
5518}
5519
5520/* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5521 size is greater or equal to 64 bits, or an array of such a type. Set
5522 ALIGN_CLAUSE according to the presence of an alignment clause on the
5523 type or, if it is an array, on the component type. */
5524
5525bool
5526is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
5527{
5528 gnat_type = Underlying_Type (gnat_type);
5529
5530 *align_clause = Present (Alignment_Clause (gnat_type));
5531
5532 if (Is_Array_Type (gnat_type))
5533 {
5534 gnat_type = Underlying_Type (Component_Type (gnat_type));
5535 if (Present (Alignment_Clause (gnat_type)))
5536 *align_clause = true;
5537 }
5538
5539 if (!Is_Scalar_Type (gnat_type))
5540 return false;
5541
5542 if (UI_To_Int (Esize (gnat_type)) < 64)
5543 return false;
5544
5545 return true;
5546}
5547
a1ab4c31
AC
5548/* Return true if GNU_TYPE is suitable as the type of a non-aliased
5549 component of an aggregate type. */
5550
5551bool
5552type_for_nonaliased_component_p (tree gnu_type)
5553{
5554 /* If the type is passed by reference, we may have pointers to the
5555 component so it cannot be made non-aliased. */
5556 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
5557 return false;
5558
5559 /* We used to say that any component of aggregate type is aliased
5560 because the front-end may take 'Reference of it. The front-end
5561 has been enhanced in the meantime so as to use a renaming instead
5562 in most cases, but the back-end can probably take the address of
5563 such a component too so we go for the conservative stance.
5564
5565 For instance, we might need the address of any array type, even
5566 if normally passed by copy, to construct a fat pointer if the
5567 component is used as an actual for an unconstrained formal.
5568
5569 Likewise for record types: even if a specific record subtype is
5570 passed by copy, the parent type might be passed by ref (e.g. if
5571 it's of variable size) and we might take the address of a child
5572 component to pass to a parent formal. We have no way to check
5573 for such conditions here. */
5574 if (AGGREGATE_TYPE_P (gnu_type))
5575 return false;
5576
5577 return true;
5578}
5579
bb1f7929
EB
5580/* Return true if TYPE is a smaller form of ORIG_TYPE. */
5581
5582bool
5583smaller_form_type_p (tree type, tree orig_type)
5584{
5585 tree size, osize;
5586
5587 /* We're not interested in variants here. */
5588 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
5589 return false;
5590
5591 /* Like a variant, a packable version keeps the original TYPE_NAME. */
5592 if (TYPE_NAME (type) != TYPE_NAME (orig_type))
5593 return false;
5594
5595 size = TYPE_SIZE (type);
5596 osize = TYPE_SIZE (orig_type);
5597
5598 if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
5599 return false;
5600
5601 return tree_int_cst_lt (size, osize) != 0;
5602}
5603
a1ab4c31
AC
5604/* Perform final processing on global variables. */
5605
65444786
EB
5606static GTY (()) tree dummy_global;
5607
a1ab4c31
AC
5608void
5609gnat_write_global_declarations (void)
5610{
10e4d056
EB
5611 unsigned int i;
5612 tree iter;
5613
65444786
EB
5614 /* If we have declared types as used at the global level, insert them in
5615 the global hash table. We use a dummy variable for this purpose. */
9771b263 5616 if (types_used_by_cur_var_decl && !types_used_by_cur_var_decl->is_empty ())
65444786 5617 {
35e8bcf4 5618 struct varpool_node *node;
d3c268ab
EB
5619 char *label;
5620
5621 ASM_FORMAT_PRIVATE_NAME (label, first_global_object_name, 0);
65444786 5622 dummy_global
d3c268ab
EB
5623 = build_decl (BUILTINS_LOCATION, VAR_DECL, get_identifier (label),
5624 void_type_node);
65444786
EB
5625 TREE_STATIC (dummy_global) = 1;
5626 TREE_ASM_WRITTEN (dummy_global) = 1;
5d59b5e1 5627 node = varpool_node_for_decl (dummy_global);
ead84f73 5628 node->symbol.force_output = 1;
65444786 5629
9771b263 5630 while (!types_used_by_cur_var_decl->is_empty ())
65444786 5631 {
9771b263 5632 tree t = types_used_by_cur_var_decl->pop ();
65444786
EB
5633 types_used_by_var_decl_insert (t, dummy_global);
5634 }
5635 }
5636
10e4d056
EB
5637 /* Output debug information for all global type declarations first. This
5638 ensures that global types whose compilation hasn't been finalized yet,
5639 for example pointers to Taft amendment types, have their compilation
5640 finalized in the right context. */
9771b263 5641 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
10e4d056
EB
5642 if (TREE_CODE (iter) == TYPE_DECL)
5643 debug_hooks->global_decl (iter);
5644
65d630d4
JH
5645 /* Proceed to optimize and emit assembly. */
5646 finalize_compilation_unit ();
a1ab4c31 5647
10e4d056
EB
5648 /* After cgraph has had a chance to emit everything that's going to
5649 be emitted, output debug information for the rest of globals. */
5650 if (!seen_error ())
5651 {
5652 timevar_push (TV_SYMOUT);
9771b263 5653 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
10e4d056
EB
5654 if (TREE_CODE (iter) != TYPE_DECL)
5655 debug_hooks->global_decl (iter);
5656 timevar_pop (TV_SYMOUT);
5657 }
a1ab4c31
AC
5658}
5659
5660/* ************************************************************************
5661 * * GCC builtins support *
5662 * ************************************************************************ */
5663
5664/* The general scheme is fairly simple:
5665
5666 For each builtin function/type to be declared, gnat_install_builtins calls
5667 internal facilities which eventually get to gnat_push_decl, which in turn
5668   tracks the builtin function decls so declared in the 'builtin_decls' global
5669   data structure. When an Intrinsic subprogram declaration is processed, we
5670   search this global data structure to retrieve the associated BUILT_IN DECL
5671 node. */
5672
5673/* Search the chain of currently available builtin declarations for a node
5674 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
5675 found, if any, or NULL_TREE otherwise. */
5676tree
5677builtin_decl_for (tree name)
5678{
5679 unsigned i;
5680 tree decl;
5681
9771b263 5682 FOR_EACH_VEC_SAFE_ELT (builtin_decls, i, decl)
a1ab4c31
AC
5683 if (DECL_NAME (decl) == name)
5684 return decl;
5685
5686 return NULL_TREE;
5687}
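/* A hypothetical usage example, for illustration only: binding an Intrinsic
   subprogram to a GCC builtin might perform a lookup along the lines of

     tree decl = builtin_decl_for (get_identifier ("__builtin_memcpy"));

   where a NULL_TREE result simply means that no builtin with that name has
   been pushed by gnat_install_builtins.  */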
5688
5689/* The code below eventually exposes gnat_install_builtins, which declares
5690 the builtin types and functions we might need, either internally or as
5691 user accessible facilities.
5692
5693 ??? This is a first implementation shot, still in rough shape. It is
5694 heavily inspired from the "C" family implementation, with chunks copied
5695 verbatim from there.
5696
5697 Two obvious TODO candidates are
5698 o Use a more efficient name/decl mapping scheme
5699 o Devise a middle-end infrastructure to avoid having to copy
5700 pieces between front-ends. */
5701
5702/* ----------------------------------------------------------------------- *
5703 * BUILTIN ELEMENTARY TYPES *
5704 * ----------------------------------------------------------------------- */
5705
5706/* Standard data types to be used in builtin argument declarations. */
5707
5708enum c_tree_index
5709{
5710 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
5711 CTI_STRING_TYPE,
5712 CTI_CONST_STRING_TYPE,
5713
5714 CTI_MAX
5715};
5716
5717static tree c_global_trees[CTI_MAX];
5718
5719#define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
5720#define string_type_node c_global_trees[CTI_STRING_TYPE]
5721#define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
5722
5723/* ??? In addition to some attribute handlers, we currently don't support a
5724   (small) number of builtin-types, which in turn inhibits support for a
5725 number of builtin functions. */
5726#define wint_type_node void_type_node
5727#define intmax_type_node void_type_node
5728#define uintmax_type_node void_type_node
5729
5730/* Build the void_list_node (void_type_node having been created). */
5731
5732static tree
5733build_void_list_node (void)
5734{
5735 tree t = build_tree_list (NULL_TREE, void_type_node);
5736 return t;
5737}
5738
5739/* Used to help initialize the builtin-types.def table. When a type of
5740 the correct size doesn't exist, use error_mark_node instead of NULL.
5741   The latter results in segfaults even when a decl using the type doesn't
5742 get invoked. */
5743
5744static tree
5745builtin_type_for_size (int size, bool unsignedp)
5746{
ced57283 5747 tree type = gnat_type_for_size (size, unsignedp);
a1ab4c31
AC
5748 return type ? type : error_mark_node;
5749}
5750
5751/* Build/push the elementary type decls that builtin functions/types
5752 will need. */
5753
5754static void
5755install_builtin_elementary_types (void)
5756{
728936bb 5757 signed_size_type_node = gnat_signed_type (size_type_node);
a1ab4c31
AC
5758 pid_type_node = integer_type_node;
5759 void_list_node = build_void_list_node ();
5760
5761 string_type_node = build_pointer_type (char_type_node);
5762 const_string_type_node
5763 = build_pointer_type (build_qualified_type
5764 (char_type_node, TYPE_QUAL_CONST));
5765}
5766
5767/* ----------------------------------------------------------------------- *
5768 * BUILTIN FUNCTION TYPES *
5769 * ----------------------------------------------------------------------- */
5770
5771/* Now, builtin function types per se. */
5772
5773enum c_builtin_type
5774{
5775#define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
5776#define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
5777#define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
5778#define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
5779#define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5780#define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5781#define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
5782#define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
5783#define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
acf0174b 5784#define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7, ARG8) NAME,
a1ab4c31
AC
5785#define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
5786#define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
5787#define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
5788#define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5789#define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5790#define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG6) \
5791 NAME,
5792#define DEF_POINTER_TYPE(NAME, TYPE) NAME,
5793#include "builtin-types.def"
5794#undef DEF_PRIMITIVE_TYPE
5795#undef DEF_FUNCTION_TYPE_0
5796#undef DEF_FUNCTION_TYPE_1
5797#undef DEF_FUNCTION_TYPE_2
5798#undef DEF_FUNCTION_TYPE_3
5799#undef DEF_FUNCTION_TYPE_4
5800#undef DEF_FUNCTION_TYPE_5
5801#undef DEF_FUNCTION_TYPE_6
5802#undef DEF_FUNCTION_TYPE_7
acf0174b 5803#undef DEF_FUNCTION_TYPE_8
a1ab4c31
AC
5804#undef DEF_FUNCTION_TYPE_VAR_0
5805#undef DEF_FUNCTION_TYPE_VAR_1
5806#undef DEF_FUNCTION_TYPE_VAR_2
5807#undef DEF_FUNCTION_TYPE_VAR_3
5808#undef DEF_FUNCTION_TYPE_VAR_4
5809#undef DEF_FUNCTION_TYPE_VAR_5
5810#undef DEF_POINTER_TYPE
5811 BT_LAST
5812};
5813
5814typedef enum c_builtin_type builtin_type;
5815
5816/* A temporary array used in communication with def_fn_type. */
5817static GTY(()) tree builtin_types[(int) BT_LAST + 1];
5818
5819/* A helper function for install_builtin_function_types. Build function type
5820 for DEF with return type RET and N arguments. If VAR is true, then the
5821 function should be variadic after those N arguments.
5822
5823 Takes special care not to ICE if any of the types involved are
5824 error_mark_node, which indicates that said type is not in fact available
5825   (see builtin_type_for_size), in which case the function type as a whole
5826 should be error_mark_node. */
5827
5828static void
5829def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
5830{
e5b00edf
NF
5831 tree t;
5832 tree *args = XALLOCAVEC (tree, n);
a1ab4c31
AC
5833 va_list list;
5834 int i;
5835
5836 va_start (list, n);
5837 for (i = 0; i < n; ++i)
5838 {
c6bd4220 5839 builtin_type a = (builtin_type) va_arg (list, int);
a1ab4c31
AC
5840 t = builtin_types[a];
5841 if (t == error_mark_node)
5842 goto egress;
e5b00edf 5843 args[i] = t;
a1ab4c31 5844 }
a1ab4c31 5845
a1ab4c31
AC
5846 t = builtin_types[ret];
5847 if (t == error_mark_node)
5848 goto egress;
e5b00edf
NF
5849 if (var)
5850 t = build_varargs_function_type_array (t, n, args);
5851 else
5852 t = build_function_type_array (t, n, args);
a1ab4c31
AC
5853
5854 egress:
5855 builtin_types[def] = t;
0edf1bb2 5856 va_end (list);
a1ab4c31
AC
5857}
5858
5859/* Build the builtin function types and install them in the builtin_types
5860 array for later use in builtin function decls. */
5861
5862static void
5863install_builtin_function_types (void)
5864{
5865 tree va_list_ref_type_node;
5866 tree va_list_arg_type_node;
5867
5868 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
5869 {
5870 va_list_arg_type_node = va_list_ref_type_node =
5871 build_pointer_type (TREE_TYPE (va_list_type_node));
5872 }
5873 else
5874 {
5875 va_list_arg_type_node = va_list_type_node;
5876 va_list_ref_type_node = build_reference_type (va_list_type_node);
5877 }
5878
5879#define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
5880 builtin_types[ENUM] = VALUE;
5881#define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
5882 def_fn_type (ENUM, RETURN, 0, 0);
5883#define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
5884 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
5885#define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
5886 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
5887#define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5888 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
5889#define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5890 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
5891#define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5892 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5893#define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5894 ARG6) \
5895 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5896#define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5897 ARG6, ARG7) \
5898 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
acf0174b
JJ
5899#define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5900 ARG6, ARG7, ARG8) \
5901 def_fn_type (ENUM, RETURN, 0, 8, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5902 ARG7, ARG8);
a1ab4c31
AC
5903#define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
5904 def_fn_type (ENUM, RETURN, 1, 0);
5905#define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
5906 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
5907#define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
5908 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
5909#define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5910 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
5911#define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5912 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
5913#define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5914 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5915#define DEF_POINTER_TYPE(ENUM, TYPE) \
5916 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
5917
5918#include "builtin-types.def"
5919
#undef DEF_PRIMITIVE_TYPE
#undef DEF_FUNCTION_TYPE_0
#undef DEF_FUNCTION_TYPE_1
#undef DEF_FUNCTION_TYPE_2
#undef DEF_FUNCTION_TYPE_3
#undef DEF_FUNCTION_TYPE_4
#undef DEF_FUNCTION_TYPE_5
#undef DEF_FUNCTION_TYPE_6
#undef DEF_FUNCTION_TYPE_7
#undef DEF_FUNCTION_TYPE_8
#undef DEF_FUNCTION_TYPE_VAR_0
#undef DEF_FUNCTION_TYPE_VAR_1
#undef DEF_FUNCTION_TYPE_VAR_2
#undef DEF_FUNCTION_TYPE_VAR_3
#undef DEF_FUNCTION_TYPE_VAR_4
#undef DEF_FUNCTION_TYPE_VAR_5
#undef DEF_POINTER_TYPE
5934 builtin_types[(int) BT_LAST] = NULL_TREE;
5935}
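
/* As an illustrative sketch of the expansion above (the enumerator shown
   follows the builtin-types.def naming conventions, but the exact set of
   entries varies between GCC versions), a definition such as

     DEF_FUNCTION_TYPE_2 (BT_FN_INT_CONST_STRING_CONST_STRING,
                          BT_INT, BT_CONST_STRING, BT_CONST_STRING)

   expands through the macros defined above into

     def_fn_type (BT_FN_INT_CONST_STRING_CONST_STRING,
                  BT_INT, 0, 2, BT_CONST_STRING, BT_CONST_STRING);

   i.e. it builds the non-variadic type "int (const char *, const char *)"
   and stores it in builtin_types[BT_FN_INT_CONST_STRING_CONST_STRING].  */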
5936
5937/* ----------------------------------------------------------------------- *
5938 * BUILTIN ATTRIBUTES *
5939 * ----------------------------------------------------------------------- */
5940
5941enum built_in_attribute
5942{
5943#define DEF_ATTR_NULL_TREE(ENUM) ENUM,
5944#define DEF_ATTR_INT(ENUM, VALUE) ENUM,
e384e6b5 5945#define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
5946#define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
5947#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
5948#include "builtin-attrs.def"
5949#undef DEF_ATTR_NULL_TREE
5950#undef DEF_ATTR_INT
e384e6b5 5951#undef DEF_ATTR_STRING
5952#undef DEF_ATTR_IDENT
5953#undef DEF_ATTR_TREE_LIST
5954 ATTR_LAST
5955};
5956
5957static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
5958
5959static void
5960install_builtin_attributes (void)
5961{
5962 /* Fill in the built_in_attributes array. */
5963#define DEF_ATTR_NULL_TREE(ENUM) \
5964 built_in_attributes[(int) ENUM] = NULL_TREE;
5965#define DEF_ATTR_INT(ENUM, VALUE) \
5966 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
5967#define DEF_ATTR_STRING(ENUM, VALUE) \
5968 built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
5969#define DEF_ATTR_IDENT(ENUM, STRING) \
5970 built_in_attributes[(int) ENUM] = get_identifier (STRING);
5971#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
5972 built_in_attributes[(int) ENUM] \
5973 = tree_cons (built_in_attributes[(int) PURPOSE], \
5974 built_in_attributes[(int) VALUE], \
5975 built_in_attributes[(int) CHAIN]);
5976#include "builtin-attrs.def"
5977#undef DEF_ATTR_NULL_TREE
5978#undef DEF_ATTR_INT
e384e6b5 5979#undef DEF_ATTR_STRING
5980#undef DEF_ATTR_IDENT
5981#undef DEF_ATTR_TREE_LIST
5982}
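
/* As an illustrative sketch (the enumerators follow the builtin-attrs.def
   naming conventions and are shown here only as an example), an entry such
   as

     DEF_ATTR_TREE_LIST (ATTR_NOTHROW_LIST, ATTR_NOTHROW, ATTR_NULL, ATTR_NULL)

   expands through the macros defined above into

     built_in_attributes[(int) ATTR_NOTHROW_LIST]
       = tree_cons (built_in_attributes[(int) ATTR_NOTHROW],
                    built_in_attributes[(int) ATTR_NULL],
                    built_in_attributes[(int) ATTR_NULL]);

   i.e. a one-element TREE_LIST whose PURPOSE is the identifier "nothrow",
   ready to be used as the attribute list of a builtin declaration.  */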
5983
5984/* Handle a "const" attribute; arguments as in
5985 struct attribute_spec.handler. */
5986
5987static tree
5988handle_const_attribute (tree *node, tree ARG_UNUSED (name),
5989 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5990 bool *no_add_attrs)
5991{
5992 if (TREE_CODE (*node) == FUNCTION_DECL)
5993 TREE_READONLY (*node) = 1;
5994 else
5995 *no_add_attrs = true;
5996
5997 return NULL_TREE;
5998}
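
/* Illustrative use, as it would be written in C (sketch only; the function
   name is made up):

     extern int popcount32 (unsigned int) __attribute__ ((const));

   On a FUNCTION_DECL the handler records this by setting TREE_READONLY,
   the tree-level encoding of a "const" function: no side effects and no
   reads from global memory, so calls with identical arguments may be
   merged or deleted by the optimizers.  */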
5999
6000/* Handle a "nothrow" attribute; arguments as in
6001 struct attribute_spec.handler. */
6002
6003static tree
6004handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
6005 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6006 bool *no_add_attrs)
6007{
6008 if (TREE_CODE (*node) == FUNCTION_DECL)
6009 TREE_NOTHROW (*node) = 1;
6010 else
6011 *no_add_attrs = true;
6012
6013 return NULL_TREE;
6014}
6015
6016/* Handle a "pure" attribute; arguments as in
6017 struct attribute_spec.handler. */
6018
6019static tree
6020handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6021 int ARG_UNUSED (flags), bool *no_add_attrs)
6022{
6023 if (TREE_CODE (*node) == FUNCTION_DECL)
6024 DECL_PURE_P (*node) = 1;
6025 /* ??? TODO: Support types. */
6026 else
6027 {
6028 warning (OPT_Wattributes, "%qs attribute ignored",
6029 IDENTIFIER_POINTER (name));
6030 *no_add_attrs = true;
6031 }
6032
6033 return NULL_TREE;
6034}
6035
6036/* Handle a "no vops" attribute; arguments as in
6037 struct attribute_spec.handler. */
6038
6039static tree
6040handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
6041 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6042 bool *ARG_UNUSED (no_add_attrs))
6043{
6044 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
6045 DECL_IS_NOVOPS (*node) = 1;
6046 return NULL_TREE;
6047}
6048
6049/* Helper for nonnull attribute handling; fetch the operand number
6050 from the attribute argument list. */
6051
6052static bool
6053get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
6054{
6055 /* Verify the arg number is a constant. */
6056 if (TREE_CODE (arg_num_expr) != INTEGER_CST
6057 || TREE_INT_CST_HIGH (arg_num_expr) != 0)
6058 return false;
6059
6060 *valp = TREE_INT_CST_LOW (arg_num_expr);
6061 return true;
6062}
6063
6064/* Handle the "nonnull" attribute. */
6065static tree
6066handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
6067 tree args, int ARG_UNUSED (flags),
6068 bool *no_add_attrs)
6069{
6070 tree type = *node;
6071 unsigned HOST_WIDE_INT attr_arg_num;
6072
6073 /* If no arguments are specified, all pointer arguments should be
6074 non-null. Verify a full prototype is given so that the arguments
6075 will have the correct types when we actually check them later. */
6076 if (!args)
6077 {
f4da8dce 6078 if (!prototype_p (type))
6079 {
6080 error ("nonnull attribute without arguments on a non-prototype");
6081 *no_add_attrs = true;
6082 }
6083 return NULL_TREE;
6084 }
6085
6086 /* Argument list specified. Verify that each argument number references
6087 a pointer argument. */
6088 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
6089 {
6090 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
6091
6092 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
6093 {
6094 error ("nonnull argument has invalid operand number (argument %lu)",
6095 (unsigned long) attr_arg_num);
6096 *no_add_attrs = true;
6097 return NULL_TREE;
6098 }
6099
d7d058c5 6100 if (prototype_p (type))
a1ab4c31 6101 {
6102 function_args_iterator iter;
6103 tree argument;
6104
6105 function_args_iter_init (&iter, type);
6106 for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
a1ab4c31 6107 {
d7d058c5 6108 argument = function_args_iter_cond (&iter);
6109 if (!argument || ck_num == arg_num)
6110 break;
6111 }
6112
6113 if (!argument
d7d058c5 6114 || TREE_CODE (argument) == VOID_TYPE)
a1ab4c31 6115 {
6116 error ("nonnull argument with out-of-range operand number "
6117 "(argument %lu, operand %lu)",
6118 (unsigned long) attr_arg_num, (unsigned long) arg_num);
6119 *no_add_attrs = true;
6120 return NULL_TREE;
6121 }
6122
d7d058c5 6123 if (TREE_CODE (argument) != POINTER_TYPE)
a1ab4c31 6124 {
6125 error ("nonnull argument references non-pointer operand "
6126 "(argument %lu, operand %lu)",
6127 (unsigned long) attr_arg_num, (unsigned long) arg_num);
6128 *no_add_attrs = true;
6129 return NULL_TREE;
6130 }
6131 }
6132 }
6133
6134 return NULL_TREE;
6135}
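
/* Illustrative use, as it would be written in C (sketch only; the function
   name is made up):

     extern void *my_memcpy (void *dest, const void *src, __SIZE_TYPE__ n)
       __attribute__ ((nonnull (1, 2)));

   Here ARGS is the list (1, 2) and the loop above verifies that operands 1
   and 2 of the prototype are indeed pointer types before the attribute is
   accepted.  With no arguments, every pointer parameter of the prototype
   is treated as non-null.  */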
6136
6137/* Handle a "sentinel" attribute. */
6138
6139static tree
6140handle_sentinel_attribute (tree *node, tree name, tree args,
6141 int ARG_UNUSED (flags), bool *no_add_attrs)
6142{
f4da8dce 6143 if (!prototype_p (*node))
6144 {
6145 warning (OPT_Wattributes,
6146 "%qs attribute requires prototypes with named arguments",
6147 IDENTIFIER_POINTER (name));
6148 *no_add_attrs = true;
6149 }
6150 else
6151 {
dcf0c47e 6152 if (!stdarg_p (*node))
6153 {
6154 warning (OPT_Wattributes,
6155 "%qs attribute only applies to variadic functions",
6156 IDENTIFIER_POINTER (name));
6157 *no_add_attrs = true;
6158 }
6159 }
6160
6161 if (args)
6162 {
6163 tree position = TREE_VALUE (args);
6164
6165 if (TREE_CODE (position) != INTEGER_CST)
6166 {
6167 warning (0, "requested position is not an integer constant");
6168 *no_add_attrs = true;
6169 }
6170 else
6171 {
6172 if (tree_int_cst_lt (position, integer_zero_node))
6173 {
6174 warning (0, "requested position is less than zero");
6175 *no_add_attrs = true;
6176 }
6177 }
6178 }
6179
6180 return NULL_TREE;
6181}
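
/* Illustrative use, as it would be written in C (sketch only; the function
   name is made up):

     extern void build_list (const char *first, ...)
       __attribute__ ((sentinel));

   The attribute only makes sense on a variadic prototype with at least one
   named parameter; it asks callers to terminate the variable argument list
   with a null pointer.  The optional integer argument checked above gives
   the position of that sentinel, counted from the end of the call.  */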
6182
6183/* Handle a "noreturn" attribute; arguments as in
6184 struct attribute_spec.handler. */
6185
6186static tree
6187handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6188 int ARG_UNUSED (flags), bool *no_add_attrs)
6189{
6190 tree type = TREE_TYPE (*node);
6191
6192 /* See FIXME comment in c_common_attribute_table. */
6193 if (TREE_CODE (*node) == FUNCTION_DECL)
6194 TREE_THIS_VOLATILE (*node) = 1;
6195 else if (TREE_CODE (type) == POINTER_TYPE
6196 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
6197 TREE_TYPE (*node)
6198 = build_pointer_type
6199 (build_type_variant (TREE_TYPE (type),
6200 TYPE_READONLY (TREE_TYPE (type)), 1));
6201 else
6202 {
6203 warning (OPT_Wattributes, "%qs attribute ignored",
6204 IDENTIFIER_POINTER (name));
6205 *no_add_attrs = true;
6206 }
6207
6208 return NULL_TREE;
6209}
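
/* The pointer case above mirrors the historical GCC idiom in which a
   "volatile" function type means "does not return".  As a sketch (the
   variable name is made up), for a declaration such as

     void (*fatal_handler) (int) __attribute__ ((noreturn));

   the declaration's type is rebuilt as a pointer to a volatile-qualified
   function type, which is how the middle-end recognizes calls through it
   as non-returning.  */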
6210
6211/* Handle a "leaf" attribute; arguments as in
6212 struct attribute_spec.handler. */
6213
6214static tree
6215handle_leaf_attribute (tree *node, tree name,
6216 tree ARG_UNUSED (args),
6217 int ARG_UNUSED (flags), bool *no_add_attrs)
6218{
6219 if (TREE_CODE (*node) != FUNCTION_DECL)
6220 {
6221 warning (OPT_Wattributes, "%qE attribute ignored", name);
6222 *no_add_attrs = true;
6223 }
6224 if (!TREE_PUBLIC (*node))
6225 {
32a5388a 6226 warning (OPT_Wattributes, "%qE attribute has no effect", name);
6227 *no_add_attrs = true;
6228 }
6229
6230 return NULL_TREE;
6231}
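
/* Illustrative use, as it would be written in C (sketch only; the function
   name is made up):

     extern int compute (int) __attribute__ ((leaf));

   This promises that compute returns to the current translation unit only
   by a normal return or by exception handling, never by calling back into
   it, which lets the optimizers retain more information across the call.
   The checks above reject non-functions and warn for unit-local functions,
   where the promise adds nothing.  */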
6232
6233/* Handle a "malloc" attribute; arguments as in
6234 struct attribute_spec.handler. */
6235
6236static tree
6237handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6238 int ARG_UNUSED (flags), bool *no_add_attrs)
6239{
6240 if (TREE_CODE (*node) == FUNCTION_DECL
6241 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
6242 DECL_IS_MALLOC (*node) = 1;
6243 else
6244 {
6245 warning (OPT_Wattributes, "%qs attribute ignored",
6246 IDENTIFIER_POINTER (name));
6247 *no_add_attrs = true;
6248 }
6249
6250 return NULL_TREE;
6251}
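
/* Illustrative use, as it would be written in C (sketch only; the function
   name is made up):

     extern void *pool_alloc (__SIZE_TYPE__ n) __attribute__ ((malloc));

   DECL_IS_MALLOC is only set when the declaration is a function returning
   a pointer; it tells the alias machinery that the returned pointer cannot
   alias any other pointer live at the call site.  */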
6252
6253/* Fake handler for attributes we don't properly support. */
6254
6255tree
6256fake_attribute_handler (tree * ARG_UNUSED (node),
6257 tree ARG_UNUSED (name),
6258 tree ARG_UNUSED (args),
6259 int ARG_UNUSED (flags),
6260 bool * ARG_UNUSED (no_add_attrs))
6261{
6262 return NULL_TREE;
6263}
6264
6265/* Handle a "type_generic" attribute. */
6266
6267static tree
6268handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
6269 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6270 bool * ARG_UNUSED (no_add_attrs))
6271{
6272 /* Ensure we have a function type. */
6273 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
b4680ca1 6274
a1ab4c31 6275 /* Ensure we have a variadic function. */
dcf0c47e 6276 gcc_assert (!prototype_p (*node) || stdarg_p (*node));
6277
6278 return NULL_TREE;
6279}
6280
6281/* Handle a "vector_size" attribute; arguments as in
6282 struct attribute_spec.handler. */
6283
6284static tree
6285handle_vector_size_attribute (tree *node, tree name, tree args,
6286 int ARG_UNUSED (flags),
6287 bool *no_add_attrs)
6288{
6289 unsigned HOST_WIDE_INT vecsize, nunits;
6290 enum machine_mode orig_mode;
6291 tree type = *node, new_type, size;
6292
6293 *no_add_attrs = true;
6294
6295 size = TREE_VALUE (args);
6296
6297 if (!host_integerp (size, 1))
6298 {
6299 warning (OPT_Wattributes, "%qs attribute ignored",
6300 IDENTIFIER_POINTER (name));
6301 return NULL_TREE;
6302 }
6303
6304 /* Get the vector size (in bytes). */
6305 vecsize = tree_low_cst (size, 1);
6306
6307 /* We need to provide for vector pointers, vector arrays, and
6308 functions returning vectors. For example:
6309
6310 __attribute__((vector_size(16))) short *foo;
6311
6312 In this case, the mode is SI, but the type being modified is
6313 HI, so we need to look further. */
6314
6315 while (POINTER_TYPE_P (type)
6316 || TREE_CODE (type) == FUNCTION_TYPE
132a5459 6317 || TREE_CODE (type) == ARRAY_TYPE)
6318 type = TREE_TYPE (type);
6319
6320 /* Get the mode of the type being modified. */
6321 orig_mode = TYPE_MODE (type);
6322
6323 if ((!INTEGRAL_TYPE_P (type)
6324 && !SCALAR_FLOAT_TYPE_P (type)
6325 && !FIXED_POINT_TYPE_P (type))
6326 || (!SCALAR_FLOAT_MODE_P (orig_mode)
6327 && GET_MODE_CLASS (orig_mode) != MODE_INT
6328 && !ALL_SCALAR_FIXED_POINT_MODE_P (orig_mode))
6329 || !host_integerp (TYPE_SIZE_UNIT (type), 1)
6330 || TREE_CODE (type) == BOOLEAN_TYPE)
6331 {
6332 error ("invalid vector type for attribute %qs",
6333 IDENTIFIER_POINTER (name));
6334 return NULL_TREE;
6335 }
6336
6337 if (vecsize % tree_low_cst (TYPE_SIZE_UNIT (type), 1))
6338 {
6339 error ("vector size not an integral multiple of component size");
6340 return NULL;
6341 }
6342
6343 if (vecsize == 0)
6344 {
6345 error ("zero vector size");
6346 return NULL;
6347 }
6348
6349 /* Calculate how many units fit in the vector. */
6350 nunits = vecsize / tree_low_cst (TYPE_SIZE_UNIT (type), 1);
6351 if (nunits & (nunits - 1))
6352 {
6353 error ("number of components of the vector not a power of two");
6354 return NULL_TREE;
6355 }
6356
6357 new_type = build_vector_type (type, nunits);
6358
6359 /* Build back pointers if needed. */
9b469089 6360 *node = reconstruct_complex_type (*node, new_type);
6361
6362 return NULL_TREE;
6363}
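
/* Illustrative use, as it would be written in C (sketch only):

     typedef int v4si __attribute__ ((vector_size (16)));

   With 4-byte ints this gives nunits = 16 / 4 = 4, so v4si becomes a
   4-element integer vector type.  The pointer/array/function peeling above
   is what makes a declaration like

     __attribute__ ((vector_size (16))) short *foo;

   turn foo into a pointer to a vector of 8 shorts rather than into a
   vector of pointers.  */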
6364
6365/* Handle a "vector_type" attribute; arguments as in
6366 struct attribute_spec.handler. */
6367
6368static tree
6369handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6370 int ARG_UNUSED (flags),
6371 bool *no_add_attrs)
6372{
6373 /* Vector representative type and size. */
6374 tree rep_type = *node;
6375 tree rep_size = TYPE_SIZE_UNIT (rep_type);
6376
6377 /* Vector size in bytes and number of units. */
6378 unsigned HOST_WIDE_INT vec_bytes, vec_units;
6379
6380 /* Vector element type and mode. */
6381 tree elem_type;
6382 enum machine_mode elem_mode;
6383
6384 *no_add_attrs = true;
6385
6386 if (TREE_CODE (rep_type) != ARRAY_TYPE)
6387 {
6388 error ("attribute %qs applies to array types only",
6389 IDENTIFIER_POINTER (name));
6390 return NULL_TREE;
6391 }
6392
 6393 /* Silently punt on variable sizes. We can't make vector types for them,
 6394 we need to ignore them on front-end generated subtypes of unconstrained
 6395 bases, and this attribute is for binding implementors, not end-users, so
 6396 we should never get there from legitimate explicit uses. */
6397
6398 if (!host_integerp (rep_size, 1))
6399 return NULL_TREE;
6400
6401 /* Get the element type/mode and check this is something we know
6402 how to make vectors of. */
6403
6404 elem_type = TREE_TYPE (rep_type);
6405 elem_mode = TYPE_MODE (elem_type);
6406
6407 if ((!INTEGRAL_TYPE_P (elem_type)
6408 && !SCALAR_FLOAT_TYPE_P (elem_type)
6409 && !FIXED_POINT_TYPE_P (elem_type))
6410 || (!SCALAR_FLOAT_MODE_P (elem_mode)
6411 && GET_MODE_CLASS (elem_mode) != MODE_INT
6412 && !ALL_SCALAR_FIXED_POINT_MODE_P (elem_mode))
6413 || !host_integerp (TYPE_SIZE_UNIT (elem_type), 1))
6414 {
6415 error ("invalid element type for attribute %qs",
6416 IDENTIFIER_POINTER (name));
6417 return NULL_TREE;
6418 }
6419
6420 /* Sanity check the vector size and element type consistency. */
6421
6422 vec_bytes = tree_low_cst (rep_size, 1);
6423
6424 if (vec_bytes % tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1))
6425 {
6426 error ("vector size not an integral multiple of component size");
6427 return NULL;
6428 }
6429
6430 if (vec_bytes == 0)
6431 {
6432 error ("zero vector size");
6433 return NULL;
6434 }
6435
6436 vec_units = vec_bytes / tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1);
6437 if (vec_units & (vec_units - 1))
6438 {
6439 error ("number of components of the vector not a power of two");
6440 return NULL_TREE;
6441 }
6442
6443 /* Build the vector type and replace. */
6444
6445 *node = build_vector_type (elem_type, vec_units);
6446 TYPE_REPRESENTATIVE_ARRAY (*node) = rep_type;
6447
6448 return NULL_TREE;
6449}
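
/* Illustrative sketch only: this attribute is applied internally by the
   front-end, typically through pragma Machine_Attribute in binding
   packages such as the AltiVec support, and the Ada names below are made
   up.  Something like

     type Int_Array is array (1 .. 4) of Integer;
     pragma Machine_Attribute (Int_Array, "vector_type");

   would present the 16-byte constrained array type (assuming 4-byte
   Integer) to this handler as rep_type; it is then replaced by a 4-element
   integer vector type, with the original array type recorded as the
   TYPE_REPRESENTATIVE_ARRAY of the new vector type.  */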
6450
6451/* ----------------------------------------------------------------------- *
6452 * BUILTIN FUNCTIONS *
6453 * ----------------------------------------------------------------------- */
6454
6455/* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
6456 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
6457 if nonansi_p and flag_no_nonansi_builtin. */
6458
6459static void
6460def_builtin_1 (enum built_in_function fncode,
6461 const char *name,
6462 enum built_in_class fnclass,
6463 tree fntype, tree libtype,
6464 bool both_p, bool fallback_p,
6465 bool nonansi_p ATTRIBUTE_UNUSED,
6466 tree fnattrs, bool implicit_p)
6467{
6468 tree decl;
6469 const char *libname;
6470
 6471 /* Preserve an already installed decl. It most likely was set up in advance
6472 (e.g. as part of the internal builtins) for specific reasons. */
e79983f4 6473 if (builtin_decl_explicit (fncode) != NULL_TREE)
6474 return;
6475
6476 gcc_assert ((!both_p && !fallback_p)
6477 || !strncmp (name, "__builtin_",
6478 strlen ("__builtin_")));
6479
6480 libname = name + strlen ("__builtin_");
6481 decl = add_builtin_function (name, fntype, fncode, fnclass,
6482 (fallback_p ? libname : NULL),
6483 fnattrs);
6484 if (both_p)
6485 /* ??? This is normally further controlled by command-line options
6486 like -fno-builtin, but we don't have them for Ada. */
6487 add_builtin_function (libname, libtype, fncode, fnclass,
6488 NULL, fnattrs);
6489
e79983f4 6490 set_builtin_decl (fncode, decl, implicit_p);
6491}
6492
6493static int flag_isoc94 = 0;
6494static int flag_isoc99 = 0;
6495
6496/* Install what the common builtins.def offers. */
6497
6498static void
6499install_builtin_functions (void)
6500{
6501#define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
6502 NONANSI_P, ATTRS, IMPLICIT, COND) \
6503 if (NAME && COND) \
6504 def_builtin_1 (ENUM, NAME, CLASS, \
6505 builtin_types[(int) TYPE], \
6506 builtin_types[(int) LIBTYPE], \
6507 BOTH_P, FALLBACK_P, NONANSI_P, \
6508 built_in_attributes[(int) ATTRS], IMPLICIT);
6509#include "builtins.def"
6510#undef DEF_BUILTIN
6511}
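
/* As an illustrative sketch of the expansion above (simplified, and the
   specific entry shown follows the builtins.def conventions of this era),
   a line such as

     DEF_LIB_BUILTIN (BUILT_IN_STRLEN, "strlen",
                      BT_FN_SIZE_CONST_STRING, ATTR_PURE_NOTHROW_NONNULL_LEAF)

   ultimately produces a call along the lines of

     def_builtin_1 (BUILT_IN_STRLEN, "__builtin_strlen", BUILT_IN_NORMAL,
                    builtin_types[(int) BT_FN_SIZE_CONST_STRING],
                    builtin_types[(int) BT_FN_SIZE_CONST_STRING],
                    true, true, false,
                    built_in_attributes[(int) ATTR_PURE_NOTHROW_NONNULL_LEAF],
                    true);

   which declares both __builtin_strlen and strlen with the same type and
   attribute list.  */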
6512
6513/* ----------------------------------------------------------------------- *
 6514 * INSTALLATION OF BUILTINS *
6515 * ----------------------------------------------------------------------- */
6516
6517/* Install the builtin functions we might need. */
6518
6519void
6520gnat_install_builtins (void)
6521{
6522 install_builtin_elementary_types ();
6523 install_builtin_function_types ();
6524 install_builtin_attributes ();
6525
6526 /* Install builtins used by generic middle-end pieces first. Some of these
6527 know about internal specificities and control attributes accordingly, for
6528 instance __builtin_alloca vs no-throw and -fstack-check. We will ignore
6529 the generic definition from builtins.def. */
384c400a 6530 build_common_builtin_nodes ();
6531
6532 /* Now, install the target specific builtins, such as the AltiVec family on
6533 ppc, and the common set as exposed by builtins.def. */
6534 targetm.init_builtins ();
6535 install_builtin_functions ();
6536}
6537
6538#include "gt-ada-utils.h"
6539#include "gtype-ada.h"