gcc/ada/gcc-interface/utils.c
/*****************************************************************************
 *                                                                           *
 *                        GNAT COMPILER COMPONENTS                           *
 *                                                                           *
 *                               U T I L S                                   *
 *                                                                           *
 *                         C Implementation File                             *
 *                                                                           *
 *         Copyright (C) 1992-2014, Free Software Foundation, Inc.           *
 *                                                                           *
 * GNAT is free software; you can redistribute it and/or modify it under    *
 * terms of the GNU General Public License as published by the Free Soft-   *
 * ware Foundation; either version 3, or (at your option) any later ver-    *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY   *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for more details.  You should have received a copy of the GNU General    *
 * Public License along with GCC; see the file COPYING3.  If not see        *
 * <http://www.gnu.org/licenses/>.                                           *
 *                                                                           *
 * GNAT was originally developed by the GNAT team at New York University.   *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 *                                                                           *
 *****************************************************************************/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "attribs.h"
#include "varasm.h"
#include "flags.h"
#include "toplev.h"
#include "diagnostic-core.h"
#include "output.h"
#include "ggc.h"
#include "debug.h"
#include "convert.h"
#include "target.h"
#include "common/common-target.h"
#include "langhooks.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "tree-iterator.h"

#include "ada.h"
#include "types.h"
#include "atree.h"
#include "elists.h"
#include "namet.h"
#include "nlists.h"
#include "stringt.h"
#include "uintp.h"
#include "fe.h"
#include "sinfo.h"
#include "einfo.h"
#include "ada-tree.h"
#include "gigi.h"

/* If nonzero, pretend we are allocating at global level.  */
int force_global;

/* The default alignment of "double" floating-point types, i.e. floating
   point types whose size is equal to 64 bits, or 0 if this alignment is
   not specifically capped.  */
int double_float_alignment;

/* The default alignment of "double" or larger scalar types, i.e. scalar
   types whose size is greater or equal to 64 bits, or 0 if this alignment
   is not specifically capped.  */
int double_scalar_alignment;

/* Tree nodes for the various types and decls we create.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons.  */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];

/* Likewise, but with extra info for each of the possible raise reasons.  */
tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];

/* Forward declarations for handlers of attributes.  */
static tree handle_const_attribute (tree *, tree, tree, int, bool *);
static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);

/* Fake handler for attributes we don't properly support, typically because
   they'd require dragging a lot of the common-c front-end circuitry.  */
static tree fake_attribute_handler (tree *, tree, tree, int, bool *);

/* Table of machine-independent internal attributes for Ada.  We support
   this minimal set of attributes to accommodate the needs of builtins.  */
const struct attribute_spec gnat_internal_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  { "const", 0, 0, true, false, false, handle_const_attribute,
    false },
  { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute,
    false },
  { "pure", 0, 0, true, false, false, handle_pure_attribute,
    false },
  { "no vops", 0, 0, true, false, false, handle_novops_attribute,
    false },
  { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute,
    false },
  { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute,
    false },
  { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute,
    false },
  { "leaf", 0, 0, true, false, false, handle_leaf_attribute,
    false },
  { "malloc", 0, 0, true, false, false, handle_malloc_attribute,
    false },
  { "type generic", 0, 0, false, true, true, handle_type_generic_attribute,
    false },

  { "vector_size", 1, 1, false, true, false, handle_vector_size_attribute,
    false },
  { "vector_type", 0, 0, false, true, false, handle_vector_type_attribute,
    false },
  { "may_alias", 0, 0, false, true, false, NULL, false },

  /* ??? format and format_arg are heavy and not supported, which actually
     prevents support for stdio builtins, which we however declare as part
     of the common builtins.def contents.  */
  { "format", 3, 3, false, true, true, fake_attribute_handler, false },
  { "format_arg", 1, 1, false, true, true, fake_attribute_handler, false },

  { NULL, 0, 0, false, false, false, NULL, false }
};

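/* Illustrative note, not part of the original source: the entries above map
   directly onto the usual GCC function attributes.  For instance, a builtin
   whose prototype is declared along the lines of

       extern void *my_alloc (__SIZE_TYPE__ size)
         __attribute__ ((__malloc__, __nothrow__));

   would have handle_malloc_attribute and handle_nothrow_attribute invoked
   on its declaration through this table.  The prototype and the name
   my_alloc are hypothetical and only meant to illustrate the mechanism.  */
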
/* Associates a GNAT tree node to a GCC tree node.  It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'.  See documentation
   of `save_gnu_tree' for more info.  */
static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;

#define GET_GNU_TREE(GNAT_ENTITY) \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]

#define SET_GNU_TREE(GNAT_ENTITY,VAL) \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_GNU_TREE(GNAT_ENTITY) \
  (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* Associates a GNAT entity to a GCC tree node used as a dummy, if any.  */
static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;

#define GET_DUMMY_NODE(GNAT_ENTITY) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id]

#define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
  (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* This variable keeps a table for types for each precision so that we only
   allocate each of them once.  Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special.  Perhaps we should NOT share these types; we'll see how it
   goes later.  */
static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static GTY(()) tree float_types[NUM_MACHINE_MODES];

/* For each binding contour we allocate a binding_level structure to indicate
   the binding depth.  */

struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
  /* The binding level containing this one (the enclosing binding level).  */
  struct gnat_binding_level *chain;
  /* The BLOCK node for this level.  */
  tree block;
  /* If nonzero, the setjmp buffer that needs to be updated for any
     variable-sized definition within this context.  */
  tree jmpbuf_decl;
};

/* The binding level currently in effect.  */
static GTY(()) struct gnat_binding_level *current_binding_level;

/* A chain of gnat_binding_level structures awaiting reuse.  */
static GTY((deletable)) struct gnat_binding_level *free_binding_level;

/* The context to be used for global declarations.  */
static GTY(()) tree global_context;

/* An array of global declarations.  */
static GTY(()) vec<tree, va_gc> *global_decls;

/* An array of builtin function declarations.  */
static GTY(()) vec<tree, va_gc> *builtin_decls;

/* An array of global renaming pointers.  */
static GTY(()) vec<tree, va_gc> *global_renaming_pointers;

/* A chain of unused BLOCK nodes.  */
static GTY((deletable)) tree free_block_chain;

static int pad_type_hash_marked_p (const void *p);
static hashval_t pad_type_hash_hash (const void *p);
static int pad_type_hash_eq (const void *p1, const void *p2);

/* A hash table of padded types.  It is modelled on the generic type
   hash table in tree.c, which must thus be used as a reference.  */
struct GTY(()) pad_type_hash {
  unsigned long hash;
  tree type;
};

static GTY ((if_marked ("pad_type_hash_marked_p"),
             param_is (struct pad_type_hash)))
  htab_t pad_type_hash_table;

static tree merge_sizes (tree, tree, tree, bool, bool);
static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *);
static tree float_type_for_precision (int, enum machine_mode);
static tree convert_to_fat_pointer (tree, tree);
static unsigned int scale_by_factor_of (tree, unsigned int);
static bool potential_alignment_gap (tree, tree, tree);
\f
/* Initialize data structures of the utils.c module.  */

void
init_gnat_utils (void)
{
  /* Initialize the association of GNAT nodes to GCC trees.  */
  associate_gnat_to_gnu = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);

  /* Initialize the association of GNAT nodes to GCC trees as dummies.  */
  dummy_node_table = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);

  /* Initialize the hash table of padded types.  */
  pad_type_hash_table
    = htab_create_ggc (512, pad_type_hash_hash, pad_type_hash_eq, 0);
}

/* Destroy data structures of the utils.c module.  */

void
destroy_gnat_utils (void)
{
  /* Destroy the association of GNAT nodes to GCC trees.  */
  ggc_free (associate_gnat_to_gnu);
  associate_gnat_to_gnu = NULL;

  /* Destroy the association of GNAT nodes to GCC trees as dummies.  */
  ggc_free (dummy_node_table);
  dummy_node_table = NULL;

  /* Destroy the hash table of padded types.  */
  htab_delete (pad_type_hash_table);
  pad_type_hash_table = NULL;

  /* Invalidate the global renaming pointers.  */
  invalidate_global_renaming_pointers ();
}
\f
/* GNAT_ENTITY is a GNAT tree node for an entity.  Associate GNU_DECL, a GCC
   tree node, with GNAT_ENTITY.  If GNU_DECL is not a ..._DECL node, abort.
   If NO_CHECK is true, the latter check is suppressed.

   If GNU_DECL is zero, reset a previous association.  */

void
save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
{
  /* Check that GNAT_ENTITY is not already defined and that it is being set
     to something which is a decl.  If that is not the case, this usually
     means GNAT_ENTITY is defined twice, but occasionally is due to some
     Gigi problem.  */
  gcc_assert (!(gnu_decl
                && (PRESENT_GNU_TREE (gnat_entity)
                    || (!no_check && !DECL_P (gnu_decl)))));

  SET_GNU_TREE (gnat_entity, gnu_decl);
}

/* GNAT_ENTITY is a GNAT tree node for an entity.  Return the GCC tree node
   that was associated with it.  If there is no such tree node, abort.

   In some cases, such as delayed elaboration or expressions that need to
   be elaborated only once, GNAT_ENTITY is really not an entity.  */

tree
get_gnu_tree (Entity_Id gnat_entity)
{
  gcc_assert (PRESENT_GNU_TREE (gnat_entity));
  return GET_GNU_TREE (gnat_entity);
}

/* Return nonzero if a GCC tree has been associated with GNAT_ENTITY.  */

bool
present_gnu_tree (Entity_Id gnat_entity)
{
  return PRESENT_GNU_TREE (gnat_entity);
}
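
/* Illustrative note, not part of the original source: a typical caller in
   gigi pairs the routines above roughly as follows

       if (!present_gnu_tree (gnat_entity))
         save_gnu_tree (gnat_entity, gnu_decl, false);
       gnu_decl = get_gnu_tree (gnat_entity);

   i.e. an entity is translated once, the resulting ..._DECL is recorded,
   and later references retrieve the same GCC tree.  The snippet is a sketch
   of the intended usage, not a copy of actual caller code.  */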
\f
/* Make a dummy type corresponding to GNAT_TYPE.  */

tree
make_dummy_type (Entity_Id gnat_type)
{
  Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
  tree gnu_type;

  /* If there is an equivalent type, get its underlying type.  */
  if (Present (gnat_underlying))
    gnat_underlying = Gigi_Equivalent_Type (Underlying_Type (gnat_underlying));

  /* If there was no equivalent type (can only happen when just annotating
     types) or underlying type, go back to the original type.  */
  if (No (gnat_underlying))
    gnat_underlying = gnat_type;

  /* If there is already a dummy type, use that one.  Else make one.  */
  if (PRESENT_DUMMY_NODE (gnat_underlying))
    return GET_DUMMY_NODE (gnat_underlying);

  /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
     an ENUMERAL_TYPE.  */
  gnu_type = make_node (Is_Record_Type (gnat_underlying)
                        ? tree_code_for_record_type (gnat_underlying)
                        : ENUMERAL_TYPE);
  TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
  TYPE_DUMMY_P (gnu_type) = 1;
  TYPE_STUB_DECL (gnu_type)
    = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
  if (Is_By_Reference_Type (gnat_underlying))
    TYPE_BY_REFERENCE_P (gnu_type) = 1;

  SET_DUMMY_NODE (gnat_underlying, gnu_type);

  return gnu_type;
}

/* Return the dummy type that was made for GNAT_TYPE, if any.  */

tree
get_dummy_type (Entity_Id gnat_type)
{
  return GET_DUMMY_NODE (gnat_type);
}

/* Build dummy fat and thin pointer types whose designated type is specified
   by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter.  */

void
build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
{
  tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
  tree gnu_fat_type, fields, gnu_object_type;

  gnu_template_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
  TYPE_DUMMY_P (gnu_template_type) = 1;
  gnu_ptr_template = build_pointer_type (gnu_template_type);

  gnu_array_type = make_node (ENUMERAL_TYPE);
  TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
  TYPE_DUMMY_P (gnu_array_type) = 1;
  gnu_ptr_array = build_pointer_type (gnu_array_type);

  gnu_fat_type = make_node (RECORD_TYPE);
  /* Build a stub DECL to trigger the special processing for fat pointer types
     in gnat_pushdecl.  */
  TYPE_NAME (gnu_fat_type)
    = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
                             gnu_fat_type);
  fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
                              gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
  DECL_CHAIN (fields)
    = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
                         gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
  finish_fat_pointer_type (gnu_fat_type, fields);
  SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
  /* Suppress debug info until after the type is completed.  */
  TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;

  gnu_object_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
  TYPE_DUMMY_P (gnu_object_type) = 1;

  TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
  TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
}
\f
/* Return true if we are in the global binding level.  */

bool
global_bindings_p (void)
{
  return force_global || current_function_decl == NULL_TREE;
}

/* Enter a new binding level.  */

void
gnat_pushlevel (void)
{
  struct gnat_binding_level *newlevel = NULL;

  /* Reuse a struct for this binding level, if there is one.  */
  if (free_binding_level)
    {
      newlevel = free_binding_level;
      free_binding_level = free_binding_level->chain;
    }
  else
    newlevel = ggc_alloc<gnat_binding_level> ();

  /* Use a free BLOCK, if any; otherwise, allocate one.  */
  if (free_block_chain)
    {
      newlevel->block = free_block_chain;
      free_block_chain = BLOCK_CHAIN (free_block_chain);
      BLOCK_CHAIN (newlevel->block) = NULL_TREE;
    }
  else
    newlevel->block = make_node (BLOCK);

  /* Point the BLOCK we just made to its parent.  */
  if (current_binding_level)
    BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;

  BLOCK_VARS (newlevel->block) = NULL_TREE;
  BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
  TREE_USED (newlevel->block) = 1;

  /* Add this level to the front of the chain (stack) of active levels.  */
  newlevel->chain = current_binding_level;
  newlevel->jmpbuf_decl = NULL_TREE;
  current_binding_level = newlevel;
}

/* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
   and point FNDECL to this BLOCK.  */

void
set_current_block_context (tree fndecl)
{
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  set_block_for_group (current_binding_level->block);
}

/* Set the jmpbuf_decl for the current binding level to DECL.  */

void
set_block_jmpbuf_decl (tree decl)
{
  current_binding_level->jmpbuf_decl = decl;
}

/* Get the jmpbuf_decl, if any, for the current binding level.  */

tree
get_block_jmpbuf_decl (void)
{
  return current_binding_level->jmpbuf_decl;
}

/* Exit a binding level.  Set any BLOCK into the current code group.  */

void
gnat_poplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
  BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));

  /* If this is a function-level BLOCK don't do anything.  Otherwise, if there
     are no variables free the block and merge its subblocks into those of its
     parent block.  Otherwise, add it to the list of its parent.  */
  if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
    ;
  else if (BLOCK_VARS (block) == NULL_TREE)
    {
      BLOCK_SUBBLOCKS (level->chain->block)
        = block_chainon (BLOCK_SUBBLOCKS (block),
                         BLOCK_SUBBLOCKS (level->chain->block));
      BLOCK_CHAIN (block) = free_block_chain;
      free_block_chain = block;
    }
  else
    {
      BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
      BLOCK_SUBBLOCKS (level->chain->block) = block;
      TREE_USED (block) = 1;
      set_block_for_group (block);
    }

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}

/* Exit a binding level and discard the associated BLOCK.  */

void
gnat_zaplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  BLOCK_CHAIN (block) = free_block_chain;
  free_block_chain = block;

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}
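
/* Illustrative note, not part of the original source: the binding level
   primitives above are meant to be used in matched pairs, for example when
   translating a subprogram body, roughly

       gnat_pushlevel ();
       set_current_block_context (gnu_subprog_decl);
       ... translate declarations and statements ...
       gnat_poplevel ();

   with gnat_zaplevel taking the place of gnat_poplevel when the BLOCK built
   for the level turns out not to be needed.  The name gnu_subprog_decl is a
   placeholder, not a reference to actual caller code.  */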
\f
/* Set the context of TYPE and its parallel types (if any) to CONTEXT.  */

static void
gnat_set_type_context (tree type, tree context)
{
  tree decl = TYPE_STUB_DECL (type);

  TYPE_CONTEXT (type) = context;

  while (decl && DECL_PARALLEL_TYPE (decl))
    {
      TYPE_CONTEXT (DECL_PARALLEL_TYPE (decl)) = context;
      decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
    }
}

/* Record DECL as belonging to the current lexical scope and use GNAT_NODE
   for location information and flag propagation.  */

void
gnat_pushdecl (tree decl, Node_Id gnat_node)
{
  /* If DECL is public external or at top level, it has global context.  */
  if ((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || global_bindings_p ())
    {
      if (!global_context)
        global_context = build_translation_unit_decl (NULL_TREE);
      DECL_CONTEXT (decl) = global_context;
    }
  else
    {
      DECL_CONTEXT (decl) = current_function_decl;

      /* Functions imported in another function are not really nested.
         For really nested functions mark them initially as needing
         a static chain for uses of that flag before unnesting;
         lower_nested_functions will then recompute it.  */
      if (TREE_CODE (decl) == FUNCTION_DECL && !TREE_PUBLIC (decl))
        DECL_STATIC_CHAIN (decl) = 1;
    }

  TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));

  /* Set the location of DECL and emit a declaration for it.  */
  if (Present (gnat_node))
    Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));

  add_decl_expr (decl, gnat_node);

  /* Put the declaration on the list.  The list of declarations is in reverse
     order.  The list will be reversed later.  Put global declarations in the
     globals list and local ones in the current block.  But skip TYPE_DECLs
     for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
     with the debugger and aren't needed anyway.  */
  if (!(TREE_CODE (decl) == TYPE_DECL
        && TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
    {
      if (DECL_EXTERNAL (decl))
        {
          if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
            vec_safe_push (builtin_decls, decl);
        }
      else if (global_bindings_p ())
        vec_safe_push (global_decls, decl);
      else
        {
          DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
          BLOCK_VARS (current_binding_level->block) = decl;
        }
    }

  /* For the declaration of a type, set its name if it either is not already
     set or if the previous type name was not derived from a source name.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same POINTER_TYPE node.  Code in the
     equivalent function of c-decl.c makes a copy of the type node here, but
     that may cause us trouble with incomplete types.  We make an exception
     for fat pointer types because the compiler automatically builds them
     for unconstrained array types and the debugger uses them to represent
     both these and pointers to these.  */
  if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
    {
      tree t = TREE_TYPE (decl);

      if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
        {
          /* Array and pointer types aren't "tagged" types so we force the
             type to be associated with its typedef in the DWARF back-end,
             in order to make sure that the latter is always preserved.  */
          if (!DECL_ARTIFICIAL (decl)
              && (TREE_CODE (t) == ARRAY_TYPE
                  || TREE_CODE (t) == POINTER_TYPE))
            {
              tree tt = build_distinct_type_copy (t);
              if (TREE_CODE (t) == POINTER_TYPE)
                TYPE_NEXT_PTR_TO (t) = tt;
              TYPE_NAME (tt) = DECL_NAME (decl);
              gnat_set_type_context (tt, DECL_CONTEXT (decl));
              TYPE_STUB_DECL (tt) = TYPE_STUB_DECL (t);
              DECL_ORIGINAL_TYPE (decl) = tt;
            }
        }
      else if (TYPE_IS_FAT_POINTER_P (t))
        {
          /* We need a variant for the placeholder machinery to work.  */
          tree tt = build_variant_type_copy (t);
          TYPE_NAME (tt) = decl;
          gnat_set_type_context (tt, DECL_CONTEXT (decl));
          TREE_USED (tt) = TREE_USED (t);
          TREE_TYPE (decl) = tt;
          if (DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
            DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
          else
            DECL_ORIGINAL_TYPE (decl) = t;
          DECL_ARTIFICIAL (decl) = 0;
          t = NULL_TREE;
        }
      else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
        ;
      else
        t = NULL_TREE;

      /* Propagate the name to all the anonymous variants.  This is needed
         for the type qualifiers machinery to work properly.  */
      if (t)
        for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
          if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
            {
              TYPE_NAME (t) = decl;
              gnat_set_type_context (t, DECL_CONTEXT (decl));
            }
    }
}
\f
/* Create a record type that contains a SIZE bytes long field of TYPE with a
   starting bit position so that it is aligned to ALIGN bits, and leaving at
   least ROOM bytes free before the field.  BASE_ALIGN is the alignment the
   record is guaranteed to get.  GNAT_NODE is used for the position of the
   associated TYPE_DECL.  */

tree
make_aligning_type (tree type, unsigned int align, tree size,
                    unsigned int base_align, int room, Node_Id gnat_node)
{
  /* We will be crafting a record type with one field at a position set to be
     the next multiple of ALIGN past record'address + room bytes.  We use a
     record placeholder to express record'address.  */
  tree record_type = make_node (RECORD_TYPE);
  tree record = build0 (PLACEHOLDER_EXPR, record_type);

  tree record_addr_st
    = convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));

  /* The diagram below summarizes the shape of what we manipulate:

                      <--------- pos ---------->
            {      +------------+-------------+-----------------+
   record =>{      |############|     ...     | field (type)    |
            {      +------------+-------------+-----------------+
                   |<-- room -->|<- voffset ->|<---- size ----->|
                   o            o
                   |            |
                   record_addr  vblock_addr

     Every length is in sizetype bytes there, except "pos" which has to be
     set as a bit position in the GCC tree for the record.  */
  tree room_st = size_int (room);
  tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
  tree voffset_st, pos, field;

  tree name = TYPE_IDENTIFIER (type);

  name = concat_name (name, "ALIGN");
  TYPE_NAME (record_type) = name;

  /* Compute VOFFSET and then POS.  The next byte position multiple of some
     alignment after some address is obtained by "and"ing the alignment minus
     1 with the two's complement of the address.  */
  voffset_st = size_binop (BIT_AND_EXPR,
                           fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
                           size_int ((align / BITS_PER_UNIT) - 1));

  /* POS = (ROOM + VOFFSET) * BIT_PER_UNIT, in bitsizetype.  */
  pos = size_binop (MULT_EXPR,
                    convert (bitsizetype,
                             size_binop (PLUS_EXPR, room_st, voffset_st)),
                    bitsize_unit_node);

  /* Craft the GCC record representation.  We exceptionally do everything
     manually here because 1) our generic circuitry is not quite ready to
     handle the complex position/size expressions we are setting up, 2) we
     have a strong simplifying factor at hand: we know the maximum possible
     value of voffset, and 3) we have to set/reset at least the sizes in
     accordance with this maximum value anyway, as we need them to convey
     what should be "alloc"ated for this type.

     Use -1 as the 'addressable' indication for the field to prevent the
     creation of a bitfield.  We don't need one, it would have damaging
     consequences on the alignment computation, and create_field_decl would
     make one without this special argument, for instance because of the
     complex position expression.  */
  field = create_field_decl (get_identifier ("F"), type, record_type, size,
                             pos, 1, -1);
  TYPE_FIELDS (record_type) = field;

  TYPE_ALIGN (record_type) = base_align;
  TYPE_USER_ALIGN (record_type) = 1;

  TYPE_SIZE (record_type)
    = size_binop (PLUS_EXPR,
                  size_binop (MULT_EXPR, convert (bitsizetype, size),
                              bitsize_unit_node),
                  bitsize_int (align + room * BITS_PER_UNIT));
  TYPE_SIZE_UNIT (record_type)
    = size_binop (PLUS_EXPR, size,
                  size_int (room + align / BITS_PER_UNIT));

  SET_TYPE_MODE (record_type, BLKmode);
  relate_alias_sets (record_type, type, ALIAS_SET_COPY);

  /* Declare it now since it will never be declared otherwise.  This is
     necessary to ensure that its subtrees are properly marked.  */
  create_type_decl (name, record_type, true, false, gnat_node);

  return record_type;
}

/* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
   as the field type of a packed record if IN_RECORD is true, or as the
   component type of a packed array if IN_RECORD is false.  See if we can
   rewrite it either as a type that has a non-BLKmode, which we can pack
   tighter in the packed record case, or as a smaller type.  If so, return
   the new type.  If not, return the original type.  */

tree
make_packable_type (tree type, bool in_record)
{
  unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type));
  unsigned HOST_WIDE_INT new_size;
  tree new_type, old_field, field_list = NULL_TREE;
  unsigned int align;

  /* No point in doing anything if the size is zero.  */
  if (size == 0)
    return type;

  new_type = make_node (TREE_CODE (type));

  /* Copy the name and flags from the old type to that of the new.
     Note that we rely on the pointer equality created here for
     TYPE_NAME to look through conversions in various places.  */
  TYPE_NAME (new_type) = TYPE_NAME (type);
  TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
  TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
  if (TREE_CODE (type) == RECORD_TYPE)
    TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);

  /* If we are in a record and have a small size, set the alignment to
     try for an integral mode.  Otherwise set it to try for a smaller
     type with BLKmode.  */
  if (in_record && size <= MAX_FIXED_MODE_SIZE)
    {
      align = ceil_pow2 (size);
      TYPE_ALIGN (new_type) = align;
      new_size = (size + align - 1) & -align;
    }
  else
    {
      unsigned HOST_WIDE_INT align;

      /* Do not try to shrink the size if the RM size is not constant.  */
      if (TYPE_CONTAINS_TEMPLATE_P (type)
          || !tree_fits_uhwi_p (TYPE_ADA_SIZE (type)))
        return type;

      /* Round the RM size up to a unit boundary to get the minimal size
         for a BLKmode record.  Give up if it's already the size.  */
      new_size = tree_to_uhwi (TYPE_ADA_SIZE (type));
      new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
      if (new_size == size)
        return type;

      align = new_size & -new_size;
      TYPE_ALIGN (new_type) = MIN (TYPE_ALIGN (type), align);
    }

  TYPE_USER_ALIGN (new_type) = 1;

  /* Now copy the fields, keeping the position and size as we don't want
     to change the layout by propagating the packedness downwards.  */
  for (old_field = TYPE_FIELDS (type); old_field;
       old_field = DECL_CHAIN (old_field))
    {
      tree new_field_type = TREE_TYPE (old_field);
      tree new_field, new_size;

      if (RECORD_OR_UNION_TYPE_P (new_field_type)
          && !TYPE_FAT_POINTER_P (new_field_type)
          && tree_fits_uhwi_p (TYPE_SIZE (new_field_type)))
        new_field_type = make_packable_type (new_field_type, true);

      /* However, for the last field in a not already packed record type
         that is of an aggregate type, we need to use the RM size in the
         packable version of the record type, see finish_record_type.  */
      if (!DECL_CHAIN (old_field)
          && !TYPE_PACKED (type)
          && RECORD_OR_UNION_TYPE_P (new_field_type)
          && !TYPE_FAT_POINTER_P (new_field_type)
          && !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
          && TYPE_ADA_SIZE (new_field_type))
        new_size = TYPE_ADA_SIZE (new_field_type);
      else
        new_size = DECL_SIZE (old_field);

      new_field
        = create_field_decl (DECL_NAME (old_field), new_field_type, new_type,
                             new_size, bit_position (old_field),
                             TYPE_PACKED (type),
                             !DECL_NONADDRESSABLE_P (old_field));

      DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (old_field);
      SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, old_field);
      if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
        DECL_QUALIFIER (new_field) = DECL_QUALIFIER (old_field);

      DECL_CHAIN (new_field) = field_list;
      field_list = new_field;
    }

  finish_record_type (new_type, nreverse (field_list), 2, false);
  relate_alias_sets (new_type, type, ALIAS_SET_COPY);
  if (TYPE_STUB_DECL (type))
    SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
                            DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));

  /* If this is a padding record, we never want to make the size smaller
     than what was specified.  For QUAL_UNION_TYPE, also copy the size.  */
  if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      TYPE_SIZE (new_type) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
      new_size = size;
    }
  else
    {
      TYPE_SIZE (new_type) = bitsize_int (new_size);
      TYPE_SIZE_UNIT (new_type)
        = size_int ((new_size + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
    }

  if (!TYPE_CONTAINS_TEMPLATE_P (type))
    SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));

  compute_record_mode (new_type);

  /* Try harder to get a packable type if necessary, for example
     in case the record itself contains a BLKmode field.  */
  if (in_record && TYPE_MODE (new_type) == BLKmode)
    SET_TYPE_MODE (new_type,
                   mode_for_size_tree (TYPE_SIZE (new_type), MODE_INT, 1));

  /* If neither the mode nor the size has shrunk, return the old type.  */
  if (TYPE_MODE (new_type) == BLKmode && new_size >= size)
    return type;

  return new_type;
}

/* Given a type TYPE, return a new type whose size is appropriate for SIZE.
   If TYPE is the best type, return it.  Otherwise, make a new type.  We
   only support new integral and pointer types.  FOR_BIASED is true if
   we are making a biased type.  */

tree
make_type_from_size (tree type, tree size_tree, bool for_biased)
{
  unsigned HOST_WIDE_INT size;
  bool biased_p;
  tree new_type;

  /* If size indicates an error, just return TYPE to avoid propagating
     the error.  Likewise if it's too large to represent.  */
  if (!size_tree || !tree_fits_uhwi_p (size_tree))
    return type;

  size = tree_to_uhwi (size_tree);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      biased_p = (TREE_CODE (type) == INTEGER_TYPE
                  && TYPE_BIASED_REPRESENTATION_P (type));

      /* Integer types with precision 0 are forbidden.  */
      if (size == 0)
        size = 1;

      /* Only do something if the type isn't a packed array type and doesn't
         already have the proper size and the size isn't too large.  */
      if (TYPE_IS_PACKED_ARRAY_TYPE_P (type)
          || (TYPE_PRECISION (type) == size && biased_p == for_biased)
          || size > LONG_LONG_TYPE_SIZE)
        break;

      biased_p |= for_biased;
      if (TYPE_UNSIGNED (type) || biased_p)
        new_type = make_unsigned_type (size);
      else
        new_type = make_signed_type (size);
      TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
      SET_TYPE_RM_MIN_VALUE (new_type,
                             convert (TREE_TYPE (new_type),
                                      TYPE_MIN_VALUE (type)));
      SET_TYPE_RM_MAX_VALUE (new_type,
                             convert (TREE_TYPE (new_type),
                                      TYPE_MAX_VALUE (type)));
      /* Copy the name to show that it's essentially the same type and
         not a subrange type.  */
      TYPE_NAME (new_type) = TYPE_NAME (type);
      TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
      SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
      return new_type;

    case RECORD_TYPE:
      /* Do something if this is a fat pointer, in which case we
         may need to return the thin pointer.  */
      if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
        {
          enum machine_mode p_mode = mode_for_size (size, MODE_INT, 0);
          if (!targetm.valid_pointer_mode (p_mode))
            p_mode = ptr_mode;
          return
            build_pointer_type_for_mode
              (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
               p_mode, 0);
        }
      break;

    case POINTER_TYPE:
      /* Only do something if this is a thin pointer, in which case we
         may need to return the fat pointer.  */
      if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
        return
          build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
      break;

    default:
      break;
    }

  return type;
}

/* See if the data pointed to by the hash table slot is marked.  */

static int
pad_type_hash_marked_p (const void *p)
{
  const_tree const type = ((const struct pad_type_hash *) p)->type;

  return ggc_marked_p (type);
}

/* Return the cached hash value.  */

static hashval_t
pad_type_hash_hash (const void *p)
{
  return ((const struct pad_type_hash *) p)->hash;
}

/* Return 1 iff the padded types are equivalent.  */

static int
pad_type_hash_eq (const void *p1, const void *p2)
{
  const struct pad_type_hash *const t1 = (const struct pad_type_hash *) p1;
  const struct pad_type_hash *const t2 = (const struct pad_type_hash *) p2;
  tree type1, type2;

  if (t1->hash != t2->hash)
    return 0;

  type1 = t1->type;
  type2 = t2->type;

  /* We consider that the padded types are equivalent if they pad the same
     type and have the same size, alignment and RM size.  Taking the mode
     into account is redundant since it is determined by the others.  */
  return
    TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
    && TYPE_SIZE (type1) == TYPE_SIZE (type2)
    && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
    && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2);
}

/* Ensure that TYPE has SIZE and ALIGN.  Make and return a new padded type
   if needed.  We have already verified that SIZE and TYPE are large enough.
   GNAT_ENTITY is used to name the resulting record and to issue a warning.
   IS_COMPONENT_TYPE is true if this is being done for the component type of
   an array.  IS_USER_TYPE is true if the original type needs to be completed.
   DEFINITION is true if this type is being defined.  SET_RM_SIZE is true if
   the RM size of the resulting type is to be set to SIZE too.  */

tree
maybe_pad_type (tree type, tree size, unsigned int align,
                Entity_Id gnat_entity, bool is_component_type,
                bool is_user_type, bool definition, bool set_rm_size)
{
  tree orig_size = TYPE_SIZE (type);
  unsigned int orig_align = TYPE_ALIGN (type);
  tree record, field;

  /* If TYPE is a padded type, see if it agrees with any size and alignment
     we were given.  If so, return the original type.  Otherwise, strip
     off the padding, since we will either be returning the inner type
     or repadding it.  If no size or alignment is specified, use that of
     the original padded type.  */
  if (TYPE_IS_PADDING_P (type))
    {
      if ((!size
           || operand_equal_p (round_up (size, orig_align), orig_size, 0))
          && (align == 0 || align == orig_align))
        return type;

      if (!size)
        size = orig_size;
      if (align == 0)
        align = orig_align;

      type = TREE_TYPE (TYPE_FIELDS (type));
      orig_size = TYPE_SIZE (type);
      orig_align = TYPE_ALIGN (type);
    }

  /* If the size is either not being changed or is being made smaller (which
     is not done here and is only valid for bitfields anyway), show the size
     isn't changing.  Likewise, clear the alignment if it isn't being
     changed.  Then return if we aren't doing anything.  */
  if (size
      && (operand_equal_p (size, orig_size, 0)
          || (TREE_CODE (orig_size) == INTEGER_CST
              && tree_int_cst_lt (size, orig_size))))
    size = NULL_TREE;

  if (align == orig_align)
    align = 0;

  if (align == 0 && !size)
    return type;

  /* If requested, complete the original type and give it a name.  */
  if (is_user_type)
    create_type_decl (get_entity_name (gnat_entity), type,
                      !Comes_From_Source (gnat_entity),
                      !(TYPE_NAME (type)
                        && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
                        && DECL_IGNORED_P (TYPE_NAME (type))),
                      gnat_entity);

  /* We used to modify the record in place in some cases, but that could
     generate incorrect debugging information.  So make a new record
     type and name.  */
  record = make_node (RECORD_TYPE);
  TYPE_PADDING_P (record) = 1;

  if (Present (gnat_entity))
    TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD");

  TYPE_ALIGN (record) = align ? align : orig_align;
  TYPE_SIZE (record) = size ? size : orig_size;
  TYPE_SIZE_UNIT (record)
    = convert (sizetype,
               size_binop (CEIL_DIV_EXPR, TYPE_SIZE (record),
                           bitsize_unit_node));

  /* If we are changing the alignment and the input type is a record with
     BLKmode and a small constant size, try to make a form that has an
     integral mode.  This might allow the padding record to also have an
     integral mode, which will be much more efficient.  There is no point
     in doing so if a size is specified unless it is also a small constant
     size and it is incorrect to do so if we cannot guarantee that the mode
     will be naturally aligned since the field must always be addressable.

     ??? This might not always be a win when done for a stand-alone object:
     since the nominal and the effective type of the object will now have
     different modes, a VIEW_CONVERT_EXPR will be required for converting
     between them and it might be hard to overcome afterwards, including
     at the RTL level when the stand-alone object is accessed as a whole.  */
  if (align != 0
      && RECORD_OR_UNION_TYPE_P (type)
      && TYPE_MODE (type) == BLKmode
      && !TYPE_BY_REFERENCE_P (type)
      && TREE_CODE (orig_size) == INTEGER_CST
      && !TREE_OVERFLOW (orig_size)
      && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
      && (!size
          || (TREE_CODE (size) == INTEGER_CST
              && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
    {
      tree packable_type = make_packable_type (type, true);
      if (TYPE_MODE (packable_type) != BLKmode
          && align >= TYPE_ALIGN (packable_type))
        type = packable_type;
    }

  /* Now create the field with the original size.  */
  field = create_field_decl (get_identifier ("F"), type, record, orig_size,
                             bitsize_zero_node, 0, 1);
  DECL_INTERNAL_P (field) = 1;

  /* Do not emit debug info until after the auxiliary record is built.  */
  finish_record_type (record, field, 1, false);

  /* Set the RM size if requested.  */
  if (set_rm_size)
    {
      SET_TYPE_ADA_SIZE (record, size ? size : orig_size);

      /* If the padded type is complete and has constant size, we canonicalize
         it by means of the hash table.  This is consistent with the language
         semantics and ensures that gigi and the middle-end have a common view
         of these padded types.  */
      if (TREE_CONSTANT (TYPE_SIZE (record)))
        {
          hashval_t hashcode;
          struct pad_type_hash in, *h;
          void **loc;

          hashcode = iterative_hash_object (TYPE_HASH (type), 0);
          hashcode = iterative_hash_expr (TYPE_SIZE (record), hashcode);
          hashcode = iterative_hash_hashval_t (TYPE_ALIGN (record), hashcode);
          hashcode = iterative_hash_expr (TYPE_ADA_SIZE (record), hashcode);

          in.hash = hashcode;
          in.type = record;
          h = (struct pad_type_hash *)
              htab_find_with_hash (pad_type_hash_table, &in, hashcode);
          if (h)
            {
              record = h->type;
              goto built;
            }

          h = ggc_alloc<pad_type_hash> ();
          h->hash = hashcode;
          h->type = record;
          loc = htab_find_slot_with_hash (pad_type_hash_table, h, hashcode,
                                          INSERT);
          *loc = (void *)h;
        }
    }

  /* Unless debugging information isn't being written for the input type,
     write a record that shows what we are a subtype of and also make a
     variable that indicates our size, if still variable.  */
  if (TREE_CODE (orig_size) != INTEGER_CST
      && TYPE_NAME (record)
      && TYPE_NAME (type)
      && !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
           && DECL_IGNORED_P (TYPE_NAME (type))))
    {
      tree marker = make_node (RECORD_TYPE);
      tree name = TYPE_IDENTIFIER (record);
      tree orig_name = TYPE_IDENTIFIER (type);

      TYPE_NAME (marker) = concat_name (name, "XVS");
      finish_record_type (marker,
                          create_field_decl (orig_name,
                                             build_reference_type (type),
                                             marker, NULL_TREE, NULL_TREE,
                                             0, 0),
                          0, true);

      add_parallel_type (record, marker);

      if (definition && size && TREE_CODE (size) != INTEGER_CST)
        TYPE_SIZE_UNIT (marker)
          = create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
                             TYPE_SIZE_UNIT (record), false, false, false,
                             false, NULL, gnat_entity);
    }

  rest_of_record_type_compilation (record);

built:
  /* If the size was widened explicitly, maybe give a warning.  Take the
     original size as the maximum size of the input if there was an
     unconstrained record involved and round it up to the specified alignment,
     if one was specified.  But don't do it if we are just annotating types
     and the type is tagged, since tagged types aren't fully laid out in this
     mode.  */
  if (!size
      || TREE_CODE (size) == COND_EXPR
      || TREE_CODE (size) == MAX_EXPR
      || No (gnat_entity)
      || (type_annotate_only && Is_Tagged_Type (Etype (gnat_entity))))
    return record;

  if (CONTAINS_PLACEHOLDER_P (orig_size))
    orig_size = max_size (orig_size, true);

  if (align)
    orig_size = round_up (orig_size, align);

  if (!operand_equal_p (size, orig_size, 0)
      && !(TREE_CODE (size) == INTEGER_CST
           && TREE_CODE (orig_size) == INTEGER_CST
           && (TREE_OVERFLOW (size)
               || TREE_OVERFLOW (orig_size)
               || tree_int_cst_lt (size, orig_size))))
    {
      Node_Id gnat_error_node = Empty;

      if (Is_Packed_Array_Type (gnat_entity))
        gnat_entity = Original_Array_Type (gnat_entity);

      if ((Ekind (gnat_entity) == E_Component
           || Ekind (gnat_entity) == E_Discriminant)
          && Present (Component_Clause (gnat_entity)))
        gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
      else if (Present (Size_Clause (gnat_entity)))
        gnat_error_node = Expression (Size_Clause (gnat_entity));

      /* Generate message only for entities that come from source, since
         if we have an entity created by expansion, the message will be
         generated for some other corresponding source entity.  */
      if (Comes_From_Source (gnat_entity))
        {
          if (Present (gnat_error_node))
            post_error_ne_tree ("{^ }bits of & unused?",
                                gnat_error_node, gnat_entity,
                                size_diffop (size, orig_size));
          else if (is_component_type)
            post_error_ne_tree ("component of& padded{ by ^ bits}?",
                                gnat_entity, gnat_entity,
                                size_diffop (size, orig_size));
        }
    }

  return record;
}
\f
/* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
   If this is a multi-dimensional array type, do this recursively.

   OP may be
   - ALIAS_SET_COPY:     the new set is made a copy of the old one.
   - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
   - ALIAS_SET_SUBSET:   the new set is made a subset of the old one.  */

void
relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
{
  /* Remove any padding from GNU_OLD_TYPE.  It doesn't matter in the case
     of a one-dimensional array, since the padding has the same alias set
     as the field type, but if it's a multi-dimensional array, we need to
     see the inner types.  */
  while (TREE_CODE (gnu_old_type) == RECORD_TYPE
         && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
             || TYPE_PADDING_P (gnu_old_type)))
    gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));

  /* Unconstrained array types are deemed incomplete and would thus be given
     alias set 0.  Retrieve the underlying array type.  */
  if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
    gnu_old_type
      = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
  if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
    gnu_new_type
      = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));

  if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
      && TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
      && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
    relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);

  switch (op)
    {
    case ALIAS_SET_COPY:
      /* The alias set shouldn't be copied between array types with different
         aliasing settings because this can break the aliasing relationship
         between the array type and its element type.  */
#ifndef ENABLE_CHECKING
      if (flag_strict_aliasing)
#endif
        gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
                      && TREE_CODE (gnu_old_type) == ARRAY_TYPE
                      && TYPE_NONALIASED_COMPONENT (gnu_new_type)
                         != TYPE_NONALIASED_COMPONENT (gnu_old_type)));

      TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
      break;

    case ALIAS_SET_SUBSET:
    case ALIAS_SET_SUPERSET:
      {
        alias_set_type old_set = get_alias_set (gnu_old_type);
        alias_set_type new_set = get_alias_set (gnu_new_type);

        /* Do nothing if the alias sets conflict.  This ensures that we
           never call record_alias_subset several times for the same pair
           or at all for alias set 0.  */
        if (!alias_sets_conflict_p (old_set, new_set))
          {
            if (op == ALIAS_SET_SUBSET)
              record_alias_subset (old_set, new_set);
            else
              record_alias_subset (new_set, old_set);
          }
      }
      break;

    default:
      gcc_unreachable ();
    }

  record_component_aliases (gnu_new_type);
}
\f
/* Record TYPE as a builtin type for Ada.  NAME is the name of the type.
   ARTIFICIAL_P is true if it's a type that was generated by the compiler.  */

void
record_builtin_type (const char *name, tree type, bool artificial_p)
{
  tree type_decl = build_decl (input_location,
                               TYPE_DECL, get_identifier (name), type);
  DECL_ARTIFICIAL (type_decl) = artificial_p;
  TYPE_ARTIFICIAL (type) = artificial_p;
  gnat_pushdecl (type_decl, Empty);

  if (debug_hooks->type_decl)
    debug_hooks->type_decl (type_decl, false);
}
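
/* Illustrative note, not part of the original source: the front-end
   registers its standard types with calls along the lines of

       record_builtin_type ("integer", integer_type_node, false);
       record_builtin_type ("unsigned char", unsigned_char_type_node, true);

   each of which wraps the type in a TYPE_DECL, pushes it at global level
   via gnat_pushdecl and notifies the debug back-end.  The exact name/type
   pairs shown here are assumptions made for illustration.  */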
\f
/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
   finish constructing the record type as a fat pointer type.  */

void
finish_fat_pointer_type (tree record_type, tree field_list)
{
  /* Make sure we can put it into a register.  */
  if (STRICT_ALIGNMENT)
    TYPE_ALIGN (record_type) = MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE);

  /* Show what it really is.  */
  TYPE_FAT_POINTER_P (record_type) = 1;

  /* Do not emit debug info for it since the types of its fields may still be
     incomplete at this point.  */
  finish_record_type (record_type, field_list, 0, false);

  /* Force type_contains_placeholder_p to return true on it.  Although the
     PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
     type but the representation of the unconstrained array.  */
  TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
}

032d1b71 1400/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
a1ab4c31
AC
1401 finish constructing the record or union type. If REP_LEVEL is zero, this
1402 record has no representation clause and so will be entirely laid out here.
1403 If REP_LEVEL is one, this record has a representation clause and has been
1404 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
1405 this record is derived from a parent record and thus inherits its layout;
032d1b71
EB
1406 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
1407 we need to write debug information about this type. */
a1ab4c31
AC
1408
1409void
032d1b71
EB
1410finish_record_type (tree record_type, tree field_list, int rep_level,
1411 bool debug_info_p)
a1ab4c31
AC
1412{
1413 enum tree_code code = TREE_CODE (record_type);
9dba4b55 1414 tree name = TYPE_IDENTIFIER (record_type);
a1ab4c31
AC
1415 tree ada_size = bitsize_zero_node;
1416 tree size = bitsize_zero_node;
1417 bool had_size = TYPE_SIZE (record_type) != 0;
1418 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
1419 bool had_align = TYPE_ALIGN (record_type) != 0;
1420 tree field;
1421
032d1b71 1422 TYPE_FIELDS (record_type) = field_list;
a1ab4c31 1423
10069d53
EB
1424 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
1425 generate debug info and have a parallel type. */
10069d53 1426 TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);
a1ab4c31
AC
1427
1428 /* Globally initialize the record first. If this is a rep'ed record,
1429 that just means some initializations; otherwise, layout the record. */
1430 if (rep_level > 0)
1431 {
1432 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
a1ab4c31
AC
1433
1434 if (!had_size_unit)
1435 TYPE_SIZE_UNIT (record_type) = size_zero_node;
b1fa9126 1436
a1ab4c31
AC
1437 if (!had_size)
1438 TYPE_SIZE (record_type) = bitsize_zero_node;
1439
1440 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
1441 out just like a UNION_TYPE, since the size will be fixed. */
1442 else if (code == QUAL_UNION_TYPE)
1443 code = UNION_TYPE;
1444 }
1445 else
1446 {
1447 /* Ensure there isn't a size already set. There can be in an error
1448 case where there is a rep clause but all fields have errors and
1449 no longer have a position. */
1450 TYPE_SIZE (record_type) = 0;
bb358f1c
EB
1451
1452 /* Ensure we use the traditional GCC layout for bitfields when we need
1453 to pack the record type or have a representation clause. The other
1454 possible layout (Microsoft C compiler), if available, would prevent
1455 efficient packing in almost all cases. */
1456#ifdef TARGET_MS_BITFIELD_LAYOUT
1457 if (TARGET_MS_BITFIELD_LAYOUT && TYPE_PACKED (record_type))
1458 decl_attributes (&record_type,
1459 tree_cons (get_identifier ("gcc_struct"),
1460 NULL_TREE, NULL_TREE),
1461 ATTR_FLAG_TYPE_IN_PLACE);
1462#endif
1463
1464 layout_type (record_type);
1465 }
1466
1467 /* At this point, the position and size of each field is known. It was
1468 either set before entry by a rep clause, or by laying out the type above.
1469
1470 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
1471 to compute the Ada size; the GCC size and alignment (for rep'ed records
1472 that are not padding types); and the mode (for rep'ed records). We also
1473 clear the DECL_BIT_FIELD indication for the cases we know have not been
1474 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
1475
1476 if (code == QUAL_UNION_TYPE)
032d1b71 1477 field_list = nreverse (field_list);
a1ab4c31 1478
910ad8de 1479 for (field = field_list; field; field = DECL_CHAIN (field))
1480 {
1481 tree type = TREE_TYPE (field);
1482 tree pos = bit_position (field);
1483 tree this_size = DECL_SIZE (field);
1484 tree this_ada_size;
1485
e1e5852c 1486 if (RECORD_OR_UNION_TYPE_P (type)
315cff15 1487 && !TYPE_FAT_POINTER_P (type)
1488 && !TYPE_CONTAINS_TEMPLATE_P (type)
1489 && TYPE_ADA_SIZE (type))
1490 this_ada_size = TYPE_ADA_SIZE (type);
1491 else
1492 this_ada_size = this_size;
1493
1494 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
1495 if (DECL_BIT_FIELD (field)
1496 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
1497 {
1498 unsigned int align = TYPE_ALIGN (type);
1499
1500 /* In the general case, type alignment is required. */
1501 if (value_factor_p (pos, align))
1502 {
1503 /* The enclosing record type must be sufficiently aligned.
1504 Otherwise, if no alignment was specified for it and it
1505 has been laid out already, bump its alignment to the
1506 desired one if this is compatible with its size. */
1507 if (TYPE_ALIGN (record_type) >= align)
1508 {
1509 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
1510 DECL_BIT_FIELD (field) = 0;
1511 }
1512 else if (!had_align
1513 && rep_level == 0
1514 && value_factor_p (TYPE_SIZE (record_type), align))
1515 {
1516 TYPE_ALIGN (record_type) = align;
1517 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
1518 DECL_BIT_FIELD (field) = 0;
1519 }
1520 }
1521
1522	  /* In the non-strict alignment case, only byte alignment is required.  */
1523 if (!STRICT_ALIGNMENT
1524 && DECL_BIT_FIELD (field)
1525 && value_factor_p (pos, BITS_PER_UNIT))
1526 DECL_BIT_FIELD (field) = 0;
1527 }
1528
1529 /* If we still have DECL_BIT_FIELD set at this point, we know that the
1530 field is technically not addressable. Except that it can actually
1531 be addressed if it is BLKmode and happens to be properly aligned. */
1532 if (DECL_BIT_FIELD (field)
1533 && !(DECL_MODE (field) == BLKmode
1534 && value_factor_p (pos, BITS_PER_UNIT)))
1535 DECL_NONADDRESSABLE_P (field) = 1;
1536
1537 /* A type must be as aligned as its most aligned field that is not
1538 a bit-field. But this is already enforced by layout_type. */
1539 if (rep_level > 0 && !DECL_BIT_FIELD (field))
1540 TYPE_ALIGN (record_type)
1541 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
1542
1543 switch (code)
1544 {
1545 case UNION_TYPE:
1546 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
1547 size = size_binop (MAX_EXPR, size, this_size);
1548 break;
1549
1550 case QUAL_UNION_TYPE:
1551 ada_size
1552 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1553 this_ada_size, ada_size);
1554 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1555 this_size, size);
1556 break;
1557
1558 case RECORD_TYPE:
1559 /* Since we know here that all fields are sorted in order of
1560 increasing bit position, the size of the record is one
1561 higher than the ending bit of the last field processed
1562 unless we have a rep clause, since in that case we might
1563 have a field outside a QUAL_UNION_TYPE that has a higher ending
1564 position. So use a MAX in that case. Also, if this field is a
1565 QUAL_UNION_TYPE, we need to take into account the previous size in
1566 the case of empty variants. */
1567 ada_size
1568 = merge_sizes (ada_size, pos, this_ada_size,
1569 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1570 size
1571 = merge_sizes (size, pos, this_size,
1572 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1573 break;
1574
1575 default:
1576 gcc_unreachable ();
1577 }
1578 }
1579
1580 if (code == QUAL_UNION_TYPE)
032d1b71 1581 nreverse (field_list);
1582
1583 if (rep_level < 2)
1584 {
1585 /* If this is a padding record, we never want to make the size smaller
1586 than what was specified in it, if any. */
315cff15 1587 if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
1588 size = TYPE_SIZE (record_type);
1589
1590 /* Now set any of the values we've just computed that apply. */
315cff15 1591 if (!TYPE_FAT_POINTER_P (record_type)
1592 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
1593 SET_TYPE_ADA_SIZE (record_type, ada_size);
1594
1595 if (rep_level > 0)
1596 {
1597 tree size_unit = had_size_unit
1598 ? TYPE_SIZE_UNIT (record_type)
1599 : convert (sizetype,
1600 size_binop (CEIL_DIV_EXPR, size,
1601 bitsize_unit_node));
1602 unsigned int align = TYPE_ALIGN (record_type);
1603
1604 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
1605 TYPE_SIZE_UNIT (record_type)
1606 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
1607
1608 compute_record_mode (record_type);
1609 }
1610 }
1611
032d1b71 1612 if (debug_info_p)
1613 rest_of_record_type_compilation (record_type);
1614}
1615
1616/* Append PARALLEL_TYPE on the chain of parallel types of TYPE. */
1617
1618void
1619add_parallel_type (tree type, tree parallel_type)
1620{
1621 tree decl = TYPE_STUB_DECL (type);
1622
1623 while (DECL_PARALLEL_TYPE (decl))
1624 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
1625
1626 SET_DECL_PARALLEL_TYPE (decl, parallel_type);
1627}
1628
1629/* Return true if TYPE has a parallel type. */
1630
1631static bool
1632has_parallel_type (tree type)
1633{
1634 tree decl = TYPE_STUB_DECL (type);
1635
1636 return DECL_PARALLEL_TYPE (decl) != NULL_TREE;
1637}
1638
1639/* Wrap up compilation of RECORD_TYPE, i.e. output all the debug information
1640 associated with it. It need not be invoked directly in most cases since
1641 finish_record_type takes care of doing so, but this can be necessary if
1642 a parallel type is to be attached to the record type. */
1643
1644void
1645rest_of_record_type_compilation (tree record_type)
1646{
a1ab4c31 1647 bool var_size = false;
fb88e1dd 1648 tree field;
a1ab4c31 1649
1650 /* If this is a padded type, the bulk of the debug info has already been
1651 generated for the field's type. */
1652 if (TYPE_IS_PADDING_P (record_type))
1653 return;
1654
1655 /* If the type already has a parallel type (XVS type), then we're done. */
1656 if (has_parallel_type (record_type))
1657 return;
1658
fb88e1dd 1659 for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
1660 {
1661 /* We need to make an XVE/XVU record if any field has variable size,
1662 whether or not the record does. For example, if we have a union,
1663 it may be that all fields, rounded up to the alignment, have the
1664 same size, in which case we'll use that size. But the debug
1665 output routines (except Dwarf2) won't be able to output the fields,
1666 so we need to make the special record. */
1667 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
1668 /* If a field has a non-constant qualifier, the record will have
1669 variable size too. */
fb88e1dd 1670 || (TREE_CODE (record_type) == QUAL_UNION_TYPE
1671 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
1672 {
1673 var_size = true;
1674 break;
1675 }
1676 }
1677
1678 /* If this record type is of variable size, make a parallel record type that
1679 will tell the debugger how the former is laid out (see exp_dbug.ads). */
1680 if (var_size)
1681 {
1682 tree new_record_type
1683 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
1684 ? UNION_TYPE : TREE_CODE (record_type));
9dba4b55 1685 tree orig_name = TYPE_IDENTIFIER (record_type), new_name;
a1ab4c31 1686 tree last_pos = bitsize_zero_node;
0fb2335d 1687 tree old_field, prev_old_field = NULL_TREE;
a1ab4c31 1688
1689 new_name
1690 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
1691 ? "XVU" : "XVE");
1692 TYPE_NAME (new_record_type) = new_name;
1693 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
1694 TYPE_STUB_DECL (new_record_type)
0fb2335d 1695 = create_type_stub_decl (new_name, new_record_type);
1696 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
1697 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
1698 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
1699 TYPE_SIZE_UNIT (new_record_type)
1700 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
1701
1702 /* Now scan all the fields, replacing each field with a new field
1703 corresponding to the new encoding. */
a1ab4c31 1704 for (old_field = TYPE_FIELDS (record_type); old_field;
910ad8de 1705 old_field = DECL_CHAIN (old_field))
1706 {
1707 tree field_type = TREE_TYPE (old_field);
1708 tree field_name = DECL_NAME (old_field);
a1ab4c31 1709 tree curpos = bit_position (old_field);
5c475ba9 1710 tree pos, new_field;
1711 bool var = false;
1712 unsigned int align = 0;
a1ab4c31 1713
1714 /* We're going to do some pattern matching below so remove as many
1715 conversions as possible. */
1716 curpos = remove_conversions (curpos, true);
a1ab4c31 1717
5c475ba9 1718 /* See how the position was modified from the last position.
a1ab4c31 1719
1720 There are two basic cases we support: a value was added
1721 to the last position or the last position was rounded to
1722	     a boundary and then something was added.  Check for the
1723 first case first. If not, see if there is any evidence
1724 of rounding. If so, round the last position and retry.
a1ab4c31 1725
5c475ba9 1726 If this is a union, the position can be taken as zero. */
a1ab4c31 1727 if (TREE_CODE (new_record_type) == UNION_TYPE)
5c475ba9 1728 pos = bitsize_zero_node;
1729 else
1730 pos = compute_related_constant (curpos, last_pos);
1731
1732 if (!pos
1733 && TREE_CODE (curpos) == MULT_EXPR
cc269bb6 1734 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1)))
1735 {
1736 tree offset = TREE_OPERAND (curpos, 0);
ae7e9ddd 1737 align = tree_to_uhwi (TREE_OPERAND (curpos, 1));
1738 align = scale_by_factor_of (offset, align);
1739 last_pos = round_up (last_pos, align);
1740 pos = compute_related_constant (curpos, last_pos);
a1ab4c31 1741 }
1742 else if (!pos
1743 && TREE_CODE (curpos) == PLUS_EXPR
cc269bb6 1744 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1))
a1ab4c31 1745 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
1746 && tree_fits_uhwi_p
1747 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1)))
a1ab4c31 1748 {
1749 tree offset = TREE_OPERAND (TREE_OPERAND (curpos, 0), 0);
1750 unsigned HOST_WIDE_INT addend
ae7e9ddd 1751 = tree_to_uhwi (TREE_OPERAND (curpos, 1));
a1ab4c31 1752 align
ae7e9ddd 1753 = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1));
1754 align = scale_by_factor_of (offset, align);
1755 align = MIN (align, addend & -addend);
1756 last_pos = round_up (last_pos, align);
1757 pos = compute_related_constant (curpos, last_pos);
a1ab4c31 1758 }
5c475ba9 1759 else if (potential_alignment_gap (prev_old_field, old_field, pos))
1760 {
1761 align = TYPE_ALIGN (field_type);
1762 last_pos = round_up (last_pos, align);
1763 pos = compute_related_constant (curpos, last_pos);
1764 }
1765
1766 /* If we can't compute a position, set it to zero.
1767
1768 ??? We really should abort here, but it's too much work
1769 to get this correct for all cases. */
1770 if (!pos)
1771 pos = bitsize_zero_node;
1772
1773 /* See if this type is variable-sized and make a pointer type
1774 and indicate the indirection if so. Beware that the debug
1775 back-end may adjust the position computed above according
1776 to the alignment of the field type, i.e. the pointer type
1777 in this case, if we don't preventively counter that. */
1778 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
1779 {
1780 field_type = build_pointer_type (field_type);
1781 if (align != 0 && TYPE_ALIGN (field_type) > align)
1782 {
1783 field_type = copy_node (field_type);
1784 TYPE_ALIGN (field_type) = align;
1785 }
1786 var = true;
1787 }
1788
1789 /* Make a new field name, if necessary. */
1790 if (var || align != 0)
1791 {
1792 char suffix[16];
1793
1794 if (align != 0)
1795 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
1796 align / BITS_PER_UNIT);
1797 else
1798 strcpy (suffix, "XVL");
1799
0fb2335d 1800 field_name = concat_name (field_name, suffix);
1801 }
1802
1803 new_field
1804 = create_field_decl (field_name, field_type, new_record_type,
1805 DECL_SIZE (old_field), pos, 0, 0);
910ad8de 1806 DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
1807 TYPE_FIELDS (new_record_type) = new_field;
1808
1809 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1810 zero. The only time it's not the last field of the record
1811 is when there are other components at fixed positions after
1812 it (meaning there was a rep clause for every field) and we
1813 want to be able to encode them. */
1814 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
1815 (TREE_CODE (TREE_TYPE (old_field))
1816 == QUAL_UNION_TYPE)
1817 ? bitsize_zero_node
1818 : DECL_SIZE (old_field));
1819 prev_old_field = old_field;
1820 }
1821
fb88e1dd 1822 TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));
a1ab4c31 1823
a5695aa2 1824 add_parallel_type (record_type, new_record_type);
a1ab4c31 1825 }
1826}
1827
a1ab4c31 1828/* Utility function of above to merge LAST_SIZE, the previous size of a record
1829 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
1830 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
1831 replace a value of zero with the old size. If HAS_REP is true, we take the
1832 MAX of the end position of this field with LAST_SIZE. In all other cases,
1833 we use FIRST_BIT plus SIZE. Return an expression for the size. */
1834
1835static tree
1836merge_sizes (tree last_size, tree first_bit, tree size, bool special,
1837 bool has_rep)
1838{
1839 tree type = TREE_TYPE (last_size);
c6bd4220 1840 tree new_size;
1841
1842 if (!special || TREE_CODE (size) != COND_EXPR)
1843 {
c6bd4220 1844 new_size = size_binop (PLUS_EXPR, first_bit, size);
a1ab4c31 1845 if (has_rep)
c6bd4220 1846 new_size = size_binop (MAX_EXPR, last_size, new_size);
1847 }
1848
1849 else
1850 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
1851 integer_zerop (TREE_OPERAND (size, 1))
1852 ? last_size : merge_sizes (last_size, first_bit,
1853 TREE_OPERAND (size, 1),
1854 1, has_rep),
1855 integer_zerop (TREE_OPERAND (size, 2))
1856 ? last_size : merge_sizes (last_size, first_bit,
1857 TREE_OPERAND (size, 2),
1858 1, has_rep));
1859
1860	  /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
1861 when fed through substitute_in_expr) into thinking that a constant
1862 size is not constant. */
1863 while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
1864 new_size = TREE_OPERAND (new_size, 0);
a1ab4c31 1865
c6bd4220 1866 return new_size;
1867}
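/* For instance, for a record laid out without a rep clause, merge_sizes
   applied to a field whose FIRST_BIT is 32 and whose SIZE is 16 yields a
   running size of 32 + 16 = 48 bits; with HAS_REP true and a LAST_SIZE of
   64, the MAX gives 64, since a rep clause may place later fields below
   earlier ones.  */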
1868
1869/* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1870 related by the addition of a constant. Return that constant if so. */
1871
1872static tree
1873compute_related_constant (tree op0, tree op1)
1874{
1875 tree op0_var, op1_var;
1876 tree op0_con = split_plus (op0, &op0_var);
1877 tree op1_con = split_plus (op1, &op1_var);
1878 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1879
1880 if (operand_equal_p (op0_var, op1_var, 0))
1881 return result;
1882 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1883 return result;
1884 else
1885 return 0;
1886}
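/* For instance, if OP0 is N + 48 and OP1 is N + 16 for the same variable
   part N, compute_related_constant returns the constant 32; if the variable
   parts cannot be matched, a null tree is returned and the caller must fall
   back on other means of computing the position.  */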
1887
1888/* Utility function of above to split a tree OP which may be a sum, into a
1889 constant part, which is returned, and a variable part, which is stored
1890 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1891 bitsizetype. */
1892
1893static tree
1894split_plus (tree in, tree *pvar)
1895{
1896 /* Strip conversions in order to ease the tree traversal and maximize the
1897 potential for constant or plus/minus discovery. We need to be careful
1898 to always return and set *pvar to bitsizetype trees, but it's worth
1899 the effort. */
722356ce 1900 in = remove_conversions (in, false);
1901
1902 *pvar = convert (bitsizetype, in);
1903
1904 if (TREE_CODE (in) == INTEGER_CST)
1905 {
1906 *pvar = bitsize_zero_node;
1907 return convert (bitsizetype, in);
1908 }
1909 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1910 {
1911 tree lhs_var, rhs_var;
1912 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1913 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1914
1915 if (lhs_var == TREE_OPERAND (in, 0)
1916 && rhs_var == TREE_OPERAND (in, 1))
1917 return bitsize_zero_node;
1918
1919 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1920 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1921 }
1922 else
1923 return bitsize_zero_node;
1924}
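/* For instance, splitting the bitsizetype expression X + 7 returns the
   constant part 7 and sets *PVAR to the variable part X, while splitting a
   plain INTEGER_CST returns that constant with *PVAR set to
   bitsize_zero_node.  */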
1925\f
1926/* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1927 subprogram. If it is VOID_TYPE, then we are dealing with a procedure,
1928 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1929 PARM_DECL nodes that are the subprogram parameters. CICO_LIST is the
1930 copy-in/copy-out list to be stored into the TYPE_CICO_LIST field.
1931 RETURN_UNCONSTRAINED_P is true if the function returns an unconstrained
1932 object. RETURN_BY_DIRECT_REF_P is true if the function returns by direct
1933 reference. RETURN_BY_INVISI_REF_P is true if the function returns by
1934 invisible reference. */
1935
1936tree
1937create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
1938 bool return_unconstrained_p, bool return_by_direct_ref_p,
1939 bool return_by_invisi_ref_p)
a1ab4c31 1940{
1941 /* A list of the data type nodes of the subprogram formal parameters.
1942 This list is generated by traversing the input list of PARM_DECL
1943 nodes. */
9771b263 1944 vec<tree, va_gc> *param_type_list = NULL;
d47d0a8d 1945 tree t, type;
a1ab4c31 1946
910ad8de 1947 for (t = param_decl_list; t; t = DECL_CHAIN (t))
9771b263 1948 vec_safe_push (param_type_list, TREE_TYPE (t));
a1ab4c31 1949
e5b00edf 1950 type = build_function_type_vec (return_type, param_type_list);
a1ab4c31 1951
1952 /* TYPE may have been shared since GCC hashes types. If it has a different
1953 CICO_LIST, make a copy. Likewise for the various flags. */
1954 if (!fntype_same_flags_p (type, cico_list, return_unconstrained_p,
1955 return_by_direct_ref_p, return_by_invisi_ref_p))
1956 {
1957 type = copy_type (type);
1958 TYPE_CI_CO_LIST (type) = cico_list;
1959 TYPE_RETURN_UNCONSTRAINED_P (type) = return_unconstrained_p;
1960 TYPE_RETURN_BY_DIRECT_REF_P (type) = return_by_direct_ref_p;
1961 TREE_ADDRESSABLE (type) = return_by_invisi_ref_p;
1962 }
a1ab4c31 1963
1964 return type;
1965}
1966\f
1967/* Return a copy of TYPE but safe to modify in any way. */
1968
1969tree
1970copy_type (tree type)
1971{
c6bd4220 1972 tree new_type = copy_node (type);
a1ab4c31 1973
1974 /* Unshare the language-specific data. */
1975 if (TYPE_LANG_SPECIFIC (type))
1976 {
1977 TYPE_LANG_SPECIFIC (new_type) = NULL;
1978 SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
1979 }
1980
1981 /* And the contents of the language-specific slot if needed. */
1982 if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
1983 && TYPE_RM_VALUES (type))
1984 {
1985 TYPE_RM_VALUES (new_type) = NULL_TREE;
1986 SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
1987 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
1988 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
1989 }
1990
1991 /* copy_node clears this field instead of copying it, because it is
1992 aliased with TREE_CHAIN. */
c6bd4220 1993 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
a1ab4c31 1994
1995 TYPE_POINTER_TO (new_type) = 0;
1996 TYPE_REFERENCE_TO (new_type) = 0;
1997 TYPE_MAIN_VARIANT (new_type) = new_type;
1998 TYPE_NEXT_VARIANT (new_type) = 0;
a1ab4c31 1999
c6bd4220 2000 return new_type;
2001}
2002\f
2003/* Return a subtype of sizetype with range MIN to MAX and whose
2004 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2005 of the associated TYPE_DECL. */
2006
2007tree
2008create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
2009{
2010 /* First build a type for the desired range. */
523e82a7 2011 tree type = build_nonshared_range_type (sizetype, min, max);
a1ab4c31 2012
523e82a7 2013 /* Then set the index type. */
a1ab4c31 2014 SET_TYPE_INDEX_TYPE (type, index);
74746d49 2015 create_type_decl (NULL_TREE, type, true, false, gnat_node);
c1abd261 2016
2017 return type;
2018}
2019
2020/* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2021 sizetype is used. */
2022
2023tree
2024create_range_type (tree type, tree min, tree max)
2025{
2026 tree range_type;
2027
2028 if (type == NULL_TREE)
2029 type = sizetype;
2030
2031 /* First build a type with the base range. */
2032 range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
2033 TYPE_MAX_VALUE (type));
2034
2035 /* Then set the actual range. */
2036 SET_TYPE_RM_MIN_VALUE (range_type, convert (type, min));
2037 SET_TYPE_RM_MAX_VALUE (range_type, convert (type, max));
2038
2039 return range_type;
2040}
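/* For instance, create_range_type (NULL_TREE, min, max) yields a subtype of
   sizetype whose base bounds are those of sizetype itself, while MIN and MAX
   are recorded as the RM bounds, so the Ada-level constraint is tracked
   separately from the base-type bounds seen by the optimizer.  */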
a1ab4c31 2041\f
2042/* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
2043 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
2044 its data type. */
2045
2046tree
2047create_type_stub_decl (tree type_name, tree type)
2048{
2049 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
2050 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
2051 emitted in DWARF. */
44e9e3ec 2052 tree type_decl = build_decl (input_location, TYPE_DECL, type_name, type);
10069d53 2053 DECL_ARTIFICIAL (type_decl) = 1;
bc712852 2054 TYPE_ARTIFICIAL (type) = 1;
2055 return type_decl;
2056}
2057
2058/* Return a TYPE_DECL node. TYPE_NAME gives the name of the type and TYPE
2059 is a ..._TYPE node giving its data type. ARTIFICIAL_P is true if this
2060 is a declaration that was generated by the compiler. DEBUG_INFO_P is
2061 true if we need to write debug information about this type. GNAT_NODE
2062 is used for the position of the decl. */
2063
2064tree
2065create_type_decl (tree type_name, tree type, bool artificial_p,
2066 bool debug_info_p, Node_Id gnat_node)
a1ab4c31 2067{
a1ab4c31 2068 enum tree_code code = TREE_CODE (type);
2069 bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
2070 tree type_decl;
a1ab4c31 2071
2072 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
2073 gcc_assert (!TYPE_IS_DUMMY_P (type));
a1ab4c31 2074
2075 /* If the type hasn't been named yet, we're naming it; preserve an existing
2076 TYPE_STUB_DECL that has been attached to it for some purpose. */
2077 if (!named && TYPE_STUB_DECL (type))
2078 {
2079 type_decl = TYPE_STUB_DECL (type);
2080 DECL_NAME (type_decl) = type_name;
2081 }
2082 else
74746d49 2083 type_decl = build_decl (input_location, TYPE_DECL, type_name, type);
a1ab4c31 2084
10069d53 2085 DECL_ARTIFICIAL (type_decl) = artificial_p;
bc712852 2086 TYPE_ARTIFICIAL (type) = artificial_p;
2087
2088 /* Add this decl to the current binding level. */
10069d53 2089 gnat_pushdecl (type_decl, gnat_node);
58c8f770 2090
2091 /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
2092 This causes the name to be also viewed as a "tag" by the debug
2093 back-end, with the advantage that no DW_TAG_typedef is emitted
2094 for artificial "tagged" types in DWARF. */
2095 if (!named)
2096 TYPE_STUB_DECL (type) = type_decl;
2097
2098 /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
2099 back-end doesn't support, and for others if we don't need to. */
2100 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
2101 DECL_IGNORED_P (type_decl) = 1;
2102
2103 return type_decl;
2104}
10069d53 2105\f
2106/* Return a VAR_DECL or CONST_DECL node.
2107
2108 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
2109 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
2110 the GCC tree for an optional initial expression; NULL_TREE if none.
2111
2112 CONST_FLAG is true if this variable is constant, in which case we might
2113 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
2114
2115 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
2116 definition to be made visible outside of the current compilation unit, for
2117 instance variable definitions in a package specification.
2118
1e17ef87 2119 EXTERN_FLAG is true when processing an external variable declaration (as
2120 opposed to a definition: no storage is to be allocated for the variable).
2121
2122 STATIC_FLAG is only relevant when not at top level. In that case
2123 it indicates whether to always allocate storage to the variable.
2124
2125 GNAT_NODE is used for the position of the decl. */
2126
2127tree
2128create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
2129 bool const_flag, bool public_flag, bool extern_flag,
2130 bool static_flag, bool const_decl_allowed_p,
2131 struct attrib *attr_list, Node_Id gnat_node)
2132{
2133 /* Whether the initializer is a constant initializer. At the global level
2134 or for an external object or an object to be allocated in static memory,
2135 we check that it is a valid constant expression for use in initializing
2136 a static variable; otherwise, we only check that it is constant. */
2137 bool init_const
2138 = (var_init != 0
2139 && gnat_types_compatible_p (type, TREE_TYPE (var_init))
50179d58 2140 && (global_bindings_p () || extern_flag || static_flag
2141 ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
2142 : TREE_CONSTANT (var_init)));
2143
2144 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
2145 case the initializer may be used in-lieu of the DECL node (as done in
2146 Identifier_to_gnu). This is useful to prevent the need of elaboration
2147 code when an identifier for which such a decl is made is in turn used as
2148 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
2149 but extra constraints apply to this choice (see below) and are not
2150 relevant to the distinction we wish to make. */
2151 bool constant_p = const_flag && init_const;
2152
2153 /* The actual DECL node. CONST_DECL was initially intended for enumerals
2154 and may be used for scalars in general but not for aggregates. */
2155 tree var_decl
2156 = build_decl (input_location,
2157 (constant_p && const_decl_allowed_p
2158 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
2159 var_name, type);
2160
2161 /* If this is external, throw away any initializations (they will be done
2162 elsewhere) unless this is a constant for which we would like to remain
2163 able to get the initializer. If we are defining a global here, leave a
2164 constant initialization and save any variable elaborations for the
2165 elaboration routine. If we are just annotating types, throw away the
2166 initialization if it isn't a constant. */
2167 if ((extern_flag && !constant_p)
2168 || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
2169 var_init = NULL_TREE;
2170
2171 /* At the global level, an initializer requiring code to be generated
2172 produces elaboration statements. Check that such statements are allowed,
2173 that is, not violating a No_Elaboration_Code restriction. */
3b9e8343 2174 if (global_bindings_p () && var_init != 0 && !init_const)
a1ab4c31 2175 Check_Elaboration_Code_Allowed (gnat_node);
3b9e8343 2176
2177 DECL_INITIAL (var_decl) = var_init;
2178 TREE_READONLY (var_decl) = const_flag;
2179 DECL_EXTERNAL (var_decl) = extern_flag;
2180 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
2181 TREE_CONSTANT (var_decl) = constant_p;
2182 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
2183 = TYPE_VOLATILE (type);
2184
2185 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
2186 try to fiddle with DECL_COMMON. However, on platforms that don't
2187 support global BSS sections, uninitialized global variables would
2188 go in DATA instead, thus increasing the size of the executable. */
2189 if (!flag_no_common
2190 && TREE_CODE (var_decl) == VAR_DECL
3b9e8343 2191 && TREE_PUBLIC (var_decl)
2192 && !have_global_bss_p ())
2193 DECL_COMMON (var_decl) = 1;
a1ab4c31 2194
2195 /* At the global binding level, we need to allocate static storage for the
2196 variable if it isn't external. Otherwise, we allocate automatic storage
2197 unless requested not to. */
a1ab4c31 2198 TREE_STATIC (var_decl)
2231f17f 2199 = !extern_flag && (static_flag || global_bindings_p ());
a1ab4c31 2200
2201 /* For an external constant whose initializer is not absolute, do not emit
2202 debug info. In DWARF this would mean a global relocation in a read-only
c01fe451 2203 section which runs afoul of the PE-COFF run-time relocation mechanism. */
2204 if (extern_flag
2205 && constant_p
50179d58 2206 && var_init
2207 && initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
2208 != null_pointer_node)
2209 DECL_IGNORED_P (var_decl) = 1;
2210
2211 if (TREE_SIDE_EFFECTS (var_decl))
2212 TREE_ADDRESSABLE (var_decl) = 1;
2213
2214 /* ??? Some attributes cannot be applied to CONST_DECLs. */
2215 if (TREE_CODE (var_decl) == VAR_DECL)
2216 process_attributes (&var_decl, &attr_list, true, gnat_node);
2217
2218 /* Add this decl to the current binding level. */
2219 gnat_pushdecl (var_decl, gnat_node);
2220
58c8f770 2221 if (TREE_CODE (var_decl) == VAR_DECL)
a1ab4c31 2222 {
2223 if (asm_name)
2224 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
74746d49 2225
2226 if (global_bindings_p ())
2227 rest_of_decl_compilation (var_decl, true, 0);
2228 }
2229
2230 return var_decl;
2231}
2232\f
2233/* Return true if TYPE, an aggregate type, contains (or is) an array. */
2234
2235static bool
2236aggregate_type_contains_array_p (tree type)
2237{
2238 switch (TREE_CODE (type))
2239 {
2240 case RECORD_TYPE:
2241 case UNION_TYPE:
2242 case QUAL_UNION_TYPE:
2243 {
2244 tree field;
910ad8de 2245 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2246 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
2247 && aggregate_type_contains_array_p (TREE_TYPE (field)))
2248 return true;
2249 return false;
2250 }
2251
2252 case ARRAY_TYPE:
2253 return true;
2254
2255 default:
2256 gcc_unreachable ();
2257 }
2258}
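/* For instance, a RECORD_TYPE one of whose fields is itself a record
   containing an ARRAY_TYPE component returns true through the recursion,
   whereas a record made only of scalar fields returns false.  */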
2259
62f9f3ce 2260/* Return a FIELD_DECL node. FIELD_NAME is the field's name, FIELD_TYPE is
2261 its type and RECORD_TYPE is the type of the enclosing record. If SIZE is
2262 nonzero, it is the specified size of the field. If POS is nonzero, it is
2263 the bit position. PACKED is 1 if the enclosing record is packed, -1 if it
2264 has Component_Alignment of Storage_Unit. If ADDRESSABLE is nonzero, it
2265 means we are allowed to take the address of the field; if it is negative,
2266 we should not make a bitfield, which is used by make_aligning_type. */
2267
2268tree
2269create_field_decl (tree field_name, tree field_type, tree record_type,
da01bfee 2270 tree size, tree pos, int packed, int addressable)
a1ab4c31 2271{
2272 tree field_decl = build_decl (input_location,
2273 FIELD_DECL, field_name, field_type);
2274
2275 DECL_CONTEXT (field_decl) = record_type;
2276 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
2277
2278 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
2279 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
2280 Likewise for an aggregate without specified position that contains an
2281 array, because in this case slices of variable length of this array
2282 must be handled by GCC and variable-sized objects need to be aligned
2283 to at least a byte boundary. */
2284 if (packed && (TYPE_MODE (field_type) == BLKmode
2285 || (!pos
2286 && AGGREGATE_TYPE_P (field_type)
2287 && aggregate_type_contains_array_p (field_type))))
2288 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
2289
2290 /* If a size is specified, use it. Otherwise, if the record type is packed
2291 compute a size to use, which may differ from the object's natural size.
2292 We always set a size in this case to trigger the checks for bitfield
2293 creation below, which is typically required when no position has been
2294 specified. */
2295 if (size)
2296 size = convert (bitsizetype, size);
2297 else if (packed == 1)
2298 {
2299 size = rm_size (field_type);
2300 if (TYPE_MODE (field_type) == BLKmode)
2301 size = round_up (size, BITS_PER_UNIT);
2302 }
2303
2304 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
2305 specified for two reasons: first if the size differs from the natural
2306 size. Second, if the alignment is insufficient. There are a number of
2307 ways the latter can be true.
2308
2309 We never make a bitfield if the type of the field has a nonconstant size,
2310 because no such entity requiring bitfield operations should reach here.
2311
2312 We do *preventively* make a bitfield when there might be the need for it
2313 but we don't have all the necessary information to decide, as is the case
2314 of a field with no specified position in a packed record.
2315
2316 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
2317 in layout_decl or finish_record_type to clear the bit_field indication if
2318 it is in fact not needed. */
2319 if (addressable >= 0
2320 && size
2321 && TREE_CODE (size) == INTEGER_CST
2322 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
2323 && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
2324 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
2325 || packed
2326 || (TYPE_ALIGN (record_type) != 0
2327 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
2328 {
2329 DECL_BIT_FIELD (field_decl) = 1;
2330 DECL_SIZE (field_decl) = size;
2331 if (!packed && !pos)
2332 {
2333 if (TYPE_ALIGN (record_type) != 0
2334 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))
2335 DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
2336 else
2337 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
2338 }
2339 }
2340
2341 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
2342
2343 /* Bump the alignment if need be, either for bitfield/packing purposes or
2344 to satisfy the type requirements if no such consideration applies. When
2345 we get the alignment from the type, indicate if this is from an explicit
2346 user request, which prevents stor-layout from lowering it later on. */
2347 {
d9223014 2348 unsigned int bit_align
2349 = (DECL_BIT_FIELD (field_decl) ? 1
2350 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);
2351
2352 if (bit_align > DECL_ALIGN (field_decl))
2353 DECL_ALIGN (field_decl) = bit_align;
2354 else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
2355 {
2356 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
2357 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
2358 }
2359 }
2360
2361 if (pos)
2362 {
2363 /* We need to pass in the alignment the DECL is known to have.
2364 This is the lowest-order bit set in POS, but no more than
2365 the alignment of the record, if one is specified. Note
2366 that an alignment of 0 is taken as infinite. */
2367 unsigned int known_align;
2368
cc269bb6 2369 if (tree_fits_uhwi_p (pos))
ae7e9ddd 2370 known_align = tree_to_uhwi (pos) & - tree_to_uhwi (pos);
2371 else
2372 known_align = BITS_PER_UNIT;
2373
2374 if (TYPE_ALIGN (record_type)
2375 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
2376 known_align = TYPE_ALIGN (record_type);
2377
2378 layout_decl (field_decl, known_align);
2379 SET_DECL_OFFSET_ALIGN (field_decl,
cc269bb6 2380 tree_fits_uhwi_p (pos) ? BIGGEST_ALIGNMENT
a1ab4c31
AC
2381 : BITS_PER_UNIT);
2382 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
2383 &DECL_FIELD_BIT_OFFSET (field_decl),
2384 DECL_OFFSET_ALIGN (field_decl), pos);
2385 }
2386
2387 /* In addition to what our caller says, claim the field is addressable if we
2388 know that its type is not suitable.
2389
2390 The field may also be "technically" nonaddressable, meaning that even if
2391 we attempt to take the field's address we will actually get the address
2392 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
2393 value we have at this point is not accurate enough, so we don't account
2394 for this here and let finish_record_type decide. */
4c5a0615 2395 if (!addressable && !type_for_nonaliased_component_p (field_type))
2396 addressable = 1;
2397
2398 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
2399
2400 return field_decl;
2401}
2402\f
2403/* Return a PARM_DECL node. PARAM_NAME is the name of the parameter and
2404 PARAM_TYPE is its type. READONLY is true if the parameter is readonly
2405 (either an In parameter or an address of a pass-by-ref parameter). */
2406
2407tree
2408create_param_decl (tree param_name, tree param_type, bool readonly)
2409{
2410 tree param_decl = build_decl (input_location,
2411 PARM_DECL, param_name, param_type);
a1ab4c31 2412
2413 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
2414 can lead to various ABI violations. */
2415 if (targetm.calls.promote_prototypes (NULL_TREE)
2416 && INTEGRAL_TYPE_P (param_type)
2417 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
2418 {
2419 /* We have to be careful about biased types here. Make a subtype
2420 of integer_type_node with the proper biasing. */
2421 if (TREE_CODE (param_type) == INTEGER_TYPE
2422 && TYPE_BIASED_REPRESENTATION_P (param_type))
2423 {
2424 tree subtype
2425 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
2426 TREE_TYPE (subtype) = integer_type_node;
2427 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
2428 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (param_type));
2429 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (param_type));
c1abd261 2430 param_type = subtype;
2431 }
2432 else
2433 param_type = integer_type_node;
2434 }
2435
2436 DECL_ARG_TYPE (param_decl) = param_type;
2437 TREE_READONLY (param_decl) = readonly;
2438 return param_decl;
2439}
2440\f
2441/* Process the attributes in ATTR_LIST for NODE, which is either a DECL or
2442 a TYPE. If IN_PLACE is true, the tree pointed to by NODE should not be
2443 changed. GNAT_NODE is used for the position of error messages. */
a1ab4c31 2444
2445void
2446process_attributes (tree *node, struct attrib **attr_list, bool in_place,
2447 Node_Id gnat_node)
a1ab4c31 2448{
2449 struct attrib *attr;
2450
2451 for (attr = *attr_list; attr; attr = attr->next)
2452 switch (attr->type)
2453 {
2454 case ATTR_MACHINE_ATTRIBUTE:
2455 Sloc_to_locus (Sloc (gnat_node), &input_location);
2456 decl_attributes (node, tree_cons (attr->name, attr->args, NULL_TREE),
2457 in_place ? ATTR_FLAG_TYPE_IN_PLACE : 0);
2458 break;
2459
2460 case ATTR_LINK_ALIAS:
74746d49 2461 if (!DECL_EXTERNAL (*node))
a1ab4c31 2462 {
2463 TREE_STATIC (*node) = 1;
2464 assemble_alias (*node, attr->name);
2465 }
2466 break;
2467
2468 case ATTR_WEAK_EXTERNAL:
2469 if (SUPPORTS_WEAK)
74746d49 2470 declare_weak (*node);
2471 else
2472 post_error ("?weak declarations not supported on this target",
74746d49 2473 attr->error_point);
2474 break;
2475
2476 case ATTR_LINK_SECTION:
677f3fa8 2477 if (targetm_common.have_named_sections)
a1ab4c31 2478 {
2479 DECL_SECTION_NAME (*node)
2480 = build_string (IDENTIFIER_LENGTH (attr->name),
2481 IDENTIFIER_POINTER (attr->name));
2482 DECL_COMMON (*node) = 0;
2483 }
2484 else
2485 post_error ("?section attributes are not supported for this target",
74746d49 2486 attr->error_point);
2487 break;
2488
2489 case ATTR_LINK_CONSTRUCTOR:
2490 DECL_STATIC_CONSTRUCTOR (*node) = 1;
2491 TREE_USED (*node) = 1;
2492 break;
2493
2494 case ATTR_LINK_DESTRUCTOR:
2495 DECL_STATIC_DESTRUCTOR (*node) = 1;
2496 TREE_USED (*node) = 1;
a1ab4c31 2497 break;
2498
2499 case ATTR_THREAD_LOCAL_STORAGE:
2500 DECL_TLS_MODEL (*node) = decl_default_tls_model (*node);
2501 DECL_COMMON (*node) = 0;
40a14772 2502 break;
a1ab4c31 2503 }
2504
2505 *attr_list = NULL;
2506}
2507\f
feec4372 2508/* Record DECL as a global renaming pointer. */
2509
2510void
2511record_global_renaming_pointer (tree decl)
2512{
15bf7d19 2513 gcc_assert (!DECL_LOOP_PARM_P (decl) && DECL_RENAMED_OBJECT (decl));
9771b263 2514 vec_safe_push (global_renaming_pointers, decl);
2515}
2516
2517/* Invalidate the global renaming pointers that are not constant, lest their
2518 renamed object contains SAVE_EXPRs tied to an elaboration routine. Note
2519 that we should not blindly invalidate everything here because of the need
2520 to propagate constant values through renaming. */
2521
2522void
2523invalidate_global_renaming_pointers (void)
2524{
2525 unsigned int i;
2526 tree iter;
2527
2528 if (global_renaming_pointers == NULL)
2529 return;
2530
2531 FOR_EACH_VEC_ELT (*global_renaming_pointers, i, iter)
2532 if (!TREE_CONSTANT (DECL_RENAMED_OBJECT (iter)))
2533 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
a1ab4c31 2534
9771b263 2535 vec_free (global_renaming_pointers);
2536}
2537
2538/* Return true if VALUE is known to be a multiple of FACTOR, which must be
2539 a power of 2. */
2540
2541bool
2542value_factor_p (tree value, HOST_WIDE_INT factor)
2543{
cc269bb6 2544 if (tree_fits_uhwi_p (value))
ae7e9ddd 2545 return tree_to_uhwi (value) % factor == 0;
2546
2547 if (TREE_CODE (value) == MULT_EXPR)
2548 return (value_factor_p (TREE_OPERAND (value, 0), factor)
2549 || value_factor_p (TREE_OPERAND (value, 1), factor));
2550
2551 return false;
2552}
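/* For instance, a constant VALUE of 24 with a FACTOR of 8 yields true since
   24 % 8 == 0, and a non-constant MULT_EXPR such as N * 16 is also accepted
   for FACTOR 8 because one of its operands is itself a multiple of 8.  */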
2553
2554/* Return VALUE scaled by the biggest power-of-2 factor of EXPR. */
2555
2556static unsigned int
2557scale_by_factor_of (tree expr, unsigned int value)
2558{
2559 expr = remove_conversions (expr, true);
2560
2561 /* An expression which is a bitwise AND with a mask has a power-of-2 factor
2562 corresponding to the number of trailing zeros of the mask. */
2563 if (TREE_CODE (expr) == BIT_AND_EXPR
2564 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST)
2565 {
2566 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
2567 unsigned int i = 0;
2568
2569 while ((mask & 1) == 0 && i < HOST_BITS_PER_WIDE_INT)
2570 {
2571 mask >>= 1;
2572 value *= 2;
2573 i++;
2574 }
2575 }
2576
2577 return value;
2578}
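/* For instance, if EXPR is the bitwise AND X & -8, the mask has three
   trailing zero bits, so a VALUE of 4 is scaled to 4 * 8 = 32; when EXPR
   carries no such mask, VALUE is returned unchanged.  */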
2579
7d7fcb08 2580/* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
2581 unless we can prove these 2 fields are laid out in such a way that no gap
2582   exists between the end of PREV_FIELD and the beginning of CURR_FIELD.  OFFSET
2583 is the distance in bits between the end of PREV_FIELD and the starting
2584 position of CURR_FIELD. It is ignored if null. */
2585
2586static bool
2587potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
2588{
2589  /* If this is the first field of the record, there cannot be any gap.  */
2590 if (!prev_field)
2591 return false;
2592
78df6221 2593 /* If the previous field is a union type, then return false: The only
2594 time when such a field is not the last field of the record is when
2595 there are other components at fixed positions after it (meaning there
2596 was a rep clause for every field), in which case we don't want the
2597 alignment constraint to override them. */
2598 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
2599 return false;
2600
2601 /* If the distance between the end of prev_field and the beginning of
2602 curr_field is constant, then there is a gap if the value of this
2603 constant is not null. */
cc269bb6 2604 if (offset && tree_fits_uhwi_p (offset))
2605 return !integer_zerop (offset);
2606
2607 /* If the size and position of the previous field are constant,
2608 then check the sum of this size and position. There will be a gap
2609     iff it is not a multiple of the current field alignment.  */
2610 if (tree_fits_uhwi_p (DECL_SIZE (prev_field))
2611 && tree_fits_uhwi_p (bit_position (prev_field)))
2612 return ((tree_to_uhwi (bit_position (prev_field))
2613 + tree_to_uhwi (DECL_SIZE (prev_field)))
2614 % DECL_ALIGN (curr_field) != 0);
2615
2616 /* If both the position and size of the previous field are multiples
2617 of the current field alignment, there cannot be any gap. */
2618 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
2619 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
2620 return false;
2621
2622  /* Fallback: return that there may be a potential gap.  */
2623 return true;
2624}
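/* For instance, an 8-bit PREV_FIELD at bit position 0 followed by a
   CURR_FIELD requiring 32-bit alignment reports a potential gap because
   (0 + 8) % 32 != 0, whereas a 32-bit PREV_FIELD at position 0 cannot leave
   a gap in front of such a field.  */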
2625
2626/* Return a LABEL_DECL with LABEL_NAME. GNAT_NODE is used for the position
2627 of the decl. */
2628
2629tree
88a94e2b 2630create_label_decl (tree label_name, Node_Id gnat_node)
a1ab4c31 2631{
2632 tree label_decl
2633 = build_decl (input_location, LABEL_DECL, label_name, void_type_node);
a1ab4c31 2634
2635 DECL_MODE (label_decl) = VOIDmode;
2636
2637 /* Add this decl to the current binding level. */
2638 gnat_pushdecl (label_decl, gnat_node);
2639
2640 return label_decl;
2641}
2642\f
7d7fcb08 2643/* Return a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
2644 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
2645 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
7d76717d 2646 PARM_DECL nodes chained through the DECL_CHAIN field).
a1ab4c31 2647
0e24192c 2648 INLINE_STATUS, PUBLIC_FLAG, EXTERN_FLAG, ARTIFICIAL_FLAG and ATTR_LIST are
2649 used to set the appropriate fields in the FUNCTION_DECL. GNAT_NODE is
2650 used for the position of the decl. */
2651
2652tree
7d7fcb08 2653create_subprog_decl (tree subprog_name, tree asm_name, tree subprog_type,
2654 tree param_decl_list, enum inline_status_t inline_status,
2655 bool public_flag, bool extern_flag, bool artificial_flag,
7d7fcb08 2656 struct attrib *attr_list, Node_Id gnat_node)
a1ab4c31 2657{
2658 tree subprog_decl = build_decl (input_location, FUNCTION_DECL, subprog_name,
2659 subprog_type);
2660 tree result_decl = build_decl (input_location, RESULT_DECL, NULL_TREE,
2661 TREE_TYPE (subprog_type));
7d7fcb08 2662 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
a1ab4c31 2663
2664 /* If this is a non-inline function nested inside an inlined external
2665 function, we cannot honor both requests without cloning the nested
2666 function in the current unit since it is private to the other unit.
2667 We could inline the nested function as well but it's probably better
2668 to err on the side of too little inlining. */
0e24192c 2669 if (inline_status != is_enabled
5daed84a 2670 && !public_flag
d84b344a
EB
2671 && current_function_decl
2672 && DECL_DECLARED_INLINE_P (current_function_decl)
a1ab4c31 2673 && DECL_EXTERNAL (current_function_decl))
d84b344a 2674 DECL_DECLARED_INLINE_P (current_function_decl) = 0;
a1ab4c31 2675
2676 DECL_ARTIFICIAL (subprog_decl) = artificial_flag;
2677 DECL_EXTERNAL (subprog_decl) = extern_flag;
2678
2679 switch (inline_status)
2680 {
2681 case is_suppressed:
2682 DECL_UNINLINABLE (subprog_decl) = 1;
2683 break;
2684
2685 case is_disabled:
2686 break;
2687
2688 case is_enabled:
2689 DECL_DECLARED_INLINE_P (subprog_decl) = 1;
2690 DECL_NO_INLINE_WARNING_P (subprog_decl) = artificial_flag;
2691 break;
2692
2693 default:
2694 gcc_unreachable ();
2695 }
2696
2697 TREE_PUBLIC (subprog_decl) = public_flag;
2698 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
2699 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
2700 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
a1ab4c31 2701
2702 DECL_ARTIFICIAL (result_decl) = 1;
2703 DECL_IGNORED_P (result_decl) = 1;
2704 DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (subprog_type);
2705 DECL_RESULT (subprog_decl) = result_decl;
a1ab4c31 2706
2707 if (asm_name)
2708 {
2709 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
2710
2711 /* The expand_main_function circuitry expects "main_identifier_node" to
2712 designate the DECL_NAME of the 'main' entry point, in turn expected
2713 to be declared as the "main" function literally by default. Ada
2714 program entry points are typically declared with a different name
2715 within the binder generated file, exported as 'main' to satisfy the
cfbb663c 2716 system expectations. Force main_identifier_node in this case. */
a1ab4c31 2717 if (asm_name == main_identifier_node)
cfbb663c 2718 DECL_NAME (subprog_decl) = main_identifier_node;
2719 }
2720
2721 process_attributes (&subprog_decl, &attr_list, true, gnat_node);
2722
2723 /* Add this decl to the current binding level. */
2724 gnat_pushdecl (subprog_decl, gnat_node);
2725
2726 /* Output the assembler code and/or RTL for the declaration. */
2727 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
2728
2729 return subprog_decl;
2730}
2731\f
2732/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
2733 body. This routine needs to be invoked before processing the declarations
2734 appearing in the subprogram. */
2735
2736void
2737begin_subprog_body (tree subprog_decl)
2738{
2739 tree param_decl;
2740
2741 announce_function (subprog_decl);
2742
2743 /* This function is being defined. */
2744 TREE_STATIC (subprog_decl) = 1;
2745
2746 current_function_decl = subprog_decl;
2747
2748 /* Enter a new binding level and show that all the parameters belong to
2749 this function. */
2750 gnat_pushlevel ();
a09d56d8 2751
a1ab4c31 2752 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
910ad8de 2753 param_decl = DECL_CHAIN (param_decl))
2754 DECL_CONTEXT (param_decl) = subprog_decl;
2755
2756 make_decl_rtl (subprog_decl);
2757}
2758
71196d4e 2759/* Finish translating the current subprogram and set its BODY. */
2760
2761void
a406865a 2762end_subprog_body (tree body)
2763{
2764 tree fndecl = current_function_decl;
2765
bd9c7fb9 2766 /* Attach the BLOCK for this level to the function and pop the level. */
2767 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
2768 DECL_INITIAL (fndecl) = current_binding_level->block;
2769 gnat_poplevel ();
2770
2771 /* Mark the RESULT_DECL as being in this subprogram. */
2772 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
2773
2774 /* The body should be a BIND_EXPR whose BLOCK is the top-level one. */
2775 if (TREE_CODE (body) == BIND_EXPR)
2776 {
2777 BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
2778 DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
2779 }
2780
2781 DECL_SAVED_TREE (fndecl) = body;
2782
228ee426 2783 current_function_decl = decl_function_context (fndecl);
2784}
2785
2786/* Wrap up compilation of SUBPROG_DECL, a subprogram body. */
a1ab4c31 2787
2788void
2789rest_of_subprog_body_compilation (tree subprog_decl)
2790{
2791 /* We cannot track the location of errors past this point. */
2792 error_gnat_node = Empty;
2793
2794 /* If we're only annotating types, don't actually compile this function. */
2795 if (type_annotate_only)
2796 return;
2797
a406865a 2798 /* Dump functions before gimplification. */
71196d4e 2799 dump_function (TDI_original, subprog_decl);
a406865a 2800
228ee426 2801 if (!decl_function_context (subprog_decl))
71196d4e 2802 cgraph_finalize_function (subprog_decl, false);
2803 else
2804 /* Register this function with cgraph just far enough to get it
2805 added to our parent's nested function list. */
71196d4e 2806 (void) cgraph_get_create_node (subprog_decl);
2807}
2808
2809tree
2810gnat_builtin_function (tree decl)
2811{
2812 gnat_pushdecl (decl, Empty);
2813 return decl;
2814}
2815
2816/* Return an integer type with the number of bits of precision given by
2817 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2818 it is a signed type. */
2819
2820tree
2821gnat_type_for_size (unsigned precision, int unsignedp)
2822{
2823 tree t;
2824 char type_name[20];
2825
2826 if (precision <= 2 * MAX_BITS_PER_WORD
2827 && signed_and_unsigned_types[precision][unsignedp])
2828 return signed_and_unsigned_types[precision][unsignedp];
2829
2830 if (unsignedp)
2831 t = make_unsigned_type (precision);
2832 else
2833 t = make_signed_type (precision);
2834
2835 if (precision <= 2 * MAX_BITS_PER_WORD)
2836 signed_and_unsigned_types[precision][unsignedp] = t;
2837
2838 if (!TYPE_NAME (t))
2839 {
c3d79c60 2840 sprintf (type_name, "%sSIGNED_%u", unsignedp ? "UN" : "", precision);
2841 TYPE_NAME (t) = get_identifier (type_name);
2842 }
2843
2844 return t;
2845}
2846
2847/* Likewise for floating-point types. */
2848
2849static tree
2850float_type_for_precision (int precision, enum machine_mode mode)
2851{
2852 tree t;
2853 char type_name[20];
2854
2855 if (float_types[(int) mode])
2856 return float_types[(int) mode];
2857
2858 float_types[(int) mode] = t = make_node (REAL_TYPE);
2859 TYPE_PRECISION (t) = precision;
2860 layout_type (t);
2861
2862 gcc_assert (TYPE_MODE (t) == mode);
2863 if (!TYPE_NAME (t))
2864 {
2865 sprintf (type_name, "FLOAT_%d", precision);
2866 TYPE_NAME (t) = get_identifier (type_name);
2867 }
2868
2869 return t;
2870}
2871
2872/* Return a data type that has machine mode MODE. UNSIGNEDP selects
2873 an unsigned type; otherwise a signed type is returned. */
2874
2875tree
2876gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2877{
2878 if (mode == BLKmode)
2879 return NULL_TREE;
2880
2881 if (mode == VOIDmode)
a1ab4c31 2882 return void_type_node;
2883
2884 if (COMPLEX_MODE_P (mode))
a1ab4c31 2885 return NULL_TREE;
2886
2887 if (SCALAR_FLOAT_MODE_P (mode))
a1ab4c31 2888 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2889
2890 if (SCALAR_INT_MODE_P (mode))
a1ab4c31 2891 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2892
2893 if (VECTOR_MODE_P (mode))
2894 {
2895 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2896 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
2897 if (inner_type)
2898 return build_vector_type_for_mode (inner_type, mode);
2899 }
2900
2901 return NULL_TREE;
2902}
2903
2904/* Return the unsigned version of a TYPE_NODE, a scalar type. */
2905
2906tree
2907gnat_unsigned_type (tree type_node)
2908{
2909 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2910
2911 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2912 {
2913 type = copy_node (type);
2914 TREE_TYPE (type) = type_node;
2915 }
2916 else if (TREE_TYPE (type_node)
2917 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2918 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2919 {
2920 type = copy_node (type);
2921 TREE_TYPE (type) = TREE_TYPE (type_node);
2922 }
2923
2924 return type;
2925}
2926
2927/* Return the signed version of a TYPE_NODE, a scalar type. */
2928
2929tree
2930gnat_signed_type (tree type_node)
2931{
2932 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2933
2934 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2935 {
2936 type = copy_node (type);
2937 TREE_TYPE (type) = type_node;
2938 }
2939 else if (TREE_TYPE (type_node)
2940 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2941 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2942 {
2943 type = copy_node (type);
2944 TREE_TYPE (type) = TREE_TYPE (type_node);
2945 }
2946
2947 return type;
2948}
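/* Editor's note: illustrative sketch, not part of the original source.
   For a plain (non-modular) integer type the two functions above reduce
   to a precision-preserving sign flip; example_sign_flip is a made-up
   name.  */

static void
example_sign_flip (void)
{
  tree s16 = gnat_type_for_size (16, 0);
  tree u16 = gnat_unsigned_type (s16);

  gcc_assert (TYPE_PRECISION (u16) == 16 && TYPE_UNSIGNED (u16));
  gcc_assert (gnat_signed_type (u16) == s16);
}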
2949
2950/* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2951 transparently converted to each other. */
2952
2953int
2954gnat_types_compatible_p (tree t1, tree t2)
2955{
2956 enum tree_code code;
2957
2958 /* This is the default criterion. */
2959 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
2960 return 1;
2961
2962 /* We only check structural equivalence here. */
2963 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
2964 return 0;
2965
7948ae37
OH
2966 /* Vector types are also compatible if they have the same number of subparts
2967 and the same form of (scalar) element type. */
2968 if (code == VECTOR_TYPE
2969 && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
2970 && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
2971 && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
2972 return 1;
2973
cfa0bd19 2974 /* Array types are also compatible if they are constrained and have the same
96bba5e6 2975 domain(s) and the same component type. */
a1ab4c31 2976 if (code == ARRAY_TYPE
0adef32b
JJ
2977 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
2978 || (TYPE_DOMAIN (t1)
b4680ca1 2979 && TYPE_DOMAIN (t2)
0adef32b
JJ
2980 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
2981 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
2982 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
cfa0bd19 2983 TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
96bba5e6
EB
2984 && (TREE_TYPE (t1) == TREE_TYPE (t2)
2985 || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
2986 && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))))
a1ab4c31
AC
2987 return 1;
2988
a1ab4c31
AC
2989 return 0;
2990}
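/* Editor's note: illustrative sketch, not part of the original source.
   Two constrained array types built over the same index type and the same
   component type satisfy the structural test above, whether or not type
   hashing hands back the very same node; the helper name is made up.  */

static void
example_array_compat (void)
{
  tree index_type = build_index_type (size_int (9));
  tree a1 = build_array_type (integer_type_node, index_type);
  tree a2 = build_array_type (integer_type_node, index_type);

  gcc_assert (gnat_types_compatible_p (a1, a2));
}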
523e82a7 2991
71196d4e
EB
2992/* Return true if EXPR is a useless type conversion. */
2993
2994bool
2995gnat_useless_type_conversion (tree expr)
2996{
2997 if (CONVERT_EXPR_P (expr)
2998 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
2999 || TREE_CODE (expr) == NON_LVALUE_EXPR)
3000 return gnat_types_compatible_p (TREE_TYPE (expr),
3001 TREE_TYPE (TREE_OPERAND (expr, 0)));
3002
3003 return false;
3004}
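/* Editor's note: illustrative sketch, not part of the original source.
   A NOP_EXPR whose source and target types are compatible in the above
   sense is reported as useless, which lets gigi strip it before
   gimplification; the helper name is made up.  */

static bool
example_useless_conversion (tree expr)
{
  tree conv
    = build1 (NOP_EXPR, TYPE_MAIN_VARIANT (TREE_TYPE (expr)), expr);

  return gnat_useless_type_conversion (conv);  /* Always true here.  */
}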
3005
523e82a7
EB
3006/* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
3007
3008bool
3009fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
3010 bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
3011{
3012 return TYPE_CI_CO_LIST (t) == cico_list
3013 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
3014 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
3015 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
3016}
a1ab4c31
AC
3017\f
3018/* EXP is an expression for the size of an object. If this size contains
3019 discriminant references, replace them with the maximum (if MAX_P) or
3020 minimum (if !MAX_P) possible value of the discriminant. */
3021
3022tree
3023max_size (tree exp, bool max_p)
3024{
3025 enum tree_code code = TREE_CODE (exp);
3026 tree type = TREE_TYPE (exp);
3027
3028 switch (TREE_CODE_CLASS (code))
3029 {
3030 case tcc_declaration:
3031 case tcc_constant:
3032 return exp;
3033
3034 case tcc_vl_exp:
3035 if (code == CALL_EXPR)
3036 {
f82a627c
EB
3037 tree t, *argarray;
3038 int n, i;
3039
3040 t = maybe_inline_call_in_expr (exp);
3041 if (t)
3042 return max_size (t, max_p);
a1ab4c31 3043
f82a627c
EB
3044 n = call_expr_nargs (exp);
3045 gcc_assert (n > 0);
2bb1fc26 3046 argarray = XALLOCAVEC (tree, n);
a1ab4c31
AC
3047 for (i = 0; i < n; i++)
3048 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
3049 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
3050 }
3051 break;
3052
3053 case tcc_reference:
3054 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
3055 modify. Otherwise, we treat it like a variable. */
3056 if (!CONTAINS_PLACEHOLDER_P (exp))
3057 return exp;
3058
3059 type = TREE_TYPE (TREE_OPERAND (exp, 1));
3060 return
3061 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);
3062
3063 case tcc_comparison:
3064 return max_p ? size_one_node : size_zero_node;
3065
3066 case tcc_unary:
ce3da0d0
EB
3067 if (code == NON_LVALUE_EXPR)
3068 return max_size (TREE_OPERAND (exp, 0), max_p);
6625d7bc 3069
ce3da0d0
EB
3070 return fold_build1 (code, type,
3071 max_size (TREE_OPERAND (exp, 0),
3072 code == NEGATE_EXPR ? !max_p : max_p));
3073
a1ab4c31 3074 case tcc_binary:
ce3da0d0
EB
3075 {
3076 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
3077 tree rhs = max_size (TREE_OPERAND (exp, 1),
3078 code == MINUS_EXPR ? !max_p : max_p);
3079
3080 /* Special-case wanting the maximum value of a MIN_EXPR.
3081 In that case, if one side overflows, return the other. */
3082 if (max_p && code == MIN_EXPR)
3083 {
3084 if (TREE_CODE (rhs) == INTEGER_CST && TREE_OVERFLOW (rhs))
3085 return lhs;
3086
3087 if (TREE_CODE (lhs) == INTEGER_CST && TREE_OVERFLOW (lhs))
3088 return rhs;
3089 }
3090
3091 /* Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
3092 overflowing and the RHS a variable. */
3093 if ((code == MINUS_EXPR || code == PLUS_EXPR)
3094 && TREE_CODE (lhs) == INTEGER_CST
3095 && TREE_OVERFLOW (lhs)
3096 && !TREE_CONSTANT (rhs))
3097 return lhs;
3098
3099 return size_binop (code, lhs, rhs);
3100 }
3101
a1ab4c31
AC
3102 case tcc_expression:
3103 switch (TREE_CODE_LENGTH (code))
3104 {
3105 case 1:
722356ce
EB
3106 if (code == SAVE_EXPR)
3107 return exp;
ce3da0d0
EB
3108
3109 return fold_build1 (code, type,
3110 max_size (TREE_OPERAND (exp, 0), max_p));
a1ab4c31
AC
3111
3112 case 2:
3113 if (code == COMPOUND_EXPR)
3114 return max_size (TREE_OPERAND (exp, 1), max_p);
3115
ce3da0d0
EB
3116 return fold_build2 (code, type,
3117 max_size (TREE_OPERAND (exp, 0), max_p),
3118 max_size (TREE_OPERAND (exp, 1), max_p));
a1ab4c31
AC
3119
3120 case 3:
722356ce 3121 if (code == COND_EXPR)
a1ab4c31
AC
3122 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
3123 max_size (TREE_OPERAND (exp, 1), max_p),
3124 max_size (TREE_OPERAND (exp, 2), max_p));
ce3da0d0
EB
3125
3126 default:
3127 break;
a1ab4c31
AC
3128 }
3129
3130 /* Other tree classes cannot happen. */
3131 default:
3132 break;
3133 }
3134
3135 gcc_unreachable ();
3136}
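/* Editor's note: illustrative sketch, not part of the original source.
   COND is assumed to be a non-constant boolean expression.  Maximizing a
   size that selects between two constants reduces the COND_EXPR to the
   larger arm, and minimizing to the smaller one; the helper name is made
   up.  */

static void
example_max_size (tree cond)
{
  tree size
    = fold_build3 (COND_EXPR, sizetype, cond, size_int (8), size_int (24));

  gcc_assert (tree_int_cst_equal (max_size (size, true), size_int (24)));
  gcc_assert (tree_int_cst_equal (max_size (size, false), size_int (8)));
}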
3137\f
3138/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
3139 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
3140 Return a constructor for the template. */
3141
3142tree
3143build_template (tree template_type, tree array_type, tree expr)
3144{
9771b263 3145 vec<constructor_elt, va_gc> *template_elts = NULL;
a1ab4c31
AC
3146 tree bound_list = NULL_TREE;
3147 tree field;
3148
3149 while (TREE_CODE (array_type) == RECORD_TYPE
315cff15 3150 && (TYPE_PADDING_P (array_type)
a1ab4c31
AC
3151 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
3152 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
3153
3154 if (TREE_CODE (array_type) == ARRAY_TYPE
3155 || (TREE_CODE (array_type) == INTEGER_TYPE
3156 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
3157 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
3158
3159 /* First make the list for a CONSTRUCTOR for the template. Go down the
3160 field list of the template instead of the type chain because this
3161 array might be an Ada array of arrays and we can't tell where the
3162 nested arrays stop being the underlying object. */
3163
3164 for (field = TYPE_FIELDS (template_type); field;
3165 (bound_list
3166 ? (bound_list = TREE_CHAIN (bound_list))
3167 : (array_type = TREE_TYPE (array_type))),
910ad8de 3168 field = DECL_CHAIN (DECL_CHAIN (field)))
a1ab4c31
AC
3169 {
3170 tree bounds, min, max;
3171
3172 /* If we have a bound list, get the bounds from there. Likewise
3173 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
3174 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
3175 This will give us a maximum range. */
3176 if (bound_list)
3177 bounds = TREE_VALUE (bound_list);
3178 else if (TREE_CODE (array_type) == ARRAY_TYPE)
3179 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
3180 else if (expr && TREE_CODE (expr) == PARM_DECL
3181 && DECL_BY_COMPONENT_PTR_P (expr))
3182 bounds = TREE_TYPE (field);
3183 else
3184 gcc_unreachable ();
3185
3186 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
910ad8de 3187 max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));
a1ab4c31
AC
3188
3189 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
3190 substitute it from OBJECT. */
3191 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
3192 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
3193
0e228dd9 3194 CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
910ad8de 3195 CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
a1ab4c31
AC
3196 }
3197
0e228dd9 3198 return gnat_build_constructor (template_type, template_elts);
a1ab4c31
AC
3199}
3200\f
e63b36bd
EB
3201/* Return true if TYPE is suitable for the element type of a vector. */
3202
3203static bool
3204type_for_vector_element_p (tree type)
3205{
3206 enum machine_mode mode;
3207
3208 if (!INTEGRAL_TYPE_P (type)
3209 && !SCALAR_FLOAT_TYPE_P (type)
3210 && !FIXED_POINT_TYPE_P (type))
3211 return false;
3212
3213 mode = TYPE_MODE (type);
3214 if (GET_MODE_CLASS (mode) != MODE_INT
3215 && !SCALAR_FLOAT_MODE_P (mode)
3216 && !ALL_SCALAR_FIXED_POINT_MODE_P (mode))
3217 return false;
3218
3219 return true;
3220}
3221
3222/* Return a vector type given the SIZE and the INNER_TYPE, or NULL_TREE if
3223 this is not possible. If ATTRIBUTE is non-zero, we are processing the
3224 attribute declaration and want to issue error messages on failure. */
3225
3226static tree
3227build_vector_type_for_size (tree inner_type, tree size, tree attribute)
3228{
3229 unsigned HOST_WIDE_INT size_int, inner_size_int;
3230 int nunits;
3231
3232 /* Silently punt on variable sizes. We can't make vector types for them,
3233 we need to ignore them on front-end generated subtypes of unconstrained
3234 base types, and this attribute is for binding implementors, not end
3235 users, so we should never get there from legitimate explicit uses. */
3236 if (!tree_fits_uhwi_p (size))
3237 return NULL_TREE;
3238 size_int = tree_to_uhwi (size);
3239
3240 if (!type_for_vector_element_p (inner_type))
3241 {
3242 if (attribute)
3243 error ("invalid element type for attribute %qs",
3244 IDENTIFIER_POINTER (attribute));
3245 return NULL_TREE;
3246 }
3247 inner_size_int = tree_to_uhwi (TYPE_SIZE_UNIT (inner_type));
3248
3249 if (size_int % inner_size_int)
3250 {
3251 if (attribute)
3252 error ("vector size not an integral multiple of component size");
3253 return NULL_TREE;
3254 }
3255
3256 if (size_int == 0)
3257 {
3258 if (attribute)
3259 error ("zero vector size");
3260 return NULL_TREE;
3261 }
3262
3263 nunits = size_int / inner_size_int;
3264 if (nunits & (nunits - 1))
3265 {
3266 if (attribute)
3267 error ("number of components of vector not a power of two");
3268 return NULL_TREE;
3269 }
3270
3271 return build_vector_type (inner_type, nunits);
3272}
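/* Editor's note: illustrative sketch, not part of the original source.
   A 16-byte request over a 32-bit element yields a 4-element vector,
   while a size that is not a multiple of the element size is rejected
   silently (NULL_TREE) because no attribute identifier is passed; the
   helper name is made up.  */

static void
example_vector_for_size (void)
{
  tree elt = gnat_type_for_size (32, 0);
  tree v4 = build_vector_type_for_size (elt, size_int (16), NULL_TREE);
  tree bad = build_vector_type_for_size (elt, size_int (6), NULL_TREE);

  gcc_assert (v4 && TYPE_VECTOR_SUBPARTS (v4) == 4);
  gcc_assert (bad == NULL_TREE);
}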
3273
3274/* Return a vector type whose representative array type is ARRAY_TYPE, or
3275 NULL_TREE if this is not possible. If ATTRIBUTE is non-zero, we are
3276 processing the attribute and want to issue error messages on failure. */
3277
3278static tree
3279build_vector_type_for_array (tree array_type, tree attribute)
3280{
3281 tree vector_type = build_vector_type_for_size (TREE_TYPE (array_type),
3282 TYPE_SIZE_UNIT (array_type),
3283 attribute);
3284 if (!vector_type)
3285 return NULL_TREE;
3286
3287 TYPE_REPRESENTATIVE_ARRAY (vector_type) = array_type;
3288 return vector_type;
3289}
3290\f
31a5a547
EB
3291/* Helper routine to make a descriptor field. FIELD_LIST is the list of decls
3292 being built; the new decl is chained on to the front of the list. */
3293
3294static tree
3295make_descriptor_field (const char *name, tree type, tree rec_type,
3296 tree initial, tree field_list)
3297{
3298 tree field
3299 = create_field_decl (get_identifier (name), type, rec_type, NULL_TREE,
3300 NULL_TREE, 0, 0);
3301
3302 DECL_INITIAL (field) = initial;
3303 DECL_CHAIN (field) = field_list;
3304 return field;
3305}
3306
58c8f770
EB
3307/* Build a 32-bit VMS descriptor from a Mechanism_Type, which must specify a
3308 descriptor type, and the GCC type of an object. Each FIELD_DECL in the
3309 type contains in its DECL_INITIAL the expression to use when a constructor
3310 is made for the type. GNAT_ENTITY is an entity used to print out an error
3311 message if the mechanism cannot be applied to an object of that type and
3312 also for the name. */
a1ab4c31
AC
3313
3314tree
d628c015 3315build_vms_descriptor32 (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
a1ab4c31
AC
3316{
3317 tree record_type = make_node (RECORD_TYPE);
31a5a547 3318 tree pointer32_type, pointer64_type;
788e5046 3319 tree field_list = NULL_TREE;
31a5a547
EB
3320 int klass, ndim, i, dtype = 0;
3321 tree inner_type, tem;
a1ab4c31 3322 tree *idx_arr;
a1ab4c31
AC
3323
3324 /* If TYPE is an unconstrained array, use the underlying array type. */
3325 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3326 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
3327
3328 /* If this is an array, compute the number of dimensions in the array,
3329 get the index types, and point to the inner type. */
3330 if (TREE_CODE (type) != ARRAY_TYPE)
3331 ndim = 0;
3332 else
3333 for (ndim = 1, inner_type = type;
3334 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
3335 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
3336 ndim++, inner_type = TREE_TYPE (inner_type))
3337 ;
3338
2bb1fc26 3339 idx_arr = XALLOCAVEC (tree, ndim);
a1ab4c31 3340
d628c015 3341 if (mech != By_Descriptor_NCA && mech != By_Short_Descriptor_NCA
a1ab4c31
AC
3342 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
3343 for (i = ndim - 1, inner_type = type;
3344 i >= 0;
3345 i--, inner_type = TREE_TYPE (inner_type))
3346 idx_arr[i] = TYPE_DOMAIN (inner_type);
3347 else
3348 for (i = 0, inner_type = type;
3349 i < ndim;
3350 i++, inner_type = TREE_TYPE (inner_type))
3351 idx_arr[i] = TYPE_DOMAIN (inner_type);
3352
3353 /* Now get the DTYPE value. */
3354 switch (TREE_CODE (type))
3355 {
3356 case INTEGER_TYPE:
3357 case ENUMERAL_TYPE:
01ddebf2 3358 case BOOLEAN_TYPE:
a1ab4c31 3359 if (TYPE_VAX_FLOATING_POINT_P (type))
ae7e9ddd 3360 switch (tree_to_uhwi (TYPE_DIGITS_VALUE (type)))
a1ab4c31
AC
3361 {
3362 case 6:
3363 dtype = 10;
3364 break;
3365 case 9:
3366 dtype = 11;
3367 break;
3368 case 15:
3369 dtype = 27;
3370 break;
3371 }
3372 else
3373 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
3374 {
3375 case 8:
3376 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
3377 break;
3378 case 16:
3379 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
3380 break;
3381 case 32:
3382 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
3383 break;
3384 case 64:
3385 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
3386 break;
3387 case 128:
3388 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
3389 break;
3390 }
3391 break;
3392
3393 case REAL_TYPE:
3394 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
3395 break;
3396
3397 case COMPLEX_TYPE:
3398 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
3399 && TYPE_VAX_FLOATING_POINT_P (type))
ae7e9ddd 3400 switch (tree_to_uhwi (TYPE_DIGITS_VALUE (type)))
a1ab4c31
AC
3401 {
3402 case 6:
3403 dtype = 12;
3404 break;
3405 case 9:
3406 dtype = 13;
3407 break;
3408 case 15:
3409 dtype = 29;
3410 }
3411 else
3412 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54 : 55;
3413 break;
3414
3415 case ARRAY_TYPE:
3416 dtype = 14;
3417 break;
3418
3419 default:
3420 break;
3421 }
3422
3423 /* Get the CLASS value. */
3424 switch (mech)
3425 {
3426 case By_Descriptor_A:
d628c015 3427 case By_Short_Descriptor_A:
c6bd4220 3428 klass = 4;
a1ab4c31
AC
3429 break;
3430 case By_Descriptor_NCA:
d628c015 3431 case By_Short_Descriptor_NCA:
c6bd4220 3432 klass = 10;
a1ab4c31
AC
3433 break;
3434 case By_Descriptor_SB:
d628c015 3435 case By_Short_Descriptor_SB:
c6bd4220 3436 klass = 15;
a1ab4c31
AC
3437 break;
3438 case By_Descriptor:
d628c015 3439 case By_Short_Descriptor:
a1ab4c31 3440 case By_Descriptor_S:
d628c015 3441 case By_Short_Descriptor_S:
a1ab4c31 3442 default:
c6bd4220 3443 klass = 1;
a1ab4c31
AC
3444 break;
3445 }
3446
58c8f770
EB
3447 /* Make the type for a descriptor for VMS. The first four fields are the
3448 same for all types. */
82b481ed
EB
3449 field_list
3450 = make_descriptor_field ("LENGTH", gnat_type_for_size (16, 1), record_type,
3451 size_in_bytes ((mech == By_Descriptor_A
3452 || mech == By_Short_Descriptor_A)
3453 ? inner_type : type),
3454 field_list);
3455 field_list
3456 = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1), record_type,
3457 size_int (dtype), field_list);
3458 field_list
3459 = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1), record_type,
3460 size_int (klass), field_list);
a1ab4c31 3461
a1ab4c31 3462 pointer32_type = build_pointer_type_for_mode (type, SImode, false);
31a5a547
EB
3463 pointer64_type = build_pointer_type_for_mode (type, DImode, false);
3464
3465 /* Ensure that only 32-bit pointers are passed in 32-bit descriptors. Note
3466 that we cannot build a template call to the CE routine as it would get a
3467 wrong source location; instead we use a second placeholder for it. */
3468 tem = build_unary_op (ADDR_EXPR, pointer64_type,
3469 build0 (PLACEHOLDER_EXPR, type));
3470 tem = build3 (COND_EXPR, pointer32_type,
f542c405
EB
3471 Pmode != SImode
3472 ? build_binary_op (GE_EXPR, boolean_type_node, tem,
3473 build_int_cstu (pointer64_type, 0x80000000))
3474 : boolean_false_node,
31a5a547
EB
3475 build0 (PLACEHOLDER_EXPR, void_type_node),
3476 convert (pointer32_type, tem));
a1ab4c31 3477
82b481ed 3478 field_list
31a5a547 3479 = make_descriptor_field ("POINTER", pointer32_type, record_type, tem,
82b481ed 3480 field_list);
a1ab4c31
AC
3481
3482 switch (mech)
3483 {
3484 case By_Descriptor:
d628c015 3485 case By_Short_Descriptor:
a1ab4c31 3486 case By_Descriptor_S:
d628c015 3487 case By_Short_Descriptor_S:
a1ab4c31
AC
3488 break;
3489
3490 case By_Descriptor_SB:
d628c015 3491 case By_Short_Descriptor_SB:
82b481ed
EB
3492 field_list
3493 = make_descriptor_field ("SB_L1", gnat_type_for_size (32, 1),
3494 record_type,
3495 (TREE_CODE (type) == ARRAY_TYPE
3496 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type))
3497 : size_zero_node),
3498 field_list);
3499 field_list
3500 = make_descriptor_field ("SB_U1", gnat_type_for_size (32, 1),
3501 record_type,
3502 (TREE_CODE (type) == ARRAY_TYPE
3503 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type))
3504 : size_zero_node),
3505 field_list);
a1ab4c31
AC
3506 break;
3507
3508 case By_Descriptor_A:
d628c015 3509 case By_Short_Descriptor_A:
a1ab4c31 3510 case By_Descriptor_NCA:
d628c015 3511 case By_Short_Descriptor_NCA:
82b481ed
EB
3512 field_list
3513 = make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
3514 record_type, size_zero_node, field_list);
3515
3516 field_list
3517 = make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
3518 record_type, size_zero_node, field_list);
3519
82b481ed
EB
3520 field_list
3521 = make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
3522 record_type,
3523 size_int ((mech == By_Descriptor_NCA
3524 || mech == By_Short_Descriptor_NCA)
3525 ? 0
3526 /* Set FL_COLUMN, FL_COEFF, and
3527 FL_BOUNDS. */
3528 : (TREE_CODE (type) == ARRAY_TYPE
3529 && TYPE_CONVENTION_FORTRAN_P
3530 (type)
3531 ? 224 : 192)),
3532 field_list);
3533
3534 field_list
3535 = make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
3536 record_type, size_int (ndim), field_list);
3537
3538 field_list
3539 = make_descriptor_field ("ARSIZE", gnat_type_for_size (32, 1),
3540 record_type, size_in_bytes (type),
3541 field_list);
a1ab4c31
AC
3542
3543 /* Now build a pointer to the 0,0,0... element. */
3544 tem = build0 (PLACEHOLDER_EXPR, type);
3545 for (i = 0, inner_type = type; i < ndim;
3546 i++, inner_type = TREE_TYPE (inner_type))
3547 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
3548 convert (TYPE_DOMAIN (inner_type), size_zero_node),
3549 NULL_TREE, NULL_TREE);
3550
82b481ed
EB
3551 field_list
3552 = make_descriptor_field ("A0", pointer32_type, record_type,
3553 build1 (ADDR_EXPR, pointer32_type, tem),
3554 field_list);
a1ab4c31
AC
3555
3556 /* Next come the addressing coefficients. */
3557 tem = size_one_node;
3558 for (i = 0; i < ndim; i++)
3559 {
3560 char fname[3];
3561 tree idx_length
3562 = size_binop (MULT_EXPR, tem,
3563 size_binop (PLUS_EXPR,
3564 size_binop (MINUS_EXPR,
3565 TYPE_MAX_VALUE (idx_arr[i]),
3566 TYPE_MIN_VALUE (idx_arr[i])),
3567 size_int (1)));
3568
d628c015
DR
3569 fname[0] = ((mech == By_Descriptor_NCA
3570 || mech == By_Short_Descriptor_NCA) ? 'S' : 'M');
a1ab4c31 3571 fname[1] = '0' + i, fname[2] = 0;
82b481ed
EB
3572 field_list
3573 = make_descriptor_field (fname, gnat_type_for_size (32, 1),
3574 record_type, idx_length, field_list);
a1ab4c31 3575
d628c015 3576 if (mech == By_Descriptor_NCA || mech == By_Short_Descriptor_NCA)
a1ab4c31
AC
3577 tem = idx_length;
3578 }
3579
3580 /* Finally here are the bounds. */
3581 for (i = 0; i < ndim; i++)
3582 {
3583 char fname[3];
3584
3585 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
82b481ed
EB
3586 field_list
3587 = make_descriptor_field (fname, gnat_type_for_size (32, 1),
3588 record_type, TYPE_MIN_VALUE (idx_arr[i]),
3589 field_list);
a1ab4c31
AC
3590
3591 fname[0] = 'U';
82b481ed
EB
3592 field_list
3593 = make_descriptor_field (fname, gnat_type_for_size (32, 1),
3594 record_type, TYPE_MAX_VALUE (idx_arr[i]),
3595 field_list);
a1ab4c31
AC
3596 }
3597 break;
3598
3599 default:
3600 post_error ("unsupported descriptor type for &", gnat_entity);
3601 }
3602
10069d53 3603 TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC");
788e5046 3604 finish_record_type (record_type, nreverse (field_list), 0, false);
a1ab4c31
AC
3605 return record_type;
3606}
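/* Editor's note: illustrative sketch, not part of the original source.
   Every field of the descriptor record built above carries its fill-in
   expression in DECL_INITIAL (courtesy of make_descriptor_field); this is
   the invariant that fill_vms_descriptor relies on below.  The helper
   name is made up.  */

static void
example_check_descriptor_fields (tree desc_type)
{
  tree field;

  for (field = TYPE_FIELDS (desc_type); field; field = DECL_CHAIN (field))
    gcc_assert (DECL_INITIAL (field) != NULL_TREE);
}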
3607
58c8f770
EB
3608/* Build a 64-bit VMS descriptor from a Mechanism_Type, which must specify a
3609 descriptor type, and the GCC type of an object. Each FIELD_DECL in the
3610 type contains in its DECL_INITIAL the expression to use when a constructor
3611 is made for the type. GNAT_ENTITY is an entity used to print out an error
3612 message if the mechanism cannot be applied to an object of that type and
3613 also for the name. */
6ca2b0a0
DR
3614
3615tree
d628c015 3616build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
6ca2b0a0 3617{
31a5a547 3618 tree record_type = make_node (RECORD_TYPE);
6ca2b0a0 3619 tree pointer64_type;
31a5a547
EB
3620 tree field_list = NULL_TREE;
3621 int klass, ndim, i, dtype = 0;
3622 tree inner_type, tem;
6ca2b0a0 3623 tree *idx_arr;
6ca2b0a0
DR
3624
3625 /* If TYPE is an unconstrained array, use the underlying array type. */
3626 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3627 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
3628
3629 /* If this is an array, compute the number of dimensions in the array,
3630 get the index types, and point to the inner type. */
3631 if (TREE_CODE (type) != ARRAY_TYPE)
3632 ndim = 0;
3633 else
3634 for (ndim = 1, inner_type = type;
3635 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
3636 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
3637 ndim++, inner_type = TREE_TYPE (inner_type))
3638 ;
3639
2bb1fc26 3640 idx_arr = XALLOCAVEC (tree, ndim);
6ca2b0a0
DR
3641
3642 if (mech != By_Descriptor_NCA
3643 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
3644 for (i = ndim - 1, inner_type = type;
3645 i >= 0;
3646 i--, inner_type = TREE_TYPE (inner_type))
3647 idx_arr[i] = TYPE_DOMAIN (inner_type);
3648 else
3649 for (i = 0, inner_type = type;
3650 i < ndim;
3651 i++, inner_type = TREE_TYPE (inner_type))
3652 idx_arr[i] = TYPE_DOMAIN (inner_type);
3653
3654 /* Now get the DTYPE value. */
3655 switch (TREE_CODE (type))
3656 {
3657 case INTEGER_TYPE:
3658 case ENUMERAL_TYPE:
01ddebf2 3659 case BOOLEAN_TYPE:
6ca2b0a0 3660 if (TYPE_VAX_FLOATING_POINT_P (type))
ae7e9ddd 3661 switch (tree_to_uhwi (TYPE_DIGITS_VALUE (type)))
6ca2b0a0
DR
3662 {
3663 case 6:
3664 dtype = 10;
3665 break;
3666 case 9:
3667 dtype = 11;
3668 break;
3669 case 15:
3670 dtype = 27;
3671 break;
3672 }
3673 else
3674 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
3675 {
3676 case 8:
3677 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
3678 break;
3679 case 16:
3680 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
3681 break;
3682 case 32:
3683 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
3684 break;
3685 case 64:
3686 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
3687 break;
3688 case 128:
3689 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
3690 break;
3691 }
3692 break;
3693
3694 case REAL_TYPE:
3695 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
3696 break;
3697
3698 case COMPLEX_TYPE:
3699 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
3700 && TYPE_VAX_FLOATING_POINT_P (type))
ae7e9ddd 3701 switch (tree_to_uhwi (TYPE_DIGITS_VALUE (type)))
6ca2b0a0
DR
3702 {
3703 case 6:
3704 dtype = 12;
3705 break;
3706 case 9:
3707 dtype = 13;
3708 break;
3709 case 15:
3710 dtype = 29;
3711 }
3712 else
3713 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54 : 55;
3714 break;
3715
3716 case ARRAY_TYPE:
3717 dtype = 14;
3718 break;
3719
3720 default:
3721 break;
3722 }
3723
3724 /* Get the CLASS value. */
3725 switch (mech)
3726 {
3727 case By_Descriptor_A:
c6bd4220 3728 klass = 4;
6ca2b0a0
DR
3729 break;
3730 case By_Descriptor_NCA:
c6bd4220 3731 klass = 10;
6ca2b0a0
DR
3732 break;
3733 case By_Descriptor_SB:
c6bd4220 3734 klass = 15;
6ca2b0a0
DR
3735 break;
3736 case By_Descriptor:
3737 case By_Descriptor_S:
3738 default:
c6bd4220 3739 klass = 1;
6ca2b0a0
DR
3740 break;
3741 }
3742
58c8f770 3743 /* Make the type for a 64-bit descriptor for VMS. The first six fields
6ca2b0a0 3744 are the same for all types. */
31a5a547 3745 field_list
788e5046 3746 = make_descriptor_field ("MBO", gnat_type_for_size (16, 1),
31a5a547
EB
3747 record_type, size_int (1), field_list);
3748 field_list
788e5046 3749 = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1),
31a5a547
EB
3750 record_type, size_int (dtype), field_list);
3751 field_list
788e5046 3752 = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1),
31a5a547
EB
3753 record_type, size_int (klass), field_list);
3754 field_list
788e5046 3755 = make_descriptor_field ("MBMO", gnat_type_for_size (32, 1),
f54ee980 3756 record_type, size_int (-1), field_list);
31a5a547 3757 field_list
788e5046 3758 = make_descriptor_field ("LENGTH", gnat_type_for_size (64, 1),
31a5a547 3759 record_type,
788e5046 3760 size_in_bytes (mech == By_Descriptor_A
82b481ed 3761 ? inner_type : type),
31a5a547 3762 field_list);
6ca2b0a0
DR
3763
3764 pointer64_type = build_pointer_type_for_mode (type, DImode, false);
3765
31a5a547
EB
3766 field_list
3767 = make_descriptor_field ("POINTER", pointer64_type, record_type,
788e5046 3768 build_unary_op (ADDR_EXPR, pointer64_type,
82b481ed 3769 build0 (PLACEHOLDER_EXPR, type)),
31a5a547 3770 field_list);
6ca2b0a0
DR
3771
3772 switch (mech)
3773 {
3774 case By_Descriptor:
3775 case By_Descriptor_S:
3776 break;
3777
3778 case By_Descriptor_SB:
31a5a547 3779 field_list
788e5046 3780 = make_descriptor_field ("SB_L1", gnat_type_for_size (64, 1),
31a5a547 3781 record_type,
788e5046
NF
3782 (TREE_CODE (type) == ARRAY_TYPE
3783 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type))
82b481ed 3784 : size_zero_node),
31a5a547
EB
3785 field_list);
3786 field_list
788e5046 3787 = make_descriptor_field ("SB_U1", gnat_type_for_size (64, 1),
31a5a547 3788 record_type,
788e5046
NF
3789 (TREE_CODE (type) == ARRAY_TYPE
3790 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type))
82b481ed 3791 : size_zero_node),
31a5a547 3792 field_list);
6ca2b0a0
DR
3793 break;
3794
3795 case By_Descriptor_A:
3796 case By_Descriptor_NCA:
31a5a547 3797 field_list
788e5046 3798 = make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
31a5a547 3799 record_type, size_zero_node, field_list);
788e5046 3800
31a5a547 3801 field_list
788e5046 3802 = make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
31a5a547 3803 record_type, size_zero_node, field_list);
788e5046
NF
3804
3805 dtype = (mech == By_Descriptor_NCA
3806 ? 0
3807 /* Set FL_COLUMN, FL_COEFF, and
3808 FL_BOUNDS. */
3809 : (TREE_CODE (type) == ARRAY_TYPE
3810 && TYPE_CONVENTION_FORTRAN_P (type)
3811 ? 224 : 192));
31a5a547 3812 field_list
788e5046 3813 = make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
31a5a547
EB
3814 record_type, size_int (dtype),
3815 field_list);
6ca2b0a0 3816
31a5a547 3817 field_list
788e5046 3818 = make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
31a5a547 3819 record_type, size_int (ndim), field_list);
788e5046 3820
31a5a547 3821 field_list
788e5046 3822 = make_descriptor_field ("MBZ", gnat_type_for_size (32, 1),
31a5a547
EB
3823 record_type, size_int (0), field_list);
3824 field_list
788e5046 3825 = make_descriptor_field ("ARSIZE", gnat_type_for_size (64, 1),
31a5a547
EB
3826 record_type, size_in_bytes (type),
3827 field_list);
6ca2b0a0
DR
3828
3829 /* Now build a pointer to the 0,0,0... element. */
3830 tem = build0 (PLACEHOLDER_EXPR, type);
3831 for (i = 0, inner_type = type; i < ndim;
3832 i++, inner_type = TREE_TYPE (inner_type))
3833 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
3834 convert (TYPE_DOMAIN (inner_type), size_zero_node),
3835 NULL_TREE, NULL_TREE);
3836
31a5a547
EB
3837 field_list
3838 = make_descriptor_field ("A0", pointer64_type, record_type,
788e5046 3839 build1 (ADDR_EXPR, pointer64_type, tem),
31a5a547 3840 field_list);
6ca2b0a0
DR
3841
3842 /* Next come the addressing coefficients. */
3843 tem = size_one_node;
3844 for (i = 0; i < ndim; i++)
3845 {
3846 char fname[3];
3847 tree idx_length
3848 = size_binop (MULT_EXPR, tem,
3849 size_binop (PLUS_EXPR,
3850 size_binop (MINUS_EXPR,
3851 TYPE_MAX_VALUE (idx_arr[i]),
3852 TYPE_MIN_VALUE (idx_arr[i])),
3853 size_int (1)));
3854
3855 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
3856 fname[1] = '0' + i, fname[2] = 0;
31a5a547 3857 field_list
788e5046 3858 = make_descriptor_field (fname, gnat_type_for_size (64, 1),
31a5a547 3859 record_type, idx_length, field_list);
6ca2b0a0
DR
3860
3861 if (mech == By_Descriptor_NCA)
3862 tem = idx_length;
3863 }
3864
3865 /* Finally here are the bounds. */
3866 for (i = 0; i < ndim; i++)
3867 {
3868 char fname[3];
3869
3870 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
31a5a547 3871 field_list
788e5046 3872 = make_descriptor_field (fname, gnat_type_for_size (64, 1),
31a5a547
EB
3873 record_type,
3874 TYPE_MIN_VALUE (idx_arr[i]), field_list);
6ca2b0a0
DR
3875
3876 fname[0] = 'U';
31a5a547 3877 field_list
788e5046 3878 = make_descriptor_field (fname, gnat_type_for_size (64, 1),
31a5a547
EB
3879 record_type,
3880 TYPE_MAX_VALUE (idx_arr[i]), field_list);
6ca2b0a0
DR
3881 }
3882 break;
3883
3884 default:
3885 post_error ("unsupported descriptor type for &", gnat_entity);
3886 }
3887
31a5a547
EB
3888 TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC64");
3889 finish_record_type (record_type, nreverse (field_list), 0, false);
3890 return record_type;
6ca2b0a0
DR
3891}
3892
31a5a547
EB
3893/* Fill in a VMS descriptor of GNU_TYPE for GNU_EXPR and return the result.
3894 GNAT_ACTUAL is the actual parameter for which the descriptor is built. */
a1ab4c31 3895
31a5a547
EB
3896tree
3897fill_vms_descriptor (tree gnu_type, tree gnu_expr, Node_Id gnat_actual)
a1ab4c31 3898{
9771b263 3899 vec<constructor_elt, va_gc> *v = NULL;
31a5a547 3900 tree field;
a1ab4c31 3901
31a5a547
EB
3902 gnu_expr = maybe_unconstrained_array (gnu_expr);
3903 gnu_expr = gnat_protect_expr (gnu_expr);
3904 gnat_mark_addressable (gnu_expr);
3905
3906 /* We may need to substitute both GNU_EXPR and a CALL_EXPR to the raise CE
3907 routine in case we have a 32-bit descriptor. */
3908 gnu_expr = build2 (COMPOUND_EXPR, void_type_node,
3909 build_call_raise (CE_Range_Check_Failed, gnat_actual,
3910 N_Raise_Constraint_Error),
3911 gnu_expr);
3912
3913 for (field = TYPE_FIELDS (gnu_type); field; field = DECL_CHAIN (field))
3914 {
3915 tree value
3916 = convert (TREE_TYPE (field),
3917 SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_INITIAL (field),
3918 gnu_expr));
3919 CONSTRUCTOR_APPEND_ELT (v, field, value);
3920 }
3921
3922 return gnat_build_constructor (gnu_type, v);
a1ab4c31
AC
3923}
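/* Editor's note: illustrative sketch, not part of the original source.
   It shows one plausible pairing of build_vms_descriptor and
   fill_vms_descriptor when passing an actual by descriptor; the helper
   name and its parameters are assumptions of the example, not the actual
   call-site code in trans.c.  */

static tree
example_pass_by_descriptor (tree gnu_type, tree gnu_expr, Mechanism_Type mech,
			    Entity_Id gnat_formal, Node_Id gnat_actual)
{
  /* Build the 64-bit descriptor record type for the formal, then fill a
     constructor of it from the actual and pass its address.  */
  tree desc_type = build_vms_descriptor (gnu_type, mech, gnat_formal);
  tree desc = fill_vms_descriptor (desc_type, gnu_expr, gnat_actual);

  return build_unary_op (ADDR_EXPR, NULL_TREE, desc);
}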
3924
d628c015
DR
3925/* Convert GNU_EXPR, a pointer to a 64-bit VMS descriptor, to GNU_TYPE, a
3926 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3927 which the VMS descriptor is passed. */
a1ab4c31
AC
3928
3929static tree
d628c015
DR
3930convert_vms_descriptor64 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3931{
3932 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3933 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3934 /* The CLASS field is the 3rd field in the descriptor. */
910ad8de 3935 tree klass = DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type)));
d628c015 3936 /* The POINTER field is the 6th field in the descriptor. */
910ad8de 3937 tree pointer = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (klass)));
d628c015
DR
3938
3939 /* Retrieve the value of the POINTER field. */
3940 tree gnu_expr64
58c8f770 3941 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
d628c015
DR
3942
3943 if (POINTER_TYPE_P (gnu_type))
3944 return convert (gnu_type, gnu_expr64);
3945
315cff15 3946 else if (TYPE_IS_FAT_POINTER_P (gnu_type))
d628c015
DR
3947 {
3948 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
7d76717d 3949 tree p_bounds_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (gnu_type)));
d628c015
DR
3950 tree template_type = TREE_TYPE (p_bounds_type);
3951 tree min_field = TYPE_FIELDS (template_type);
7d76717d 3952 tree max_field = DECL_CHAIN (TYPE_FIELDS (template_type));
6bf68a93 3953 tree template_tree, template_addr, aflags, dimct, t, u;
d628c015 3954 /* See the head comment of build_vms_descriptor. */
c6bd4220 3955 int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
d628c015 3956 tree lfield, ufield;
9771b263 3957 vec<constructor_elt, va_gc> *v;
d628c015 3958
86060344 3959 /* Convert POINTER to the pointer-to-array type. */
d628c015
DR
3960 gnu_expr64 = convert (p_array_type, gnu_expr64);
3961
c6bd4220 3962 switch (iklass)
d628c015
DR
3963 {
3964 case 1: /* Class S */
3965 case 15: /* Class SB */
3966 /* Build {1, LENGTH} template; LENGTH64 is the 5th field. */
9771b263 3967 vec_alloc (v, 2);
910ad8de 3968 t = DECL_CHAIN (DECL_CHAIN (klass));
d628c015 3969 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
0e228dd9
NF
3970 CONSTRUCTOR_APPEND_ELT (v, min_field,
3971 convert (TREE_TYPE (min_field),
3972 integer_one_node));
3973 CONSTRUCTOR_APPEND_ELT (v, max_field,
3974 convert (TREE_TYPE (max_field), t));
3975 template_tree = gnat_build_constructor (template_type, v);
6bf68a93 3976 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
d628c015
DR
3977
3978 /* For class S, we are done. */
c6bd4220 3979 if (iklass == 1)
d628c015
DR
3980 break;
3981
3982 /* Test that we really have a SB descriptor, like DEC Ada. */
c6bd4220
EB
3983 t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
3984 u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
1139f2e8 3985 u = build_binary_op (EQ_EXPR, boolean_type_node, t, u);
d628c015
DR
3986 /* If so, there is already a template in the descriptor and
3987 it is located right after the POINTER field. The fields are 64 bits
3988 wide, so they must be repacked. */
7d76717d 3989 t = DECL_CHAIN (pointer);
d628c015
DR
3990 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3991 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3992
7d76717d 3993 t = DECL_CHAIN (t);
d628c015
DR
3994 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3995 ufield = convert
910ad8de 3996 (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type))), ufield);
d628c015
DR
3997
3998 /* Build the template in the form of a constructor. */
9771b263 3999 vec_alloc (v, 2);
0e228dd9 4000 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (template_type), lfield);
7d76717d 4001 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (template_type)),
0e228dd9
NF
4002 ufield);
4003 template_tree = gnat_build_constructor (template_type, v);
d628c015
DR
4004
4005 /* Otherwise use the {1, LENGTH} template we build above. */
4006 template_addr = build3 (COND_EXPR, p_bounds_type, u,
4007 build_unary_op (ADDR_EXPR, p_bounds_type,
6bf68a93 4008 template_tree),
d628c015
DR
4009 template_addr);
4010 break;
4011
4012 case 4: /* Class A */
4013 /* The AFLAGS field is the 3rd field after the pointer in the
4014 descriptor. */
910ad8de 4015 t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer)));
d628c015
DR
4016 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
4017 /* The DIMCT field is the next field in the descriptor after
4018 aflags. */
7d76717d 4019 t = DECL_CHAIN (t);
d628c015
DR
4020 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
4021 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
4022 or FL_COEFF or FL_BOUNDS not set. */
4023 u = build_int_cst (TREE_TYPE (aflags), 192);
1139f2e8
EB
4024 u = build_binary_op (TRUTH_OR_EXPR, boolean_type_node,
4025 build_binary_op (NE_EXPR, boolean_type_node,
d628c015
DR
4026 dimct,
4027 convert (TREE_TYPE (dimct),
4028 size_one_node)),
1139f2e8 4029 build_binary_op (NE_EXPR, boolean_type_node,
d628c015
DR
4030 build2 (BIT_AND_EXPR,
4031 TREE_TYPE (aflags),
4032 aflags, u),
4033 u));
4034 /* There is already a template in the descriptor and it is located
4035 in block 3. The fields are 64 bits wide, so they must be repacked. */
910ad8de 4036 t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN
d628c015
DR
4037 (t)))));
4038 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
4039 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
4040
7d76717d 4041 t = DECL_CHAIN (t);
d628c015
DR
4042 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
4043 ufield = convert
910ad8de 4044 (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type))), ufield);
d628c015
DR
4045
4046 /* Build the template in the form of a constructor. */
9771b263 4047 vec_alloc (v, 2);
0e228dd9 4048 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (template_type), lfield);
910ad8de 4049 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (template_type)),
0e228dd9
NF
4050 ufield);
4051 template_tree = gnat_build_constructor (template_type, v);
f76d6e6f 4052 template_tree = build3 (COND_EXPR, template_type, u,
d628c015
DR
4053 build_call_raise (CE_Length_Check_Failed, Empty,
4054 N_Raise_Constraint_Error),
6bf68a93 4055 template_tree);
c6bd4220
EB
4056 template_addr
4057 = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
d628c015
DR
4058 break;
4059
4060 case 10: /* Class NCA */
4061 default:
4062 post_error ("unsupported descriptor type for &", gnat_subprog);
4063 template_addr = integer_zero_node;
4064 break;
4065 }
4066
4067 /* Build the fat pointer in the form of a constructor. */
9771b263 4068 vec_alloc (v, 2);
0e228dd9 4069 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (gnu_type), gnu_expr64);
910ad8de 4070 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (gnu_type)),
0e228dd9
NF
4071 template_addr);
4072 return gnat_build_constructor (gnu_type, v);
d628c015
DR
4073 }
4074
4075 else
4076 gcc_unreachable ();
4077}
4078
4079/* Convert GNU_EXPR, a pointer to a 32-bit VMS descriptor, to GNU_TYPE, a
4080 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
4081 which the VMS descriptor is passed. */
4082
4083static tree
4084convert_vms_descriptor32 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
a1ab4c31
AC
4085{
4086 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
4087 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
4088 /* The CLASS field is the 3rd field in the descriptor. */
910ad8de 4089 tree klass = DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type)));
a1ab4c31 4090 /* The POINTER field is the 4th field in the descriptor. */
910ad8de 4091 tree pointer = DECL_CHAIN (klass);
a1ab4c31
AC
4092
4093 /* Retrieve the value of the POINTER field. */
d628c015 4094 tree gnu_expr32
a1ab4c31
AC
4095 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
4096
4097 if (POINTER_TYPE_P (gnu_type))
d628c015 4098 return convert (gnu_type, gnu_expr32);
a1ab4c31 4099
315cff15 4100 else if (TYPE_IS_FAT_POINTER_P (gnu_type))
a1ab4c31
AC
4101 {
4102 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
7d76717d 4103 tree p_bounds_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (gnu_type)));
a1ab4c31
AC
4104 tree template_type = TREE_TYPE (p_bounds_type);
4105 tree min_field = TYPE_FIELDS (template_type);
7d76717d 4106 tree max_field = DECL_CHAIN (TYPE_FIELDS (template_type));
6bf68a93 4107 tree template_tree, template_addr, aflags, dimct, t, u;
a1ab4c31 4108 /* See the head comment of build_vms_descriptor. */
c6bd4220 4109 int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
9771b263 4110 vec<constructor_elt, va_gc> *v;
a1ab4c31 4111
86060344 4112 /* Convert POINTER to the pointer-to-array type. */
d628c015 4113 gnu_expr32 = convert (p_array_type, gnu_expr32);
a1ab4c31 4114
c6bd4220 4115 switch (iklass)
a1ab4c31
AC
4116 {
4117 case 1: /* Class S */
4118 case 15: /* Class SB */
4119 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
9771b263 4120 vec_alloc (v, 2);
a1ab4c31
AC
4121 t = TYPE_FIELDS (desc_type);
4122 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
0e228dd9
NF
4123 CONSTRUCTOR_APPEND_ELT (v, min_field,
4124 convert (TREE_TYPE (min_field),
4125 integer_one_node));
4126 CONSTRUCTOR_APPEND_ELT (v, max_field,
4127 convert (TREE_TYPE (max_field), t));
4128 template_tree = gnat_build_constructor (template_type, v);
6bf68a93 4129 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
a1ab4c31
AC
4130
4131 /* For class S, we are done. */
c6bd4220 4132 if (iklass == 1)
a1ab4c31
AC
4133 break;
4134
4135 /* Test that we really have a SB descriptor, like DEC Ada. */
c6bd4220
EB
4136 t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
4137 u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
1139f2e8 4138 u = build_binary_op (EQ_EXPR, boolean_type_node, t, u);
a1ab4c31
AC
4139 /* If so, there is already a template in the descriptor and
4140 it is located right after the POINTER field. */
7d76717d 4141 t = DECL_CHAIN (pointer);
c6bd4220
EB
4142 template_tree
4143 = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
a1ab4c31
AC
4144 /* Otherwise use the {1, LENGTH} template we build above. */
4145 template_addr = build3 (COND_EXPR, p_bounds_type, u,
4146 build_unary_op (ADDR_EXPR, p_bounds_type,
6bf68a93 4147 template_tree),
a1ab4c31
AC
4148 template_addr);
4149 break;
4150
4151 case 4: /* Class A */
4152 /* The AFLAGS field is the 7th field in the descriptor. */
910ad8de 4153 t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer)));
a1ab4c31
AC
4154 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
4155 /* The DIMCT field is the 8th field in the descriptor. */
7d76717d 4156 t = DECL_CHAIN (t);
a1ab4c31
AC
4157 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
4158 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
4159 or FL_COEFF or FL_BOUNDS not set. */
4160 u = build_int_cst (TREE_TYPE (aflags), 192);
1139f2e8
EB
4161 u = build_binary_op (TRUTH_OR_EXPR, boolean_type_node,
4162 build_binary_op (NE_EXPR, boolean_type_node,
a1ab4c31
AC
4163 dimct,
4164 convert (TREE_TYPE (dimct),
4165 size_one_node)),
1139f2e8 4166 build_binary_op (NE_EXPR, boolean_type_node,
a1ab4c31
AC
4167 build2 (BIT_AND_EXPR,
4168 TREE_TYPE (aflags),
4169 aflags, u),
4170 u));
a1ab4c31
AC
4171 /* There is already a template in the descriptor and it is
4172 located at the start of block 3 (12th field). */
910ad8de 4173 t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (t))));
c6bd4220
EB
4174 template_tree
4175 = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
f76d6e6f 4176 template_tree = build3 (COND_EXPR, TREE_TYPE (t), u,
d628c015
DR
4177 build_call_raise (CE_Length_Check_Failed, Empty,
4178 N_Raise_Constraint_Error),
6bf68a93 4179 template_tree);
c6bd4220
EB
4180 template_addr
4181 = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
a1ab4c31
AC
4182 break;
4183
4184 case 10: /* Class NCA */
4185 default:
4186 post_error ("unsupported descriptor type for &", gnat_subprog);
4187 template_addr = integer_zero_node;
4188 break;
4189 }
4190
4191 /* Build the fat pointer in the form of a constructor. */
9771b263 4192 vec_alloc (v, 2);
0e228dd9 4193 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (gnu_type), gnu_expr32);
910ad8de 4194 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (gnu_type)),
0e228dd9 4195 template_addr);
d628c015 4196
0e228dd9 4197 return gnat_build_constructor (gnu_type, v);
a1ab4c31
AC
4198 }
4199
4200 else
4201 gcc_unreachable ();
4202}
4203
a981c964
EB
4204/* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
4205 pointer or fat pointer type. GNU_EXPR_ALT_TYPE is the alternate (32-bit)
491f54a7
EB
4206 pointer type of GNU_EXPR. GNAT_SUBPROG is the subprogram to which the
4207 descriptor is passed. */
d628c015 4208
f3d34576 4209tree
a981c964 4210convert_vms_descriptor (tree gnu_type, tree gnu_expr, tree gnu_expr_alt_type,
491f54a7 4211 Entity_Id gnat_subprog)
d628c015
DR
4212{
4213 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
4214 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
4215 tree mbo = TYPE_FIELDS (desc_type);
4216 const char *mbostr = IDENTIFIER_POINTER (DECL_NAME (mbo));
910ad8de 4217 tree mbmo = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (mbo)));
491f54a7 4218 tree is64bit, gnu_expr32, gnu_expr64;
d628c015 4219
a981c964
EB
4220 /* If the field name is not MBO, this is a 32-bit descriptor with no
4221 alternate. Otherwise the primary is 64-bit and the alternate 32-bit. */
d628c015 4222 if (strcmp (mbostr, "MBO") != 0)
0c700259 4223 {
491f54a7 4224 tree ret = convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
0c700259
EB
4225 return ret;
4226 }
d628c015 4227
a981c964 4228 /* Build the test for 64-bit descriptor. */
d628c015
DR
4229 mbo = build3 (COMPONENT_REF, TREE_TYPE (mbo), desc, mbo, NULL_TREE);
4230 mbmo = build3 (COMPONENT_REF, TREE_TYPE (mbmo), desc, mbmo, NULL_TREE);
a981c964 4231 is64bit
1139f2e8
EB
4232 = build_binary_op (TRUTH_ANDIF_EXPR, boolean_type_node,
4233 build_binary_op (EQ_EXPR, boolean_type_node,
a981c964
EB
4234 convert (integer_type_node, mbo),
4235 integer_one_node),
1139f2e8 4236 build_binary_op (EQ_EXPR, boolean_type_node,
a981c964
EB
4237 convert (integer_type_node, mbmo),
4238 integer_minus_one_node));
4239
4240 /* Build the 2 possible end results. */
491f54a7 4241 gnu_expr64 = convert_vms_descriptor64 (gnu_type, gnu_expr, gnat_subprog);
a981c964 4242 gnu_expr = fold_convert (gnu_expr_alt_type, gnu_expr);
491f54a7 4243 gnu_expr32 = convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
a981c964 4244 return build3 (COND_EXPR, gnu_type, is64bit, gnu_expr64, gnu_expr32);
d628c015 4245}
a1ab4c31 4246\f
928dfa4b
EB
4247/* Build a type to be used to represent an aliased object whose nominal type
4248 is an unconstrained array. This consists of a RECORD_TYPE containing a
4249 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
4250 If ARRAY_TYPE is that of an unconstrained array, this is used to represent
4251 an arbitrary unconstrained object. Use NAME as the name of the record.
4252 DEBUG_INFO_P is true if we need to write debug information for the type. */
a1ab4c31
AC
4253
4254tree
928dfa4b
EB
4255build_unc_object_type (tree template_type, tree object_type, tree name,
4256 bool debug_info_p)
a1ab4c31
AC
4257{
4258 tree type = make_node (RECORD_TYPE);
da01bfee
EB
4259 tree template_field
4260 = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
4261 NULL_TREE, NULL_TREE, 0, 1);
4262 tree array_field
4263 = create_field_decl (get_identifier ("ARRAY"), object_type, type,
4264 NULL_TREE, NULL_TREE, 0, 1);
a1ab4c31
AC
4265
4266 TYPE_NAME (type) = name;
4267 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
910ad8de 4268 DECL_CHAIN (template_field) = array_field;
928dfa4b
EB
4269 finish_record_type (type, template_field, 0, true);
4270
4271 /* Declare it now since it will never be declared otherwise. This is
4272 necessary to ensure that its subtrees are properly marked. */
74746d49 4273 create_type_decl (name, type, true, debug_info_p, Empty);
a1ab4c31
AC
4274
4275 return type;
4276}
4277
4278/* Same, taking a thin or fat pointer type instead of a template type. */
4279
4280tree
4281build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
928dfa4b 4282 tree name, bool debug_info_p)
a1ab4c31
AC
4283{
4284 tree template_type;
4285
315cff15 4286 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
a1ab4c31
AC
4287
4288 template_type
315cff15 4289 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
910ad8de 4290 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
a1ab4c31 4291 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
928dfa4b
EB
4292
4293 return
4294 build_unc_object_type (template_type, object_type, name, debug_info_p);
a1ab4c31 4295}
a1ab4c31 4296\f
229077b0
EB
4297/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
4298 In the normal case this is just two adjustments, but we have more to
4299 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
a1ab4c31
AC
4300
4301void
4302update_pointer_to (tree old_type, tree new_type)
4303{
4304 tree ptr = TYPE_POINTER_TO (old_type);
4305 tree ref = TYPE_REFERENCE_TO (old_type);
aeecf17c 4306 tree t;
a1ab4c31
AC
4307
4308 /* If this is the main variant, process all the other variants first. */
4309 if (TYPE_MAIN_VARIANT (old_type) == old_type)
aeecf17c
EB
4310 for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
4311 update_pointer_to (t, new_type);
a1ab4c31 4312
229077b0 4313 /* If no pointers and no references, we are done. */
a1ab4c31
AC
4314 if (!ptr && !ref)
4315 return;
4316
4317 /* Merge the old type qualifiers in the new type.
4318
4319 Each old variant has qualifiers for specific reasons, and the new
229077b0 4320 designated type as well. Each set of qualifiers represents useful
a1ab4c31
AC
4321 information grabbed at some point, and merging the two simply unifies
4322 these inputs into the final type description.
4323
4324 Consider for instance a volatile type frozen after an access to constant
229077b0
EB
4325 type designating it; after the designated type's freeze, we get here with
4326 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
4327 when the access type was processed. We will make a volatile and readonly
a1ab4c31
AC
4328 designated type, because that's what it really is.
4329
229077b0
EB
4330 We might also get here for a non-dummy OLD_TYPE variant with different
4331 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
a1ab4c31 4332 to private record type elaboration (see the comments around the call to
229077b0
EB
4333 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
4334 the qualifiers in those cases too, to avoid accidentally discarding the
4335 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
4336 new_type
4337 = build_qualified_type (new_type,
4338 TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
4339
4340 /* If old type and new type are identical, there is nothing to do. */
a1ab4c31
AC
4341 if (old_type == new_type)
4342 return;
4343
4344 /* Otherwise, first handle the simple case. */
4345 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
4346 {
aeecf17c
EB
4347 tree new_ptr, new_ref;
4348
4349 /* If pointer or reference already points to new type, nothing to do.
4350 This can happen as update_pointer_to can be invoked multiple times
4351 on the same couple of types because of the type variants. */
4352 if ((ptr && TREE_TYPE (ptr) == new_type)
4353 || (ref && TREE_TYPE (ref) == new_type))
4354 return;
4355
4356 /* Chain PTR and its variants at the end. */
4357 new_ptr = TYPE_POINTER_TO (new_type);
4358 if (new_ptr)
4359 {
4360 while (TYPE_NEXT_PTR_TO (new_ptr))
4361 new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
4362 TYPE_NEXT_PTR_TO (new_ptr) = ptr;
4363 }
4364 else
4365 TYPE_POINTER_TO (new_type) = ptr;
a1ab4c31 4366
aeecf17c 4367 /* Now adjust them. */
a1ab4c31 4368 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
aeecf17c 4369 for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
50179d58
EB
4370 {
4371 TREE_TYPE (t) = new_type;
4372 if (TYPE_NULL_BOUNDS (t))
4373 TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
4374 }
de9528f0 4375
aeecf17c
EB
4376 /* Chain REF and its variants at the end. */
4377 new_ref = TYPE_REFERENCE_TO (new_type);
4378 if (new_ref)
4379 {
4380 while (TYPE_NEXT_REF_TO (new_ref))
4381 new_ref = TYPE_NEXT_REF_TO (new_ref);
4382 TYPE_NEXT_REF_TO (new_ref) = ref;
4383 }
4384 else
4385 TYPE_REFERENCE_TO (new_type) = ref;
4386
4387 /* Now adjust them. */
a1ab4c31 4388 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
aeecf17c
EB
4389 for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
4390 TREE_TYPE (t) = new_type;
de9528f0
EB
4391
4392 TYPE_POINTER_TO (old_type) = NULL_TREE;
3bd6ca3f 4393 TYPE_REFERENCE_TO (old_type) = NULL_TREE;
a1ab4c31
AC
4394 }
4395
aeecf17c
EB
4396 /* Now deal with the unconstrained array case. In this case the pointer
4397 is actually a record where both fields are pointers to dummy nodes.
e3edbd56
EB
4398 Turn them into pointers to the correct types using update_pointer_to.
4399 Likewise for the pointer to the object record (thin pointer). */
a1ab4c31
AC
4400 else
4401 {
e3edbd56 4402 tree new_ptr = TYPE_POINTER_TO (new_type);
aeecf17c
EB
4403
4404 gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));
4405
e3edbd56 4406 /* If PTR already points to NEW_TYPE, nothing to do. This can happen
aeecf17c
EB
4407 since update_pointer_to can be invoked multiple times on the same
4408 couple of types because of the type variants. */
4409 if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
4410 return;
4411
a1ab4c31 4412 update_pointer_to
e3edbd56
EB
4413 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
4414 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));
a1ab4c31 4415
a1ab4c31 4416 update_pointer_to
e3edbd56
EB
4417 (TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
4418 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));
aeecf17c 4419
e3edbd56
EB
4420 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
4421 TYPE_OBJECT_RECORD_TYPE (new_type));
a1ab4c31 4422
e3edbd56 4423 TYPE_POINTER_TO (old_type) = NULL_TREE;
a1ab4c31
AC
4424 }
4425}
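/* Editor's note: illustrative sketch, not part of the original source.
   In the simple case (NEW_TYPE is not an unconstrained array type and
   both types are distinct and unqualified), the old pointer node survives
   but now designates NEW_TYPE, and OLD_TYPE stops advertising it; the
   helper name is made up.  */

static void
example_update_pointer (tree old_type, tree new_type)
{
  tree ptr = build_pointer_type (old_type);

  update_pointer_to (old_type, new_type);

  gcc_assert (TREE_TYPE (ptr) == new_type);
  gcc_assert (TYPE_POINTER_TO (old_type) == NULL_TREE);
}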
4426\f
8df2e902
EB
4427/* Convert EXPR, a pointer to a constrained array, into a pointer to an
4428 unconstrained one. This involves making or finding a template. */
a1ab4c31
AC
4429
4430static tree
4431convert_to_fat_pointer (tree type, tree expr)
4432{
910ad8de 4433 tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
8df2e902 4434 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
a1ab4c31 4435 tree etype = TREE_TYPE (expr);
88293f03 4436 tree template_addr;
9771b263
DN
4437 vec<constructor_elt, va_gc> *v;
4438 vec_alloc (v, 2);
a1ab4c31 4439
50179d58
EB
4440 /* If EXPR is null, make a fat pointer that contains a null pointer to the
4441 array (compare_fat_pointers ensures that this is the full discriminant)
4442 and a valid pointer to the bounds. This latter property is necessary
4443 since the compiler can hoist the load of the bounds done through it. */
a1ab4c31 4444 if (integer_zerop (expr))
0e228dd9 4445 {
50179d58
EB
4446 tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4447 tree null_bounds, t;
4448
4449 if (TYPE_NULL_BOUNDS (ptr_template_type))
4450 null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
4451 else
4452 {
4453 /* The template type can still be dummy at this point so we build an
4454 empty constructor. The middle-end will fill it in with zeros. */
9771b263
DN
4455 t = build_constructor (template_type,
4456 NULL);
50179d58
EB
4457 TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
4458 null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
4459 SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
4460 }
4461
0e228dd9 4462 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
50179d58
EB
4463 fold_convert (p_array_type, null_pointer_node));
4464 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
4465 t = build_constructor (type, v);
4466 /* Do not set TREE_CONSTANT so as to force T to static memory. */
4467 TREE_CONSTANT (t) = 0;
4468 TREE_STATIC (t) = 1;
4469
4470 return t;
0e228dd9 4471 }
a1ab4c31 4472
0d7de0e1
EB
4473 /* If EXPR is a thin pointer, make template and data from the record. */
4474 if (TYPE_IS_THIN_POINTER_P (etype))
a1ab4c31 4475 {
0d7de0e1 4476 tree field = TYPE_FIELDS (TREE_TYPE (etype));
a1ab4c31 4477
7d7a1fe8 4478 expr = gnat_protect_expr (expr);
88293f03
EB
4479
4480 /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
4481 the thin pointer value has been shifted so we shift it back to get
4482 the template address. */
4483 if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)))
2b45154d 4484 {
88293f03
EB
4485 template_addr
4486 = build_binary_op (POINTER_PLUS_EXPR, etype, expr,
4487 fold_build1 (NEGATE_EXPR, sizetype,
4488 byte_position
4489 (DECL_CHAIN (field))));
4490 template_addr
4491 = fold_convert (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))),
4492 template_addr);
2b45154d 4493 }
a1ab4c31 4494
88293f03
EB
4495 /* Otherwise we explicitly take the address of the fields. */
4496 else
4497 {
4498 expr = build_unary_op (INDIRECT_REF, NULL_TREE, expr);
4499 template_addr
4500 = build_unary_op (ADDR_EXPR, NULL_TREE,
4501 build_component_ref (expr, NULL_TREE, field,
4502 false));
4503 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
4504 build_component_ref (expr, NULL_TREE,
4505 DECL_CHAIN (field),
4506 false));
4507 }
a1ab4c31 4508 }
8df2e902
EB
4509
4510 /* Otherwise, build the constructor for the template. */
a1ab4c31 4511 else
88293f03
EB
4512 template_addr
4513 = build_unary_op (ADDR_EXPR, NULL_TREE,
4514 build_template (template_type, TREE_TYPE (etype),
4515 expr));
a1ab4c31 4516
8df2e902 4517 /* The final result is a constructor for the fat pointer.
a1ab4c31 4518
8df2e902
EB
4519 If EXPR is an argument of a foreign convention subprogram, the type it
4520 points to is directly the component type. In this case, the expression
a1ab4c31 4521 type may not match the corresponding FIELD_DECL type at this point, so we
8df2e902 4522 call "convert" here to fix that up if necessary. This type consistency is
a1ab4c31 4523 required, for instance because it ensures that possible later folding of
8df2e902 4524 COMPONENT_REFs against this constructor always yields something of the
a1ab4c31
AC
4525 same type as the initial reference.
4526
8df2e902
EB
4527 Note that the call to "build_template" above is still fine because it
4528 will only refer to the provided TEMPLATE_TYPE in this case. */
88293f03
EB
4529 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), convert (p_array_type, expr));
4530 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), template_addr);
0e228dd9 4531 return gnat_build_constructor (type, v);
a1ab4c31
AC
4532}
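/* As an illustration (the field names below are only for exposition; the
   actual FIELD_DECLs are those of the fat pointer record built elsewhere):
   for a hypothetical unconstrained array type

       type Int_Array is array (Positive range <>) of Integer;

   a fat pointer built by convert_to_fat_pointer is conceptually the pair

       (P_ARRAY  => address of the array data,
        P_BOUNDS => address of the template holding the bounds)

   so converting a thin pointer or a constrained array access to it merely
   fills in these two components.  */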
4533\f
a1ab4c31
AC
4534/* Create an expression whose value is that of EXPR,
4535 converted to type TYPE. The TREE_TYPE of the value
4536 is always TYPE. This function implements all reasonable
4537 conversions; callers should filter out those that are
4538 not permitted by the language being compiled. */
4539
4540tree
4541convert (tree type, tree expr)
4542{
a1ab4c31
AC
4543 tree etype = TREE_TYPE (expr);
4544 enum tree_code ecode = TREE_CODE (etype);
c34f3839 4545 enum tree_code code = TREE_CODE (type);
a1ab4c31 4546
c34f3839
EB
4547 /* If the expression is already of the right type, we are done. */
4548 if (etype == type)
a1ab4c31
AC
4549 return expr;
4550
4551 /* If both input and output have padding and are of variable size, do this
4552 as an unchecked conversion. Likewise if one is a mere variant of the
4553 other, so we avoid a pointless unpad/repad sequence. */
4554 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
315cff15 4555 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
a1ab4c31
AC
4556 && (!TREE_CONSTANT (TYPE_SIZE (type))
4557 || !TREE_CONSTANT (TYPE_SIZE (etype))
842d4ee2 4558 || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
a1ab4c31
AC
4559 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
4560 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
4561 ;
4562
f88facfe
EB
4563 /* If the output type has padding, convert to the inner type and make a
4564 constructor to build the record, unless a variable size is involved. */
315cff15 4565 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
a1ab4c31 4566 {
9771b263 4567 vec<constructor_elt, va_gc> *v;
0e228dd9 4568
a1ab4c31
AC
4569 /* If we previously converted from another type and our type is
4570 of variable size, remove the conversion to avoid the need for
f88facfe 4571 variable-sized temporaries. Likewise for a conversion between
a1ab4c31
AC
4572 original and packable version. */
4573 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4574 && (!TREE_CONSTANT (TYPE_SIZE (type))
4575 || (ecode == RECORD_TYPE
4576 && TYPE_NAME (etype)
4577 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
4578 expr = TREE_OPERAND (expr, 0);
4579
4580 /* If we are just removing the padding from expr, convert the original
4581 object if we have variable size in order to avoid the need for some
f88facfe 4582 variable-sized temporaries. Likewise if the padding is a variant
a1ab4c31
AC
4583 of the other, so we avoid a pointless unpad/repad sequence. */
4584 if (TREE_CODE (expr) == COMPONENT_REF
a1ab4c31
AC
4585 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
4586 && (!TREE_CONSTANT (TYPE_SIZE (type))
842d4ee2
EB
4587 || TYPE_MAIN_VARIANT (type)
4588 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))
a1ab4c31
AC
4589 || (ecode == RECORD_TYPE
4590 && TYPE_NAME (etype)
4591 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
4592 return convert (type, TREE_OPERAND (expr, 0));
4593
431cfac1
EB
4594 /* If the inner type is of self-referential size and the expression type
4595 is a record, do this as an unchecked conversion. But first pad the
4596 expression if possible to have the same size on both sides. */
c34f3839 4597 if (ecode == RECORD_TYPE
f88facfe 4598 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
431cfac1 4599 {
980a0501 4600 if (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST)
431cfac1 4601 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
980a0501
EB
4602 false, false, false, true),
4603 expr);
431cfac1
EB
4604 return unchecked_convert (type, expr, false);
4605 }
a1ab4c31 4606
f88facfe
EB
4607 /* If we are converting between array types with variable size, do the
4608 final conversion as an unchecked conversion, again to avoid the need
4609 for some variable-sized temporaries. If valid, this conversion is
4610 very likely purely technical and without real effects. */
c34f3839 4611 if (ecode == ARRAY_TYPE
f88facfe
EB
4612 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
4613 && !TREE_CONSTANT (TYPE_SIZE (etype))
4614 && !TREE_CONSTANT (TYPE_SIZE (type)))
4615 return unchecked_convert (type,
4616 convert (TREE_TYPE (TYPE_FIELDS (type)),
4617 expr),
4618 false);
4619
9771b263 4620 vec_alloc (v, 1);
0e228dd9
NF
4621 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4622 convert (TREE_TYPE (TYPE_FIELDS (type)), expr));
4623 return gnat_build_constructor (type, v);
a1ab4c31
AC
4624 }
4625
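/* To illustrate the padding case above with hypothetical types: a component
   of a 24-bit type laid out in a 32-bit slot is given a padding type, i.e.
   a one-field wrapper record, and converting an expression to that padding
   type amounts to building the one-element CONSTRUCTOR

       {single_field => (inner_type) expr}

   except when variable sizes force the unchecked-conversion paths taken
   earlier in this block.  */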
4626 /* If the input type has padding, remove it and convert to the output type.
 4627 The ordering of the conditions is arranged to ensure that the output type
 4628 is not a padding type here, as it is not clear whether the conversion would
 4629 always be correct if this were to happen. */
315cff15 4630 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
a1ab4c31
AC
4631 {
4632 tree unpadded;
4633
4634 /* If we have just converted to this padded type, just get the
4635 inner expression. */
4636 if (TREE_CODE (expr) == CONSTRUCTOR
9771b263
DN
4637 && !vec_safe_is_empty (CONSTRUCTOR_ELTS (expr))
4638 && (*CONSTRUCTOR_ELTS (expr))[0].index == TYPE_FIELDS (etype))
4639 unpadded = (*CONSTRUCTOR_ELTS (expr))[0].value;
a1ab4c31
AC
4640
4641 /* Otherwise, build an explicit component reference. */
4642 else
4643 unpadded
4644 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
4645
4646 return convert (type, unpadded);
4647 }
4648
4649 /* If the input is a biased type, adjust first. */
4650 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4651 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
4652 fold_convert (TREE_TYPE (etype),
4653 expr),
4654 TYPE_MIN_VALUE (etype)));
4655
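/* For example (hypothetical Ada code), given

       type Small is range 100 .. 107;
       for Small'Size use 3;

   the 3-bit stored value is the logical value minus 100, so the adjustment
   above rebuilds the logical value by adding back TYPE_MIN_VALUE (here 100)
   in the base type before doing the actual conversion.  */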
4656 /* If the input is a justified modular type, we need to extract the actual
4657 object before converting it to any other type with the exceptions of an
4658 unconstrained array or of a mere type variant. It is useful to avoid the
4659 extraction and conversion in the type variant case because it could end
 4661 up replacing a VAR_DECL expr by a constructor and we might be about to
4661 take the address of the result. */
4662 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
4663 && code != UNCONSTRAINED_ARRAY_TYPE
4664 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
4665 return convert (type, build_component_ref (expr, NULL_TREE,
4666 TYPE_FIELDS (etype), false));
4667
4668 /* If converting to a type that contains a template, convert to the data
4669 type and then build the template. */
4670 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
4671 {
910ad8de 4672 tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
9771b263
DN
4673 vec<constructor_elt, va_gc> *v;
4674 vec_alloc (v, 2);
a1ab4c31
AC
4675
4676 /* If the source already has a template, get a reference to the
4677 associated array only, as we are going to rebuild a template
4678 for the target type anyway. */
4679 expr = maybe_unconstrained_array (expr);
4680
0e228dd9
NF
4681 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4682 build_template (TREE_TYPE (TYPE_FIELDS (type)),
4683 obj_type, NULL_TREE));
910ad8de 4684 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
0e228dd9
NF
4685 convert (obj_type, expr));
4686 return gnat_build_constructor (type, v);
a1ab4c31
AC
4687 }
4688
a1c7d797 4689 /* There are some cases of expressions that we process specially. */
a1ab4c31
AC
4690 switch (TREE_CODE (expr))
4691 {
4692 case ERROR_MARK:
4693 return expr;
4694
4695 case NULL_EXPR:
4696 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4697 conversion in gnat_expand_expr. NULL_EXPR does not represent
 4698 an actual value, so no conversion is needed. */
4699 expr = copy_node (expr);
4700 TREE_TYPE (expr) = type;
4701 return expr;
4702
4703 case STRING_CST:
4704 /* If we are converting a STRING_CST to another constrained array type,
4705 just make a new one in the proper type. */
4706 if (code == ecode && AGGREGATE_TYPE_P (etype)
4707 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
4708 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
4709 {
4710 expr = copy_node (expr);
4711 TREE_TYPE (expr) = type;
4712 return expr;
4713 }
4714 break;
4715
7948ae37 4716 case VECTOR_CST:
44e9e3ec 4717 /* If we are converting a VECTOR_CST to a mere type variant, just make
7948ae37
OH
4718 a new one in the proper type. */
4719 if (code == ecode && gnat_types_compatible_p (type, etype))
4720 {
4721 expr = copy_node (expr);
4722 TREE_TYPE (expr) = type;
4723 return expr;
4724 }
4725
a1ab4c31 4726 case CONSTRUCTOR:
44e9e3ec
EB
4727 /* If we are converting a CONSTRUCTOR to a mere type variant, or to
4728 another padding type around the same type, just make a new one in
4729 the proper type. */
4730 if (code == ecode
4731 && (gnat_types_compatible_p (type, etype)
4732 || (code == RECORD_TYPE
4733 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4734 && TREE_TYPE (TYPE_FIELDS (type))
4735 == TREE_TYPE (TYPE_FIELDS (etype)))))
a1ab4c31
AC
4736 {
4737 expr = copy_node (expr);
4738 TREE_TYPE (expr) = type;
9771b263 4739 CONSTRUCTOR_ELTS (expr) = vec_safe_copy (CONSTRUCTOR_ELTS (expr));
a1ab4c31
AC
4740 return expr;
4741 }
4742
cb3d597d
EB
4743 /* Likewise for a conversion between original and packable version, or
4744 conversion between types of the same size and with the same list of
4745 fields, but we have to work harder to preserve type consistency. */
a1ab4c31
AC
4746 if (code == ecode
4747 && code == RECORD_TYPE
cb3d597d
EB
4748 && (TYPE_NAME (type) == TYPE_NAME (etype)
4749 || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
4750
a1ab4c31 4751 {
9771b263
DN
4752 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4753 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4754 vec<constructor_elt, va_gc> *v;
4755 vec_alloc (v, len);
a1ab4c31
AC
4756 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4757 unsigned HOST_WIDE_INT idx;
4758 tree index, value;
4759
db868e1e
OH
4760 /* Whether we need to clear TREE_CONSTANT et al. on the output
4761 constructor when we convert in place. */
4762 bool clear_constant = false;
4763
a1ab4c31
AC
4764 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
4765 {
44e9e3ec
EB
4766 /* Skip the missing fields in the CONSTRUCTOR. */
4767 while (efield && field && !SAME_FIELD_P (efield, index))
4768 {
4769 efield = DECL_CHAIN (efield);
4770 field = DECL_CHAIN (field);
4771 }
cb3d597d 4772 /* The field must be the same. */
44e9e3ec 4773 if (!(efield && field && SAME_FIELD_P (efield, field)))
a1ab4c31 4774 break;
44e9e3ec
EB
4775 constructor_elt elt
4776 = {field, convert (TREE_TYPE (field), value)};
9771b263 4777 v->quick_push (elt);
db868e1e
OH
4778
4779 /* If packing has made this field a bitfield and the input
4780 value couldn't be emitted statically any more, we need to
4781 clear TREE_CONSTANT on our output. */
ced57283
EB
4782 if (!clear_constant
4783 && TREE_CONSTANT (expr)
db868e1e
OH
4784 && !CONSTRUCTOR_BITFIELD_P (efield)
4785 && CONSTRUCTOR_BITFIELD_P (field)
4786 && !initializer_constant_valid_for_bitfield_p (value))
4787 clear_constant = true;
4788
910ad8de
NF
4789 efield = DECL_CHAIN (efield);
4790 field = DECL_CHAIN (field);
a1ab4c31
AC
4791 }
4792
db868e1e
OH
4793 /* If we have been able to match and convert all the input fields
 4794 to their output type, convert in place now. We'll fall back to a
4795 view conversion downstream otherwise. */
a1ab4c31
AC
4796 if (idx == len)
4797 {
4798 expr = copy_node (expr);
4799 TREE_TYPE (expr) = type;
4800 CONSTRUCTOR_ELTS (expr) = v;
db868e1e 4801 if (clear_constant)
ced57283 4802 TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
a1ab4c31
AC
4803 return expr;
4804 }
4805 }
7948ae37
OH
4806
4807 /* Likewise for a conversion between array type and vector type with a
4808 compatible representative array. */
4809 else if (code == VECTOR_TYPE
4810 && ecode == ARRAY_TYPE
4811 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4812 etype))
4813 {
9771b263
DN
4814 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4815 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4816 vec<constructor_elt, va_gc> *v;
7948ae37
OH
4817 unsigned HOST_WIDE_INT ix;
4818 tree value;
4819
4820 /* Build a VECTOR_CST from a *constant* array constructor. */
4821 if (TREE_CONSTANT (expr))
4822 {
4823 bool constant_p = true;
4824
4825 /* Iterate through elements and check if all constructor
4826 elements are *_CSTs. */
4827 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4828 if (!CONSTANT_CLASS_P (value))
4829 {
4830 constant_p = false;
4831 break;
4832 }
4833
4834 if (constant_p)
4835 return build_vector_from_ctor (type,
4836 CONSTRUCTOR_ELTS (expr));
4837 }
4838
4839 /* Otherwise, build a regular vector constructor. */
9771b263 4840 vec_alloc (v, len);
7948ae37
OH
4841 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4842 {
f32682ca 4843 constructor_elt elt = {NULL_TREE, value};
9771b263 4844 v->quick_push (elt);
7948ae37
OH
4845 }
4846 expr = copy_node (expr);
4847 TREE_TYPE (expr) = type;
4848 CONSTRUCTOR_ELTS (expr) = v;
4849 return expr;
4850 }
a1ab4c31
AC
4851 break;
4852
4853 case UNCONSTRAINED_ARRAY_REF:
a1c7d797
EB
4854 /* First retrieve the underlying array. */
4855 expr = maybe_unconstrained_array (expr);
4856 etype = TREE_TYPE (expr);
4857 ecode = TREE_CODE (etype);
4858 break;
a1ab4c31
AC
4859
4860 case VIEW_CONVERT_EXPR:
4861 {
4862 /* GCC 4.x is very sensitive to type consistency overall, and view
4863 conversions thus are very frequent. Even though just "convert"ing
4864 the inner operand to the output type is fine in most cases, it
4865 might expose unexpected input/output type mismatches in special
4866 circumstances so we avoid such recursive calls when we can. */
4867 tree op0 = TREE_OPERAND (expr, 0);
4868
4869 /* If we are converting back to the original type, we can just
4870 lift the input conversion. This is a common occurrence with
4871 switches back-and-forth amongst type variants. */
4872 if (type == TREE_TYPE (op0))
4873 return op0;
4874
7948ae37
OH
4875 /* Otherwise, if we're converting between two aggregate or vector
4876 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4877 target type in place or to just convert the inner expression. */
4878 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4879 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
a1ab4c31
AC
4880 {
4881 /* If we are converting between mere variants, we can just
4882 substitute the VIEW_CONVERT_EXPR in place. */
4883 if (gnat_types_compatible_p (type, etype))
4884 return build1 (VIEW_CONVERT_EXPR, type, op0);
4885
4886 /* Otherwise, we may just bypass the input view conversion unless
4887 one of the types is a fat pointer, which is handled by
4888 specialized code below which relies on exact type matching. */
315cff15
EB
4889 else if (!TYPE_IS_FAT_POINTER_P (type)
4890 && !TYPE_IS_FAT_POINTER_P (etype))
a1ab4c31
AC
4891 return convert (type, op0);
4892 }
ad1d36ba
EB
4893
4894 break;
a1ab4c31 4895 }
a1ab4c31 4896
a1ab4c31
AC
4897 default:
4898 break;
4899 }
4900
4901 /* Check for converting to a pointer to an unconstrained array. */
315cff15 4902 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
a1ab4c31
AC
4903 return convert_to_fat_pointer (type, expr);
4904
7948ae37
OH
4905 /* If we are converting between two aggregate or vector types that are mere
4906 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4907 to a vector type from its representative array type. */
4908 else if ((code == ecode
4909 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4910 && gnat_types_compatible_p (type, etype))
4911 || (code == VECTOR_TYPE
4912 && ecode == ARRAY_TYPE
4913 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4914 etype)))
a1ab4c31
AC
4915 return build1 (VIEW_CONVERT_EXPR, type, expr);
4916
76af763d
EB
4917 /* If we are converting between tagged types, try to upcast properly. */
4918 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4919 && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type))
4920 {
4921 tree child_etype = etype;
4922 do {
4923 tree field = TYPE_FIELDS (child_etype);
4924 if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
4925 return build_component_ref (expr, NULL_TREE, field, false);
4926 child_etype = TREE_TYPE (field);
4927 } while (TREE_CODE (child_etype) == RECORD_TYPE);
4928 }
4929
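/* As a hypothetical example of the upcast above:

       type Root  is tagged record ... end record;
       type Child is new Root with record ... end record;

   converting an object of type Child to Root walks the chain of inherited
   parent fields and returns a COMPONENT_REF to the Root part, instead of
   reinterpreting the whole object with a view conversion.  */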
bb1f7929
EB
4930 /* If we are converting from a smaller form of record type back to it, just
4931 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4932 size on both sides. */
4933 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4934 && smaller_form_type_p (etype, type))
4935 {
4936 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4937 false, false, false, true),
4938 expr);
4939 return build1 (VIEW_CONVERT_EXPR, type, expr);
4940 }
4941
a1ab4c31 4942 /* In all other cases of related types, make a NOP_EXPR. */
86060344 4943 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
a1ab4c31
AC
4944 return fold_convert (type, expr);
4945
4946 switch (code)
4947 {
4948 case VOID_TYPE:
4949 return fold_build1 (CONVERT_EXPR, type, expr);
4950
a1ab4c31
AC
4951 case INTEGER_TYPE:
4952 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4953 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4954 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4955 return unchecked_convert (type, expr, false);
4956 else if (TYPE_BIASED_REPRESENTATION_P (type))
4957 return fold_convert (type,
4958 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4959 convert (TREE_TYPE (type), expr),
4960 TYPE_MIN_VALUE (type)));
4961
4962 /* ... fall through ... */
4963
4964 case ENUMERAL_TYPE:
01ddebf2 4965 case BOOLEAN_TYPE:
a1ab4c31
AC
4966 /* If we are converting an additive expression to an integer type
4967 with lower precision, be wary of the optimization that can be
4968 applied by convert_to_integer. There are 2 problematic cases:
4969 - if the first operand was originally of a biased type,
4970 because we could be recursively called to convert it
4971 to an intermediate type and thus rematerialize the
4972 additive operator endlessly,
4973 - if the expression contains a placeholder, because an
4974 intermediate conversion that changes the sign could
4975 be inserted and thus introduce an artificial overflow
4976 at compile time when the placeholder is substituted. */
4977 if (code == INTEGER_TYPE
4978 && ecode == INTEGER_TYPE
4979 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4980 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4981 {
4982 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4983
4984 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4985 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4986 || CONTAINS_PLACEHOLDER_P (expr))
4987 return build1 (NOP_EXPR, type, expr);
4988 }
4989
4990 return fold (convert_to_integer (type, expr));
4991
4992 case POINTER_TYPE:
4993 case REFERENCE_TYPE:
0d7de0e1 4994 /* If converting between two thin pointers, adjust if needed to account
2b45154d
EB
4995 for differing offsets from the base pointer, depending on whether
4996 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type. */
315cff15 4997 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
a1ab4c31 4998 {
2b45154d
EB
4999 tree etype_pos
5000 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)) != NULL_TREE
5001 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype))))
5002 : size_zero_node;
5003 tree type_pos
5004 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != NULL_TREE
5005 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type))))
5006 : size_zero_node;
5007 tree byte_diff = size_diffop (type_pos, etype_pos);
0d7de0e1 5008
a1ab4c31 5009 expr = build1 (NOP_EXPR, type, expr);
a1ab4c31
AC
5010 if (integer_zerop (byte_diff))
5011 return expr;
5012
5013 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
0d7de0e1 5014 fold_convert (sizetype, byte_diff));
a1ab4c31
AC
5015 }
5016
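/* To make the thin pointer adjustment above concrete (the offset is only
   illustrative): if the source thin pointer designates the start of the
   object record while the target kind has been shifted to designate the
   array data located, say, 8 bytes further, then BYTE_DIFF is 8 and the
   conversion adds those 8 bytes with a POINTER_PLUS_EXPR on top of the
   NOP_EXPR.  */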
0d7de0e1
EB
5017 /* If converting fat pointer to normal or thin pointer, get the pointer
5018 to the array and then convert it. */
5019 if (TYPE_IS_FAT_POINTER_P (etype))
86060344
EB
5020 expr
5021 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
a1ab4c31
AC
5022
5023 return fold (convert_to_pointer (type, expr));
5024
5025 case REAL_TYPE:
5026 return fold (convert_to_real (type, expr));
5027
5028 case RECORD_TYPE:
5029 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
0e228dd9 5030 {
9771b263
DN
5031 vec<constructor_elt, va_gc> *v;
5032 vec_alloc (v, 1);
0e228dd9
NF
5033
5034 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
5035 convert (TREE_TYPE (TYPE_FIELDS (type)),
5036 expr));
5037 return gnat_build_constructor (type, v);
5038 }
a1ab4c31
AC
5039
5040 /* ... fall through ... */
5041
5042 case ARRAY_TYPE:
5043 /* In these cases, assume the front-end has validated the conversion.
5044 If the conversion is valid, it will be a bit-wise conversion, so
5045 it can be viewed as an unchecked conversion. */
5046 return unchecked_convert (type, expr, false);
5047
5048 case UNION_TYPE:
 5049 /* This is either a conversion between a tagged type and some
5050 subtype, which we have to mark as a UNION_TYPE because of
5051 overlapping fields or a conversion of an Unchecked_Union. */
5052 return unchecked_convert (type, expr, false);
5053
5054 case UNCONSTRAINED_ARRAY_TYPE:
7948ae37
OH
5055 /* If the input is a VECTOR_TYPE, convert to the representative
5056 array type first. */
5057 if (ecode == VECTOR_TYPE)
5058 {
5059 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
5060 etype = TREE_TYPE (expr);
5061 ecode = TREE_CODE (etype);
5062 }
5063
a1ab4c31
AC
5064 /* If EXPR is a constrained array, take its address, convert it to a
5065 fat pointer, and then dereference it. Likewise if EXPR is a
5066 record containing both a template and a constrained array.
5067 Note that a record representing a justified modular type
5068 always represents a packed constrained array. */
5069 if (ecode == ARRAY_TYPE
5070 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
5071 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
5072 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
5073 return
5074 build_unary_op
5075 (INDIRECT_REF, NULL_TREE,
5076 convert_to_fat_pointer (TREE_TYPE (type),
5077 build_unary_op (ADDR_EXPR,
5078 NULL_TREE, expr)));
5079
5080 /* Do something very similar for converting one unconstrained
5081 array to another. */
5082 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
5083 return
5084 build_unary_op (INDIRECT_REF, NULL_TREE,
5085 convert (TREE_TYPE (type),
5086 build_unary_op (ADDR_EXPR,
5087 NULL_TREE, expr)));
5088 else
5089 gcc_unreachable ();
5090
5091 case COMPLEX_TYPE:
5092 return fold (convert_to_complex (type, expr));
5093
5094 default:
5095 gcc_unreachable ();
5096 }
5097}
15bf7d19
EB
5098
5099/* Create an expression whose value is that of EXPR converted to the common
5100 index type, which is sizetype. EXPR is supposed to be in the base type
5101 of the GNAT index type. Calling it is equivalent to doing
5102
5103 convert (sizetype, expr)
5104
5105 but we try to distribute the type conversion with the knowledge that EXPR
5106 cannot overflow in its type. This is a best-effort approach and we fall
5107 back to the above expression as soon as difficulties are encountered.
5108
5109 This is necessary to overcome issues that arise when the GNAT base index
5110 type and the GCC common index type (sizetype) don't have the same size,
5111 which is quite frequent on 64-bit architectures. In this case, and if
5112 the GNAT base index type is signed but the iteration type of the loop has
5113 been forced to unsigned, the loop scalar evolution engine cannot compute
5114 a simple evolution for the general induction variables associated with the
5115 array indices, because it will preserve the wrap-around semantics in the
5116 unsigned type of their "inner" part. As a result, many loop optimizations
5117 are blocked.
5118
5119 The solution is to use a special (basic) induction variable that is at
5120 least as large as sizetype, and to express the aforementioned general
5121 induction variables in terms of this induction variable, eliminating
5122 the problematic intermediate truncation to the GNAT base index type.
5123 This is possible as long as the original expression doesn't overflow
5124 and if the middle-end hasn't introduced artificial overflows in the
 5125 course of the various simplifications it can make to the expression. */
5126
5127tree
5128convert_to_index_type (tree expr)
5129{
5130 enum tree_code code = TREE_CODE (expr);
5131 tree type = TREE_TYPE (expr);
5132
5133 /* If the type is unsigned, overflow is allowed so we cannot be sure that
5134 EXPR doesn't overflow. Keep it simple if optimization is disabled. */
5135 if (TYPE_UNSIGNED (type) || !optimize)
5136 return convert (sizetype, expr);
5137
5138 switch (code)
5139 {
5140 case VAR_DECL:
5141 /* The main effect of the function: replace a loop parameter with its
5142 associated special induction variable. */
5143 if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
5144 expr = DECL_INDUCTION_VAR (expr);
5145 break;
5146
5147 CASE_CONVERT:
5148 {
5149 tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
5150 /* Bail out as soon as we suspect some sort of type frobbing. */
5151 if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
5152 || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
5153 break;
5154 }
5155
5156 /* ... fall through ... */
5157
5158 case NON_LVALUE_EXPR:
5159 return fold_build1 (code, sizetype,
5160 convert_to_index_type (TREE_OPERAND (expr, 0)));
5161
5162 case PLUS_EXPR:
5163 case MINUS_EXPR:
5164 case MULT_EXPR:
5165 return fold_build2 (code, sizetype,
5166 convert_to_index_type (TREE_OPERAND (expr, 0)),
5167 convert_to_index_type (TREE_OPERAND (expr, 1)));
5168
5169 case COMPOUND_EXPR:
5170 return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
5171 convert_to_index_type (TREE_OPERAND (expr, 1)));
5172
5173 case COND_EXPR:
5174 return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
5175 convert_to_index_type (TREE_OPERAND (expr, 1)),
5176 convert_to_index_type (TREE_OPERAND (expr, 2)));
5177
5178 default:
5179 break;
5180 }
5181
5182 return convert (sizetype, expr);
5183}
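/* A hypothetical example of the distribution performed above: with a
   32-bit signed Integer index and a 64-bit sizetype, an index expression
   such as I + 1 would naively become

       (sizetype) ((integer) I + 1)

   whereas distributing the conversion yields

       (sizetype) I + 1

   computed directly in sizetype, and I itself may further be replaced by
   its DECL_INDUCTION_VAR.  This is only valid because I + 1 is known not
   to overflow in its original signed type.  */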
a1ab4c31
AC
5184\f
5185/* Remove all conversions that are done in EXP. This includes converting
5186 from a padded type or to a justified modular type. If TRUE_ADDRESS
5187 is true, always return the address of the containing object even if
5188 the address is not bit-aligned. */
5189
5190tree
5191remove_conversions (tree exp, bool true_address)
5192{
5193 switch (TREE_CODE (exp))
5194 {
5195 case CONSTRUCTOR:
5196 if (true_address
5197 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
5198 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
5199 return
9771b263 5200 remove_conversions ((*CONSTRUCTOR_ELTS (exp))[0].value, true);
a1ab4c31
AC
5201 break;
5202
5203 case COMPONENT_REF:
315cff15 5204 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
a1ab4c31
AC
5205 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
5206 break;
5207
a1ab4c31 5208 CASE_CONVERT:
722356ce
EB
5209 case VIEW_CONVERT_EXPR:
5210 case NON_LVALUE_EXPR:
a1ab4c31
AC
5211 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
5212
5213 default:
5214 break;
5215 }
5216
5217 return exp;
5218}
5219\f
5220/* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
86060344 5221 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
a1ab4c31
AC
5222 likewise return an expression pointing to the underlying array. */
5223
5224tree
5225maybe_unconstrained_array (tree exp)
5226{
5227 enum tree_code code = TREE_CODE (exp);
1aa291f7 5228 tree type = TREE_TYPE (exp);
a1ab4c31 5229
1aa291f7 5230 switch (TREE_CODE (type))
a1ab4c31
AC
5231 {
5232 case UNCONSTRAINED_ARRAY_TYPE:
5233 if (code == UNCONSTRAINED_ARRAY_REF)
5234 {
7e169899 5235 const bool read_only = TREE_READONLY (exp);
a1c7d797
EB
5236 const bool no_trap = TREE_THIS_NOTRAP (exp);
5237
7e169899 5238 exp = TREE_OPERAND (exp, 0);
1aa291f7
EB
5239 type = TREE_TYPE (exp);
5240
7e169899
EB
5241 if (TREE_CODE (exp) == COND_EXPR)
5242 {
5243 tree op1
5244 = build_unary_op (INDIRECT_REF, NULL_TREE,
5245 build_component_ref (TREE_OPERAND (exp, 1),
5246 NULL_TREE,
1aa291f7 5247 TYPE_FIELDS (type),
7e169899
EB
5248 false));
5249 tree op2
5250 = build_unary_op (INDIRECT_REF, NULL_TREE,
5251 build_component_ref (TREE_OPERAND (exp, 2),
5252 NULL_TREE,
1aa291f7 5253 TYPE_FIELDS (type),
7e169899
EB
5254 false));
5255
5256 exp = build3 (COND_EXPR,
1aa291f7 5257 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
7e169899
EB
5258 TREE_OPERAND (exp, 0), op1, op2);
5259 }
5260 else
a1c7d797
EB
5261 {
5262 exp = build_unary_op (INDIRECT_REF, NULL_TREE,
5263 build_component_ref (exp, NULL_TREE,
1aa291f7 5264 TYPE_FIELDS (type),
a1c7d797
EB
5265 false));
5266 TREE_READONLY (exp) = read_only;
5267 TREE_THIS_NOTRAP (exp) = no_trap;
5268 }
a1ab4c31
AC
5269 }
5270
5271 else if (code == NULL_EXPR)
1aa291f7
EB
5272 exp = build1 (NULL_EXPR,
5273 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
5274 TREE_OPERAND (exp, 0));
5275 break;
a1ab4c31
AC
5276
5277 case RECORD_TYPE:
1aa291f7
EB
5278 /* If this is a padded type and it contains a template, convert to the
5279 unpadded type first. */
5280 if (TYPE_PADDING_P (type)
5281 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
5282 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
a1ab4c31 5283 {
1aa291f7
EB
5284 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
5285 type = TREE_TYPE (exp);
5286 }
5287
5288 if (TYPE_CONTAINS_TEMPLATE_P (type))
5289 {
5290 exp = build_component_ref (exp, NULL_TREE,
5291 DECL_CHAIN (TYPE_FIELDS (type)),
5292 false);
5293 type = TREE_TYPE (exp);
5294
5295 /* If the array type is padded, convert to the unpadded type. */
5296 if (TYPE_IS_PADDING_P (type))
5297 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
a1ab4c31 5298 }
a1ab4c31
AC
5299 break;
5300
5301 default:
5302 break;
5303 }
5304
5305 return exp;
5306}
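/* As an illustration (the field name is only for exposition): for a fat
   pointer Ptr, the dereference Ptr.all is an UNCONSTRAINED_ARRAY_REF and
   the function above rewrites it as

       *Ptr.P_ARRAY

   i.e. an INDIRECT_REF through the array field, while an object of a
   record type that contains a template is reduced to its array component,
   leaving the template behind.  */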
5307\f
afcea859 5308/* Return true if EXPR is an expression that can be folded as an operand
84fb43a1 5309 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
afcea859
EB
5310
5311static bool
5312can_fold_for_view_convert_p (tree expr)
5313{
5314 tree t1, t2;
5315
5316 /* The folder will fold NOP_EXPRs between integral types with the same
5317 precision (in the middle-end's sense). We cannot allow it if the
5318 types don't have the same precision in the Ada sense as well. */
5319 if (TREE_CODE (expr) != NOP_EXPR)
5320 return true;
5321
5322 t1 = TREE_TYPE (expr);
5323 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
5324
5325 /* Defer to the folder for non-integral conversions. */
5326 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
5327 return true;
5328
5329 /* Only fold conversions that preserve both precisions. */
5330 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
5331 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
5332 return true;
5333
5334 return false;
5335}
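/* A hypothetical example of what the check above prevents: a subtype with
   a TYPE_RM_SIZE of 10 may share a 32-bit mode, hence the same precision
   in the middle-end's sense, with its 32-bit base type; folding a NOP_EXPR
   between the two under a VIEW_CONVERT_EXPR would then lose the narrowing
   to 10 value bits, so such a NOP_EXPR is not considered foldable.  */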
5336
a1ab4c31 5337/* Return an expression that does an unchecked conversion of EXPR to TYPE.
afcea859
EB
5338 If NOTRUNC_P is true, truncation operations should be suppressed.
5339
5340 Special care is required with (source or target) integral types whose
5341 precision is not equal to their size, to make sure we fetch or assign
5342 the value bits whose location might depend on the endianness, e.g.
5343
5344 Rmsize : constant := 8;
5345 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
5346
5347 type Bit_Array is array (1 .. Rmsize) of Boolean;
5348 pragma Pack (Bit_Array);
5349
5350 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
5351
5352 Value : Int := 2#1000_0001#;
5353 Vbits : Bit_Array := To_Bit_Array (Value);
5354
5355 we expect the 8 bits at Vbits'Address to always contain Value, while
5356 their original location depends on the endianness, at Value'Address
84fb43a1 5357 on a little-endian architecture but not on a big-endian one. */
a1ab4c31
AC
5358
5359tree
5360unchecked_convert (tree type, tree expr, bool notrunc_p)
5361{
5362 tree etype = TREE_TYPE (expr);
c34f3839
EB
5363 enum tree_code ecode = TREE_CODE (etype);
5364 enum tree_code code = TREE_CODE (type);
e63b36bd 5365 tree tem;
980a0501 5366 int c;
a1ab4c31 5367
c34f3839 5368 /* If the expression is already of the right type, we are done. */
a1ab4c31
AC
5369 if (etype == type)
5370 return expr;
5371
 5372 /* If both types are integral, just do a normal conversion.
5373 Likewise for a conversion to an unconstrained array. */
5374 if ((((INTEGRAL_TYPE_P (type)
c34f3839 5375 && !(code == INTEGER_TYPE && TYPE_VAX_FLOATING_POINT_P (type)))
0d7de0e1 5376 || (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
c34f3839 5377 || (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
a1ab4c31 5378 && ((INTEGRAL_TYPE_P (etype)
c34f3839 5379 && !(ecode == INTEGER_TYPE && TYPE_VAX_FLOATING_POINT_P (etype)))
315cff15 5380 || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
c34f3839
EB
5381 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
5382 || code == UNCONSTRAINED_ARRAY_TYPE)
a1ab4c31 5383 {
c34f3839 5384 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
a1ab4c31
AC
5385 {
5386 tree ntype = copy_type (etype);
a1ab4c31
AC
5387 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
5388 TYPE_MAIN_VARIANT (ntype) = ntype;
5389 expr = build1 (NOP_EXPR, ntype, expr);
5390 }
5391
c34f3839 5392 if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
a1ab4c31 5393 {
afcea859 5394 tree rtype = copy_type (type);
a1ab4c31
AC
5395 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
5396 TYPE_MAIN_VARIANT (rtype) = rtype;
afcea859
EB
5397 expr = convert (rtype, expr);
5398 expr = build1 (NOP_EXPR, type, expr);
a1ab4c31 5399 }
afcea859
EB
5400 else
5401 expr = convert (type, expr);
a1ab4c31
AC
5402 }
5403
afcea859 5404 /* If we are converting to an integral type whose precision is not equal
416de7d5
EB
 5405 to its size, first unchecked convert to a record type that contains a
5406 field of the given precision. Then extract the field. */
980a0501
EB
5407 else if (INTEGRAL_TYPE_P (type)
5408 && TYPE_RM_SIZE (type)
a1ab4c31
AC
5409 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
5410 GET_MODE_BITSIZE (TYPE_MODE (type))))
5411 {
5412 tree rec_type = make_node (RECORD_TYPE);
416de7d5
EB
5413 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
5414 tree field_type, field;
5415
5416 if (TYPE_UNSIGNED (type))
5417 field_type = make_unsigned_type (prec);
5418 else
5419 field_type = make_signed_type (prec);
5420 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));
5421
5422 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
44e9e3ec 5423 NULL_TREE, bitsize_zero_node, 1, 0);
a1ab4c31 5424
44e9e3ec 5425 finish_record_type (rec_type, field, 1, false);
a1ab4c31
AC
5426
5427 expr = unchecked_convert (rec_type, expr, notrunc_p);
3cd64bab 5428 expr = build_component_ref (expr, NULL_TREE, field, false);
416de7d5 5429 expr = fold_build1 (NOP_EXPR, type, expr);
a1ab4c31
AC
5430 }
5431
416de7d5
EB
5432 /* Similarly if we are converting from an integral type whose precision is
5433 not equal to its size, first copy into a field of the given precision
5434 and unchecked convert the record type. */
980a0501
EB
5435 else if (INTEGRAL_TYPE_P (etype)
5436 && TYPE_RM_SIZE (etype)
5437 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
5438 GET_MODE_BITSIZE (TYPE_MODE (etype))))
a1ab4c31
AC
5439 {
5440 tree rec_type = make_node (RECORD_TYPE);
416de7d5 5441 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
9771b263
DN
5442 vec<constructor_elt, va_gc> *v;
5443 vec_alloc (v, 1);
416de7d5
EB
5444 tree field_type, field;
5445
5446 if (TYPE_UNSIGNED (etype))
5447 field_type = make_unsigned_type (prec);
5448 else
5449 field_type = make_signed_type (prec);
5450 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));
5451
5452 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
44e9e3ec 5453 NULL_TREE, bitsize_zero_node, 1, 0);
a1ab4c31 5454
44e9e3ec 5455 finish_record_type (rec_type, field, 1, false);
a1ab4c31 5456
416de7d5 5457 expr = fold_build1 (NOP_EXPR, field_type, expr);
0e228dd9
NF
5458 CONSTRUCTOR_APPEND_ELT (v, field, expr);
5459 expr = gnat_build_constructor (rec_type, v);
a1ab4c31
AC
5460 expr = unchecked_convert (type, expr, notrunc_p);
5461 }
5462
980a0501
EB
5463 /* If we are converting from a scalar type to a type with a different size,
5464 we need to pad to have the same size on both sides.
5465
5466 ??? We cannot do it unconditionally because unchecked conversions are
5467 used liberally by the front-end to implement polymorphism, e.g. in:
5468
5469 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
5470 return p___size__4 (p__object!(S191s.all));
5471
5472 so we skip all expressions that are references. */
5473 else if (!REFERENCE_CLASS_P (expr)
5474 && !AGGREGATE_TYPE_P (etype)
5475 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5476 && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
5477 {
5478 if (c < 0)
5479 {
5480 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
5481 false, false, false, true),
5482 expr);
5483 expr = unchecked_convert (type, expr, notrunc_p);
5484 }
5485 else
5486 {
5487 tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
5488 false, false, false, true);
5489 expr = unchecked_convert (rec_type, expr, notrunc_p);
5490 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (rec_type),
5491 false);
5492 }
5493 }
5494
7948ae37
OH
5495 /* We have a special case when we are converting between two unconstrained
5496 array types. In that case, take the address, convert the fat pointer
5497 types, and dereference. */
c34f3839 5498 else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
a1ab4c31
AC
5499 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
5500 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
5501 build_unary_op (ADDR_EXPR, NULL_TREE,
5502 expr)));
7948ae37
OH
5503
5504 /* Another special case is when we are converting to a vector type from its
 5505 representative array type; this is a regular conversion. */
c34f3839
EB
5506 else if (code == VECTOR_TYPE
5507 && ecode == ARRAY_TYPE
7948ae37
OH
5508 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
5509 etype))
5510 expr = convert (type, expr);
5511
e63b36bd
EB
5512 /* And, if the array type is not the representative, we try to build an
5513 intermediate vector type of which the array type is the representative
5514 and to do the unchecked conversion between the vector types, in order
5515 to enable further simplifications in the middle-end. */
5516 else if (code == VECTOR_TYPE
5517 && ecode == ARRAY_TYPE
5518 && (tem = build_vector_type_for_array (etype, NULL_TREE)))
5519 {
5520 expr = convert (tem, expr);
5521 return unchecked_convert (type, expr, notrunc_p);
5522 }
5523
44e9e3ec
EB
5524 /* If we are converting a CONSTRUCTOR to a more aligned RECORD_TYPE, bump
5525 the alignment of the CONSTRUCTOR to speed up the copy operation. */
5526 else if (TREE_CODE (expr) == CONSTRUCTOR
5527 && code == RECORD_TYPE
5528 && TYPE_ALIGN (etype) < TYPE_ALIGN (type))
5529 {
5530 expr = convert (maybe_pad_type (etype, NULL_TREE, TYPE_ALIGN (type),
5531 Empty, false, false, false, true),
5532 expr);
5533 return unchecked_convert (type, expr, notrunc_p);
5534 }
5535
5536 /* Otherwise, just build a VIEW_CONVERT_EXPR of the expression. */
a1ab4c31
AC
5537 else
5538 {
5539 expr = maybe_unconstrained_array (expr);
5540 etype = TREE_TYPE (expr);
c34f3839 5541 ecode = TREE_CODE (etype);
afcea859
EB
5542 if (can_fold_for_view_convert_p (expr))
5543 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
5544 else
5545 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
a1ab4c31
AC
5546 }
5547
afcea859
EB
5548 /* If the result is an integral type whose precision is not equal to its
5549 size, sign- or zero-extend the result. We need not do this if the input
5550 is an integral type of the same precision and signedness or if the output
a1ab4c31
AC
5551 is a biased type or if both the input and output are unsigned. */
5552 if (!notrunc_p
5553 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
c34f3839 5554 && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
a1ab4c31
AC
5555 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
5556 GET_MODE_BITSIZE (TYPE_MODE (type)))
5557 && !(INTEGRAL_TYPE_P (etype)
5558 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
5559 && operand_equal_p (TYPE_RM_SIZE (type),
5560 (TYPE_RM_SIZE (etype) != 0
5561 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
5562 0))
5563 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
5564 {
c34f3839
EB
5565 tree base_type
5566 = gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
a1ab4c31
AC
5567 tree shift_expr
5568 = convert (base_type,
5569 size_binop (MINUS_EXPR,
5570 bitsize_int
5571 (GET_MODE_BITSIZE (TYPE_MODE (type))),
5572 TYPE_RM_SIZE (type)));
5573 expr
5574 = convert (type,
5575 build_binary_op (RSHIFT_EXPR, base_type,
5576 build_binary_op (LSHIFT_EXPR, base_type,
5577 convert (base_type, expr),
5578 shift_expr),
5579 shift_expr));
5580 }
5581
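/* Numeric illustration (hypothetical sizes): for a signed type with a
   TYPE_RM_SIZE of 5 stored in an 8-bit mode, SHIFT_EXPR is 8 - 5 = 3 and
   the sequence above computes

       (expr << 3) >> 3

   in the 8-bit base type, which propagates bit 4 into the upper bits and
   thus sign-extends the 5 value bits; with an unsigned base type the same
   two shifts perform a zero extension instead.  */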
5582 /* An unchecked conversion should never raise Constraint_Error. The code
5583 below assumes that GCC's conversion routines overflow the same way that
5584 the underlying hardware does. This is probably true. In the rare case
5585 when it is false, we can rely on the fact that such conversions are
5586 erroneous anyway. */
5587 if (TREE_CODE (expr) == INTEGER_CST)
5588 TREE_OVERFLOW (expr) = 0;
5589
 5590 /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
 5591 mark the expression as no longer constant. */
5592 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
5593 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
5594 OEP_ONLY_CONST))
5595 TREE_CONSTANT (expr) = 0;
5596
5597 return expr;
5598}
5599\f
feec4372 5600/* Return the appropriate GCC tree code for the specified GNAT_TYPE,
a1ab4c31
AC
5601 the latter being a record type as predicated by Is_Record_Type. */
5602
5603enum tree_code
5604tree_code_for_record_type (Entity_Id gnat_type)
5605{
b1a785fb 5606 Node_Id component_list, component;
a1ab4c31 5607
b1a785fb
EB
5608 /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5609 fields are all in the variant part. Otherwise, return RECORD_TYPE. */
a1ab4c31
AC
5610 if (!Is_Unchecked_Union (gnat_type))
5611 return RECORD_TYPE;
5612
b1a785fb
EB
5613 gnat_type = Implementation_Base_Type (gnat_type);
5614 component_list
5615 = Component_List (Type_Definition (Declaration_Node (gnat_type)));
5616
a1ab4c31
AC
5617 for (component = First_Non_Pragma (Component_Items (component_list));
5618 Present (component);
5619 component = Next_Non_Pragma (component))
5620 if (Ekind (Defining_Entity (component)) == E_Component)
5621 return RECORD_TYPE;
5622
5623 return UNION_TYPE;
5624}
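/* Hypothetical Ada example for the function above: the following
   Unchecked_Union has no non-discriminant component outside its variant
   part, so it is translated into a UNION_TYPE:

       type Num (Is_Float : Boolean := False) is record
          case Is_Float is
             when False => I : Integer;
             when True  => F : Float;
          end case;
       end record;
       pragma Unchecked_Union (Num);

   Adding a component before the case part would yield RECORD_TYPE.  */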
5625
caa9d12a
EB
5626/* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5627 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5628 according to the presence of an alignment clause on the type or, if it
5629 is an array, on the component type. */
5630
5631bool
5632is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
5633{
5634 gnat_type = Underlying_Type (gnat_type);
5635
5636 *align_clause = Present (Alignment_Clause (gnat_type));
5637
5638 if (Is_Array_Type (gnat_type))
5639 {
5640 gnat_type = Underlying_Type (Component_Type (gnat_type));
5641 if (Present (Alignment_Clause (gnat_type)))
5642 *align_clause = true;
5643 }
5644
5645 if (!Is_Floating_Point_Type (gnat_type))
5646 return false;
5647
5648 if (UI_To_Int (Esize (gnat_type)) != 64)
5649 return false;
5650
5651 return true;
5652}
5653
5654/* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5655 size is greater or equal to 64 bits, or an array of such a type. Set
5656 ALIGN_CLAUSE according to the presence of an alignment clause on the
5657 type or, if it is an array, on the component type. */
5658
5659bool
5660is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
5661{
5662 gnat_type = Underlying_Type (gnat_type);
5663
5664 *align_clause = Present (Alignment_Clause (gnat_type));
5665
5666 if (Is_Array_Type (gnat_type))
5667 {
5668 gnat_type = Underlying_Type (Component_Type (gnat_type));
5669 if (Present (Alignment_Clause (gnat_type)))
5670 *align_clause = true;
5671 }
5672
5673 if (!Is_Scalar_Type (gnat_type))
5674 return false;
5675
5676 if (UI_To_Int (Esize (gnat_type)) < 64)
5677 return false;
5678
5679 return true;
5680}
5681
a1ab4c31
AC
5682/* Return true if GNU_TYPE is suitable as the type of a non-aliased
5683 component of an aggregate type. */
5684
5685bool
5686type_for_nonaliased_component_p (tree gnu_type)
5687{
5688 /* If the type is passed by reference, we may have pointers to the
5689 component so it cannot be made non-aliased. */
5690 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
5691 return false;
5692
5693 /* We used to say that any component of aggregate type is aliased
5694 because the front-end may take 'Reference of it. The front-end
5695 has been enhanced in the meantime so as to use a renaming instead
5696 in most cases, but the back-end can probably take the address of
5697 such a component too so we go for the conservative stance.
5698
5699 For instance, we might need the address of any array type, even
5700 if normally passed by copy, to construct a fat pointer if the
5701 component is used as an actual for an unconstrained formal.
5702
5703 Likewise for record types: even if a specific record subtype is
5704 passed by copy, the parent type might be passed by ref (e.g. if
5705 it's of variable size) and we might take the address of a child
5706 component to pass to a parent formal. We have no way to check
5707 for such conditions here. */
5708 if (AGGREGATE_TYPE_P (gnu_type))
5709 return false;
5710
5711 return true;
5712}
5713
bb1f7929
EB
5714/* Return true if TYPE is a smaller form of ORIG_TYPE. */
5715
5716bool
5717smaller_form_type_p (tree type, tree orig_type)
5718{
5719 tree size, osize;
5720
5721 /* We're not interested in variants here. */
5722 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
5723 return false;
5724
5725 /* Like a variant, a packable version keeps the original TYPE_NAME. */
5726 if (TYPE_NAME (type) != TYPE_NAME (orig_type))
5727 return false;
5728
5729 size = TYPE_SIZE (type);
5730 osize = TYPE_SIZE (orig_type);
5731
5732 if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
5733 return false;
5734
5735 return tree_int_cst_lt (size, osize) != 0;
5736}
5737
a1ab4c31
AC
5738/* Perform final processing on global variables. */
5739
65444786
EB
5740static GTY (()) tree dummy_global;
5741
a1ab4c31
AC
5742void
5743gnat_write_global_declarations (void)
5744{
10e4d056
EB
5745 unsigned int i;
5746 tree iter;
5747
65444786
EB
5748 /* If we have declared types as used at the global level, insert them in
5749 the global hash table. We use a dummy variable for this purpose. */
9771b263 5750 if (types_used_by_cur_var_decl && !types_used_by_cur_var_decl->is_empty ())
65444786 5751 {
35e8bcf4 5752 struct varpool_node *node;
d3c268ab
EB
5753 char *label;
5754
5755 ASM_FORMAT_PRIVATE_NAME (label, first_global_object_name, 0);
65444786 5756 dummy_global
d3c268ab
EB
5757 = build_decl (BUILTINS_LOCATION, VAR_DECL, get_identifier (label),
5758 void_type_node);
dd25fe0a 5759 DECL_HARD_REGISTER (dummy_global) = 1;
65444786 5760 TREE_STATIC (dummy_global) = 1;
5d59b5e1 5761 node = varpool_node_for_decl (dummy_global);
dd25fe0a 5762 node->definition = 1;
67348ccc 5763 node->force_output = 1;
65444786 5764
9771b263 5765 while (!types_used_by_cur_var_decl->is_empty ())
65444786 5766 {
9771b263 5767 tree t = types_used_by_cur_var_decl->pop ();
65444786
EB
5768 types_used_by_var_decl_insert (t, dummy_global);
5769 }
5770 }
5771
10e4d056
EB
5772 /* Output debug information for all global type declarations first. This
5773 ensures that global types whose compilation hasn't been finalized yet,
5774 for example pointers to Taft amendment types, have their compilation
5775 finalized in the right context. */
9771b263 5776 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
10e4d056
EB
5777 if (TREE_CODE (iter) == TYPE_DECL)
5778 debug_hooks->global_decl (iter);
5779
65d630d4
JH
5780 /* Proceed to optimize and emit assembly. */
5781 finalize_compilation_unit ();
a1ab4c31 5782
10e4d056
EB
5783 /* After cgraph has had a chance to emit everything that's going to
5784 be emitted, output debug information for the rest of globals. */
5785 if (!seen_error ())
5786 {
5787 timevar_push (TV_SYMOUT);
9771b263 5788 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
10e4d056
EB
5789 if (TREE_CODE (iter) != TYPE_DECL)
5790 debug_hooks->global_decl (iter);
5791 timevar_pop (TV_SYMOUT);
5792 }
a1ab4c31
AC
5793}
5794
5795/* ************************************************************************
5796 * * GCC builtins support *
5797 * ************************************************************************ */
5798
5799/* The general scheme is fairly simple:
5800
5801 For each builtin function/type to be declared, gnat_install_builtins calls
5802 internal facilities which eventually get to gnat_push_decl, which in turn
 5803 tracks the builtin function decls so declared in the 'builtin_decls' global
 5804 data structure. When an Intrinsic subprogram declaration is processed, we
 5805 search this global data structure to retrieve the associated BUILT_IN DECL
5806 node. */
5807
5808/* Search the chain of currently available builtin declarations for a node
5809 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
5810 found, if any, or NULL_TREE otherwise. */
5811tree
5812builtin_decl_for (tree name)
5813{
5814 unsigned i;
5815 tree decl;
5816
9771b263 5817 FOR_EACH_VEC_SAFE_ELT (builtin_decls, i, decl)
a1ab4c31
AC
5818 if (DECL_NAME (decl) == name)
5819 return decl;
5820
5821 return NULL_TREE;
5822}
5823
5824/* The code below eventually exposes gnat_install_builtins, which declares
5825 the builtin types and functions we might need, either internally or as
5826 user accessible facilities.
5827
5828 ??? This is a first implementation shot, still in rough shape. It is
 5829 heavily inspired by the "C" family implementation, with chunks copied
5830 verbatim from there.
5831
5832 Two obvious TODO candidates are
5833 o Use a more efficient name/decl mapping scheme
5834 o Devise a middle-end infrastructure to avoid having to copy
5835 pieces between front-ends. */
5836
5837/* ----------------------------------------------------------------------- *
5838 * BUILTIN ELEMENTARY TYPES *
5839 * ----------------------------------------------------------------------- */
5840
5841/* Standard data types to be used in builtin argument declarations. */
5842
5843enum c_tree_index
5844{
5845 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
5846 CTI_STRING_TYPE,
5847 CTI_CONST_STRING_TYPE,
5848
5849 CTI_MAX
5850};
5851
5852static tree c_global_trees[CTI_MAX];
5853
5854#define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
5855#define string_type_node c_global_trees[CTI_STRING_TYPE]
5856#define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
5857
 5858/* ??? In addition to some attribute handlers, we currently don't support a
 5859 (small) number of builtin-types, which in turn inhibits support for a
5860 number of builtin functions. */
5861#define wint_type_node void_type_node
5862#define intmax_type_node void_type_node
5863#define uintmax_type_node void_type_node
5864
5865/* Build the void_list_node (void_type_node having been created). */
5866
5867static tree
5868build_void_list_node (void)
5869{
5870 tree t = build_tree_list (NULL_TREE, void_type_node);
5871 return t;
5872}
5873
5874/* Used to help initialize the builtin-types.def table. When a type of
5875 the correct size doesn't exist, use error_mark_node instead of NULL.
 5876 The latter results in segfaults even when a decl using the type doesn't
5877 get invoked. */
5878
5879static tree
5880builtin_type_for_size (int size, bool unsignedp)
5881{
ced57283 5882 tree type = gnat_type_for_size (size, unsignedp);
a1ab4c31
AC
5883 return type ? type : error_mark_node;
5884}
5885
5886/* Build/push the elementary type decls that builtin functions/types
5887 will need. */
5888
5889static void
5890install_builtin_elementary_types (void)
5891{
728936bb 5892 signed_size_type_node = gnat_signed_type (size_type_node);
a1ab4c31
AC
5893 pid_type_node = integer_type_node;
5894 void_list_node = build_void_list_node ();
5895
5896 string_type_node = build_pointer_type (char_type_node);
5897 const_string_type_node
5898 = build_pointer_type (build_qualified_type
5899 (char_type_node, TYPE_QUAL_CONST));
5900}
5901
5902/* ----------------------------------------------------------------------- *
5903 * BUILTIN FUNCTION TYPES *
5904 * ----------------------------------------------------------------------- */
5905
5906/* Now, builtin function types per se. */
5907
5908enum c_builtin_type
5909{
5910#define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
5911#define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
5912#define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
5913#define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
5914#define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5915#define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5916#define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
5917#define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
5918#define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
acf0174b 5919#define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7, ARG8) NAME,
5920#define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
5921#define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
5922#define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
5923#define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5924#define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5925#define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5926 NAME,
5927#define DEF_POINTER_TYPE(NAME, TYPE) NAME,
5928#include "builtin-types.def"
5929#undef DEF_PRIMITIVE_TYPE
5930#undef DEF_FUNCTION_TYPE_0
5931#undef DEF_FUNCTION_TYPE_1
5932#undef DEF_FUNCTION_TYPE_2
5933#undef DEF_FUNCTION_TYPE_3
5934#undef DEF_FUNCTION_TYPE_4
5935#undef DEF_FUNCTION_TYPE_5
5936#undef DEF_FUNCTION_TYPE_6
5937#undef DEF_FUNCTION_TYPE_7
acf0174b 5938#undef DEF_FUNCTION_TYPE_8
5939#undef DEF_FUNCTION_TYPE_VAR_0
5940#undef DEF_FUNCTION_TYPE_VAR_1
5941#undef DEF_FUNCTION_TYPE_VAR_2
5942#undef DEF_FUNCTION_TYPE_VAR_3
5943#undef DEF_FUNCTION_TYPE_VAR_4
5944#undef DEF_FUNCTION_TYPE_VAR_5
5945#undef DEF_POINTER_TYPE
5946 BT_LAST
5947};
5948
5949typedef enum c_builtin_type builtin_type;
5950
5951/* A temporary array used in communication with def_fn_type. */
5952static GTY(()) tree builtin_types[(int) BT_LAST + 1];
5953
5954/* A helper function for install_builtin_types. Build function type
5955 for DEF with return type RET and N arguments. If VAR is true, then the
5956 function should be variadic after those N arguments.
5957
5958 Takes special care not to ICE if any of the types involved are
5959 error_mark_node, which indicates that said type is not in fact available
5960   (see builtin_type_for_size), in which case the function type as a whole
5961 should be error_mark_node. */
5962
5963static void
5964def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
5965{
5966 tree t;
5967 tree *args = XALLOCAVEC (tree, n);
5968 va_list list;
5969 int i;
5970
5971 va_start (list, n);
5972 for (i = 0; i < n; ++i)
5973 {
c6bd4220 5974 builtin_type a = (builtin_type) va_arg (list, int);
5975 t = builtin_types[a];
5976 if (t == error_mark_node)
5977 goto egress;
e5b00edf 5978 args[i] = t;
a1ab4c31 5979 }
a1ab4c31 5980
5981 t = builtin_types[ret];
5982 if (t == error_mark_node)
5983 goto egress;
5984 if (var)
5985 t = build_varargs_function_type_array (t, n, args);
5986 else
5987 t = build_function_type_array (t, n, args);
5988
5989 egress:
5990 builtin_types[def] = t;
0edf1bb2 5991 va_end (list);
5992}
5993
5994/* Build the builtin function types and install them in the builtin_types
5995 array for later use in builtin function decls. */
5996
5997static void
5998install_builtin_function_types (void)
5999{
6000 tree va_list_ref_type_node;
6001 tree va_list_arg_type_node;
6002
6003 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
6004 {
6005 va_list_arg_type_node = va_list_ref_type_node =
6006 build_pointer_type (TREE_TYPE (va_list_type_node));
6007 }
6008 else
6009 {
6010 va_list_arg_type_node = va_list_type_node;
6011 va_list_ref_type_node = build_reference_type (va_list_type_node);
6012 }
6013
6014#define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
6015 builtin_types[ENUM] = VALUE;
6016#define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
6017 def_fn_type (ENUM, RETURN, 0, 0);
6018#define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
6019 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
6020#define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
6021 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
6022#define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
6023 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
6024#define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
6025 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
6026#define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
6027 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
6028#define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6029 ARG6) \
6030 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
6031#define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6032 ARG6, ARG7) \
6033 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
6034#define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6035 ARG6, ARG7, ARG8) \
6036 def_fn_type (ENUM, RETURN, 0, 8, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
6037 ARG7, ARG8);
6038#define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
6039 def_fn_type (ENUM, RETURN, 1, 0);
6040#define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
6041 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
6042#define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
6043 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
6044#define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
6045 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
6046#define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
6047 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
6048#define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
6049 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
6050#define DEF_POINTER_TYPE(ENUM, TYPE) \
6051 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
6052
6053#include "builtin-types.def"
6054
6055#undef DEF_PRIMITIVE_TYPE
6056#undef DEF_FUNCTION_TYPE_1
6057#undef DEF_FUNCTION_TYPE_2
6058#undef DEF_FUNCTION_TYPE_3
6059#undef DEF_FUNCTION_TYPE_4
6060#undef DEF_FUNCTION_TYPE_5
6061#undef DEF_FUNCTION_TYPE_6
6062#undef DEF_FUNCTION_TYPE_VAR_0
6063#undef DEF_FUNCTION_TYPE_VAR_1
6064#undef DEF_FUNCTION_TYPE_VAR_2
6065#undef DEF_FUNCTION_TYPE_VAR_3
6066#undef DEF_FUNCTION_TYPE_VAR_4
6067#undef DEF_FUNCTION_TYPE_VAR_5
6068#undef DEF_POINTER_TYPE
6069 builtin_types[(int) BT_LAST] = NULL_TREE;
6070}
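/* A hedged sketch of what one builtin-types.def entry turns into here: an
   entry of the form DEF_FUNCTION_TYPE_2 (BT_FN_FOO, BT_INT, BT_PTR, BT_PTR),
   where BT_FN_FOO is a made-up name used only for illustration, expands
   through the macros above into

     def_fn_type (BT_FN_FOO, BT_INT, 0, 2, BT_PTR, BT_PTR);

   i.e. builtin_types[BT_FN_FOO] becomes the type "int (void *, void *)",
   or error_mark_node if any of the component types is unavailable.  */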
6071
6072/* ----------------------------------------------------------------------- *
6073 * BUILTIN ATTRIBUTES *
6074 * ----------------------------------------------------------------------- */
6075
6076enum built_in_attribute
6077{
6078#define DEF_ATTR_NULL_TREE(ENUM) ENUM,
6079#define DEF_ATTR_INT(ENUM, VALUE) ENUM,
e384e6b5 6080#define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
6081#define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
6082#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
6083#include "builtin-attrs.def"
6084#undef DEF_ATTR_NULL_TREE
6085#undef DEF_ATTR_INT
e384e6b5 6086#undef DEF_ATTR_STRING
6087#undef DEF_ATTR_IDENT
6088#undef DEF_ATTR_TREE_LIST
6089 ATTR_LAST
6090};
6091
6092static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
6093
6094static void
6095install_builtin_attributes (void)
6096{
6097 /* Fill in the built_in_attributes array. */
6098#define DEF_ATTR_NULL_TREE(ENUM) \
6099 built_in_attributes[(int) ENUM] = NULL_TREE;
6100#define DEF_ATTR_INT(ENUM, VALUE) \
6101 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
6102#define DEF_ATTR_STRING(ENUM, VALUE) \
6103 built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
6104#define DEF_ATTR_IDENT(ENUM, STRING) \
6105 built_in_attributes[(int) ENUM] = get_identifier (STRING);
6106#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
6107 built_in_attributes[(int) ENUM] \
6108 = tree_cons (built_in_attributes[(int) PURPOSE], \
6109 built_in_attributes[(int) VALUE], \
6110 built_in_attributes[(int) CHAIN]);
6111#include "builtin-attrs.def"
6112#undef DEF_ATTR_NULL_TREE
6113#undef DEF_ATTR_INT
e384e6b5 6114#undef DEF_ATTR_STRING
6115#undef DEF_ATTR_IDENT
6116#undef DEF_ATTR_TREE_LIST
6117}
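/* A hedged sketch of one builtin-attrs.def expansion: an entry such as
   DEF_ATTR_TREE_LIST (ATTR_NOTHROW_LIST, ATTR_NOTHROW, ATTR_NULL, ATTR_NULL)
   becomes, through the macros above,

     built_in_attributes[(int) ATTR_NOTHROW_LIST]
       = tree_cons (built_in_attributes[(int) ATTR_NOTHROW],
                    built_in_attributes[(int) ATTR_NULL],
                    built_in_attributes[(int) ATTR_NULL]);

   yielding a one-element attribute list spelling "nothrow" that the builtin
   declarations below can attach directly.  */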
6118
6119/* Handle a "const" attribute; arguments as in
6120 struct attribute_spec.handler. */
6121
6122static tree
6123handle_const_attribute (tree *node, tree ARG_UNUSED (name),
6124 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6125 bool *no_add_attrs)
6126{
6127 if (TREE_CODE (*node) == FUNCTION_DECL)
6128 TREE_READONLY (*node) = 1;
6129 else
6130 *no_add_attrs = true;
6131
6132 return NULL_TREE;
6133}
6134
6135/* Handle a "nothrow" attribute; arguments as in
6136 struct attribute_spec.handler. */
6137
6138static tree
6139handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
6140 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6141 bool *no_add_attrs)
6142{
6143 if (TREE_CODE (*node) == FUNCTION_DECL)
6144 TREE_NOTHROW (*node) = 1;
6145 else
6146 *no_add_attrs = true;
6147
6148 return NULL_TREE;
6149}
6150
6151/* Handle a "pure" attribute; arguments as in
6152 struct attribute_spec.handler. */
6153
6154static tree
6155handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6156 int ARG_UNUSED (flags), bool *no_add_attrs)
6157{
6158 if (TREE_CODE (*node) == FUNCTION_DECL)
6159 DECL_PURE_P (*node) = 1;
6160 /* ??? TODO: Support types. */
6161 else
6162 {
6163 warning (OPT_Wattributes, "%qs attribute ignored",
6164 IDENTIFIER_POINTER (name));
6165 *no_add_attrs = true;
6166 }
6167
6168 return NULL_TREE;
6169}
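/* What the three handlers above enable at the user level; GNU C spellings
   are shown, the Ada side reaches them through Machine_Attribute pragmas
   (e.g. pragma Machine_Attribute (Square, "const")).  The declarations and
   names are illustrative only.  */

extern int example_square (int) __attribute__ ((const));
extern void example_release (void *) __attribute__ ((nothrow));
extern int example_checksum (const char *) __attribute__ ((pure));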
6170
6171/* Handle a "no vops" attribute; arguments as in
6172 struct attribute_spec.handler. */
6173
6174static tree
6175handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
6176 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6177 bool *ARG_UNUSED (no_add_attrs))
6178{
6179 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
6180 DECL_IS_NOVOPS (*node) = 1;
6181 return NULL_TREE;
6182}
6183
6184/* Helper for nonnull attribute handling; fetch the operand number
6185 from the attribute argument list. */
6186
6187static bool
6188get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
6189{
6190 /* Verify the arg number is a constant. */
807e902e 6191 if (!tree_fits_uhwi_p (arg_num_expr))
6192 return false;
6193
6194 *valp = TREE_INT_CST_LOW (arg_num_expr);
6195 return true;
6196}
6197
6198/* Handle the "nonnull" attribute. */
6199static tree
6200handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
6201 tree args, int ARG_UNUSED (flags),
6202 bool *no_add_attrs)
6203{
6204 tree type = *node;
6205 unsigned HOST_WIDE_INT attr_arg_num;
6206
6207 /* If no arguments are specified, all pointer arguments should be
6208 non-null. Verify a full prototype is given so that the arguments
6209 will have the correct types when we actually check them later. */
6210 if (!args)
6211 {
f4da8dce 6212 if (!prototype_p (type))
6213 {
6214 error ("nonnull attribute without arguments on a non-prototype");
6215 *no_add_attrs = true;
6216 }
6217 return NULL_TREE;
6218 }
6219
6220 /* Argument list specified. Verify that each argument number references
6221 a pointer argument. */
6222 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
6223 {
6224 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
6225
6226 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
6227 {
6228 error ("nonnull argument has invalid operand number (argument %lu)",
6229 (unsigned long) attr_arg_num);
6230 *no_add_attrs = true;
6231 return NULL_TREE;
6232 }
6233
d7d058c5 6234 if (prototype_p (type))
a1ab4c31 6235 {
6236 function_args_iterator iter;
6237 tree argument;
6238
6239 function_args_iter_init (&iter, type);
6240 for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
a1ab4c31 6241 {
d7d058c5 6242 argument = function_args_iter_cond (&iter);
6243 if (!argument || ck_num == arg_num)
6244 break;
6245 }
6246
6247 if (!argument
d7d058c5 6248 || TREE_CODE (argument) == VOID_TYPE)
a1ab4c31 6249 {
6250 error ("nonnull argument with out-of-range operand number "
6251 "(argument %lu, operand %lu)",
6252 (unsigned long) attr_arg_num, (unsigned long) arg_num);
6253 *no_add_attrs = true;
6254 return NULL_TREE;
6255 }
6256
d7d058c5 6257 if (TREE_CODE (argument) != POINTER_TYPE)
a1ab4c31 6258 {
6259 error ("nonnull argument references non-pointer operand "
6260 "(argument %lu, operand %lu)",
6261 (unsigned long) attr_arg_num, (unsigned long) arg_num);
6262 *no_add_attrs = true;
6263 return NULL_TREE;
6264 }
6265 }
6266 }
6267
6268 return NULL_TREE;
6269}
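/* Illustrative GNU C uses accepted by the handler above (names are ours):
   with no arguments, every pointer parameter must be non-null; with
   arguments, only the listed 1-based parameter positions, which must
   designate pointer parameters.  */

extern void example_copy_all (void *, const void *)
  __attribute__ ((nonnull));
extern void example_copy_n (void *, const void *, int)
  __attribute__ ((nonnull (1, 2)));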
6270
6271/* Handle a "sentinel" attribute. */
6272
6273static tree
6274handle_sentinel_attribute (tree *node, tree name, tree args,
6275 int ARG_UNUSED (flags), bool *no_add_attrs)
6276{
f4da8dce 6277 if (!prototype_p (*node))
6278 {
6279 warning (OPT_Wattributes,
6280 "%qs attribute requires prototypes with named arguments",
6281 IDENTIFIER_POINTER (name));
6282 *no_add_attrs = true;
6283 }
6284 else
6285 {
dcf0c47e 6286 if (!stdarg_p (*node))
6287 {
6288 warning (OPT_Wattributes,
6289 "%qs attribute only applies to variadic functions",
6290 IDENTIFIER_POINTER (name));
6291 *no_add_attrs = true;
6292 }
6293 }
6294
6295 if (args)
6296 {
6297 tree position = TREE_VALUE (args);
6298
6299 if (TREE_CODE (position) != INTEGER_CST)
6300 {
6301 warning (0, "requested position is not an integer constant");
6302 *no_add_attrs = true;
6303 }
6304 else
6305 {
6306 if (tree_int_cst_lt (position, integer_zero_node))
6307 {
6308 warning (0, "requested position is less than zero");
6309 *no_add_attrs = true;
6310 }
6311 }
6312 }
6313
6314 return NULL_TREE;
6315}
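/* Illustrative GNU C uses accepted by the handler above (names are ours):
   the function must be variadic with at least one named parameter, and
   calls must terminate the variable arguments with a null pointer; the
   optional position counts back from the end, 0 (the default) meaning the
   very last argument.  */

extern char *example_concat (const char *, ...)
  __attribute__ ((sentinel));
extern void example_exec_list (const char *, ...)
  __attribute__ ((sentinel (1)));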
6316
6317/* Handle a "noreturn" attribute; arguments as in
6318 struct attribute_spec.handler. */
6319
6320static tree
6321handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6322 int ARG_UNUSED (flags), bool *no_add_attrs)
6323{
6324 tree type = TREE_TYPE (*node);
6325
6326 /* See FIXME comment in c_common_attribute_table. */
6327 if (TREE_CODE (*node) == FUNCTION_DECL)
6328 TREE_THIS_VOLATILE (*node) = 1;
6329 else if (TREE_CODE (type) == POINTER_TYPE
6330 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
6331 TREE_TYPE (*node)
6332 = build_pointer_type
6333 (build_type_variant (TREE_TYPE (type),
6334 TYPE_READONLY (TREE_TYPE (type)), 1));
6335 else
6336 {
6337 warning (OPT_Wattributes, "%qs attribute ignored",
6338 IDENTIFIER_POINTER (name));
6339 *no_add_attrs = true;
6340 }
6341
6342 return NULL_TREE;
6343}
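/* An illustrative GNU C use accepted by the handler above (the name is
   ours); on the Ada side the same effect is normally obtained with
   pragma No_Return.  The handler also knows how to attach the property
   to pointer-to-function types.  */

extern void example_fatal_error (const char *) __attribute__ ((noreturn));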
6344
6345/* Handle a "leaf" attribute; arguments as in
6346 struct attribute_spec.handler. */
6347
6348static tree
6349handle_leaf_attribute (tree *node, tree name,
6350 tree ARG_UNUSED (args),
6351 int ARG_UNUSED (flags), bool *no_add_attrs)
6352{
6353 if (TREE_CODE (*node) != FUNCTION_DECL)
6354 {
6355 warning (OPT_Wattributes, "%qE attribute ignored", name);
6356 *no_add_attrs = true;
6357 }
6358 if (!TREE_PUBLIC (*node))
6359 {
32a5388a 6360 warning (OPT_Wattributes, "%qE attribute has no effect", name);
6361 *no_add_attrs = true;
6362 }
6363
6364 return NULL_TREE;
6365}
6366
6367/* Handle a "malloc" attribute; arguments as in
6368 struct attribute_spec.handler. */
6369
6370static tree
6371handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6372 int ARG_UNUSED (flags), bool *no_add_attrs)
6373{
6374 if (TREE_CODE (*node) == FUNCTION_DECL
6375 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
6376 DECL_IS_MALLOC (*node) = 1;
6377 else
6378 {
6379 warning (OPT_Wattributes, "%qs attribute ignored",
6380 IDENTIFIER_POINTER (name));
6381 *no_add_attrs = true;
6382 }
6383
6384 return NULL_TREE;
6385}
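/* An illustrative GNU C use accepted by the handler above (the name is
   ours): the attribute is only honored on functions returning a pointer,
   and tells the optimizers that the returned storage does not alias any
   other live pointer.  */

extern void *example_pool_alloc (unsigned long) __attribute__ ((malloc));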
6386
6387/* Fake handler for attributes we don't properly support. */
6388
6389tree
6390fake_attribute_handler (tree * ARG_UNUSED (node),
6391 tree ARG_UNUSED (name),
6392 tree ARG_UNUSED (args),
6393 int ARG_UNUSED (flags),
6394 bool * ARG_UNUSED (no_add_attrs))
6395{
6396 return NULL_TREE;
6397}
6398
6399/* Handle a "type_generic" attribute. */
6400
6401static tree
6402handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
6403 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6404 bool * ARG_UNUSED (no_add_attrs))
6405{
6406 /* Ensure we have a function type. */
6407 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
b4680ca1 6408
a1ab4c31 6409 /* Ensure we have a variadic function. */
dcf0c47e 6410 gcc_assert (!prototype_p (*node) || stdarg_p (*node));
6411
6412 return NULL_TREE;
6413}
6414
6415/* Handle a "vector_size" attribute; arguments as in
6416 struct attribute_spec.handler. */
6417
6418static tree
6419handle_vector_size_attribute (tree *node, tree name, tree args,
e63b36bd 6420 int ARG_UNUSED (flags), bool *no_add_attrs)
2724e58f 6421{
6422 tree type = *node;
6423 tree vector_type;
6424
6425 *no_add_attrs = true;
6426
6427 /* We need to provide for vector pointers, vector arrays, and
6428 functions returning vectors. For example:
6429
6430 __attribute__((vector_size(16))) short *foo;
6431
6432 In this case, the mode is SI, but the type being modified is
6433 HI, so we need to look further. */
6434 while (POINTER_TYPE_P (type)
6435 || TREE_CODE (type) == FUNCTION_TYPE
132a5459 6436 || TREE_CODE (type) == ARRAY_TYPE)
6437 type = TREE_TYPE (type);
6438
6439 vector_type = build_vector_type_for_size (type, TREE_VALUE (args), name);
6440 if (!vector_type)
6441 return NULL_TREE;
6442
6443 /* Build back pointers if needed. */
e63b36bd 6444 *node = reconstruct_complex_type (*node, vector_type);
6445
6446 return NULL_TREE;
6447}
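/* An illustrative GNU C use accepted by the handler above (the typedef name
   is ours): 16 bytes of "int" yield a 4-element integer vector type.  The
   loop over pointer, array and function layers is what also makes the
   pointer variant from the comment above work.  */

typedef int example_v4si __attribute__ ((vector_size (16)));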
6448
6449/* Handle a "vector_type" attribute; arguments as in
6450 struct attribute_spec.handler. */
6451
6452static tree
6453handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
e63b36bd 6454 int ARG_UNUSED (flags), bool *no_add_attrs)
7948ae37 6455{
6456 tree type = *node;
6457 tree vector_type;
6458
6459 *no_add_attrs = true;
6460
e63b36bd 6461 if (TREE_CODE (type) != ARRAY_TYPE)
6462 {
6463 error ("attribute %qs applies to array types only",
6464 IDENTIFIER_POINTER (name));
6465 return NULL_TREE;
6466 }
6467
6468 vector_type = build_vector_type_for_array (type, name);
6469 if (!vector_type)
6470 return NULL_TREE;
6471
6472 TYPE_REPRESENTATIVE_ARRAY (vector_type) = type;
6473 *node = vector_type;
6474
6475 return NULL_TREE;
6476}
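/* A hedged sketch of how this handler is typically reached from Ada: the
   attribute applies to a constrained array type via a Machine_Attribute
   pragma, turning the array into the corresponding vector type, e.g.

     type V4SI is array (1 .. 4) of Integer;
     pragma Machine_Attribute (V4SI, "vector_type");  */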
6477
6478/* ----------------------------------------------------------------------- *
6479 * BUILTIN FUNCTIONS *
6480 * ----------------------------------------------------------------------- */
6481
6482/* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
6483 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
6484 if nonansi_p and flag_no_nonansi_builtin. */
6485
6486static void
6487def_builtin_1 (enum built_in_function fncode,
6488 const char *name,
6489 enum built_in_class fnclass,
6490 tree fntype, tree libtype,
6491 bool both_p, bool fallback_p,
6492 bool nonansi_p ATTRIBUTE_UNUSED,
6493 tree fnattrs, bool implicit_p)
6494{
6495 tree decl;
6496 const char *libname;
6497
6498  /* Preserve an already installed decl.  It most likely was set up in advance
6499 (e.g. as part of the internal builtins) for specific reasons. */
e79983f4 6500 if (builtin_decl_explicit (fncode) != NULL_TREE)
6501 return;
6502
6503 gcc_assert ((!both_p && !fallback_p)
6504 || !strncmp (name, "__builtin_",
6505 strlen ("__builtin_")));
6506
6507 libname = name + strlen ("__builtin_");
6508 decl = add_builtin_function (name, fntype, fncode, fnclass,
6509 (fallback_p ? libname : NULL),
6510 fnattrs);
6511 if (both_p)
6512 /* ??? This is normally further controlled by command-line options
6513 like -fno-builtin, but we don't have them for Ada. */
6514 add_builtin_function (libname, libtype, fncode, fnclass,
6515 NULL, fnattrs);
6516
e79983f4 6517 set_builtin_decl (fncode, decl, implicit_p);
6518}
6519
6520static int flag_isoc94 = 0;
6521static int flag_isoc99 = 0;
22869a37 6522static int flag_isoc11 = 0;
6523
6524/* Install what the common builtins.def offers. */
6525
6526static void
6527install_builtin_functions (void)
6528{
6529#define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
6530 NONANSI_P, ATTRS, IMPLICIT, COND) \
6531 if (NAME && COND) \
6532 def_builtin_1 (ENUM, NAME, CLASS, \
6533 builtin_types[(int) TYPE], \
6534 builtin_types[(int) LIBTYPE], \
6535 BOTH_P, FALLBACK_P, NONANSI_P, \
6536 built_in_attributes[(int) ATTRS], IMPLICIT);
6537#include "builtins.def"
6538#undef DEF_BUILTIN
6539}
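/* A hedged sketch of one expansion: the builtins.def entry for memcpy
   roughly becomes (modulo the exact type and attribute enum names)

     def_builtin_1 (BUILT_IN_MEMCPY, "__builtin_memcpy", BUILT_IN_NORMAL,
                    builtin_types[(int) BT_FN_PTR_PTR_CONST_PTR_SIZE],
                    builtin_types[(int) BT_FN_PTR_PTR_CONST_PTR_SIZE],
                    true, true, false,
                    built_in_attributes[(int) ATTR_NOTHROW_NONNULL_LEAF],
                    true);

   which declares both __builtin_memcpy and plain memcpy.  */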
6540
6541/* ----------------------------------------------------------------------- *
6542 * BUILTIN FUNCTIONS *
6543 * ----------------------------------------------------------------------- */
6544
6545/* Install the builtin functions we might need. */
6546
6547void
6548gnat_install_builtins (void)
6549{
6550 install_builtin_elementary_types ();
6551 install_builtin_function_types ();
6552 install_builtin_attributes ();
6553
6554 /* Install builtins used by generic middle-end pieces first. Some of these
6555 know about internal specificities and control attributes accordingly, for
6556 instance __builtin_alloca vs no-throw and -fstack-check. We will ignore
6557 the generic definition from builtins.def. */
384c400a 6558 build_common_builtin_nodes ();
6559
6560 /* Now, install the target specific builtins, such as the AltiVec family on
6561 ppc, and the common set as exposed by builtins.def. */
6562 targetm.init_builtins ();
6563 install_builtin_functions ();
6564}
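/* At the Ada level, the builtins installed here are typically reached with
   an intrinsic import, for instance (sketch, names are illustrative):

     function Builtin_Popcount (X : Interfaces.C.unsigned) return Integer;
     pragma Import (Intrinsic, Builtin_Popcount, "__builtin_popcount");  */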
6565
6566#include "gt-ada-utils.h"
6567#include "gtype-ada.h"