]> gcc.gnu.org Git - gcc.git/blame - gcc/ada/gcc-interface/utils.c
Add a flag to control the balance between GNAT encodings and std. DWARF
[gcc.git] / gcc / ada / gcc-interface / utils.c
CommitLineData
a1ab4c31
AC
1/****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
e1872645 9 * Copyright (C) 1992-2015, Free Software Foundation, Inc. *
a1ab4c31
AC
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
25
a1ab4c31
AC
26#include "config.h"
27#include "system.h"
28#include "coretypes.h"
2adfab87
AM
29#include "target.h"
30#include "function.h"
a1ab4c31 31#include "tree.h"
d8a2d370 32#include "stringpool.h"
2adfab87
AM
33#include "cgraph.h"
34#include "diagnostic.h"
35#include "alias.h"
36#include "fold-const.h"
d8a2d370
DN
37#include "stor-layout.h"
38#include "attribs.h"
39#include "varasm.h"
a1ab4c31
AC
40#include "toplev.h"
41#include "output.h"
a1ab4c31
AC
42#include "debug.h"
43#include "convert.h"
677f3fa8 44#include "common/common-target.h"
8713b7e4 45#include "langhooks.h"
8713b7e4 46#include "tree-dump.h"
a1ab4c31 47#include "tree-inline.h"
a1ab4c31
AC
48
49#include "ada.h"
50#include "types.h"
51#include "atree.h"
a1ab4c31 52#include "nlists.h"
a1ab4c31
AC
53#include "uintp.h"
54#include "fe.h"
55#include "sinfo.h"
56#include "einfo.h"
57#include "ada-tree.h"
58#include "gigi.h"
59
a1ab4c31
AC
60/* If nonzero, pretend we are allocating at global level. */
61int force_global;
62
caa9d12a
EB
63/* The default alignment of "double" floating-point types, i.e. floating
64 point types whose size is equal to 64 bits, or 0 if this alignment is
65 not specifically capped. */
66int double_float_alignment;
67
68/* The default alignment of "double" or larger scalar types, i.e. scalar
69 types whose size is greater or equal to 64 bits, or 0 if this alignment
70 is not specifically capped. */
71int double_scalar_alignment;
72
24228312
AC
73/* True if floating-point arithmetics may use wider intermediate results. */
74bool fp_arith_may_widen = true;
75
a1ab4c31
AC
76/* Tree nodes for the various types and decls we create. */
77tree gnat_std_decls[(int) ADT_LAST];
78
79/* Functions to call for each of the possible raise reasons. */
80tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
81
ca8e13e8 82/* Likewise, but with extra info for each of the possible raise reasons. */
437f8c1e
AC
83tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];
84
a1ab4c31
AC
85/* Forward declarations for handlers of attributes. */
86static tree handle_const_attribute (tree *, tree, tree, int, bool *);
87static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
88static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
89static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
90static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
91static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
92static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
0d6e14fd 93static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
f087ea44 94static tree handle_always_inline_attribute (tree *, tree, tree, int, bool *);
a1ab4c31
AC
95static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
96static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
2724e58f 97static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
7948ae37 98static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);
a1ab4c31
AC
99
100/* Fake handler for attributes we don't properly support, typically because
101 they'd require dragging a lot of the common-c front-end circuitry. */
102static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
103
104/* Table of machine-independent internal attributes for Ada. We support
105 this minimal set of attributes to accommodate the needs of builtins. */
106const struct attribute_spec gnat_internal_attribute_table[] =
107{
62d784f7
KT
108 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
109 affects_type_identity } */
110 { "const", 0, 0, true, false, false, handle_const_attribute,
111 false },
112 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute,
113 false },
114 { "pure", 0, 0, true, false, false, handle_pure_attribute,
115 false },
116 { "no vops", 0, 0, true, false, false, handle_novops_attribute,
117 false },
118 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute,
119 false },
120 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute,
121 false },
122 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute,
123 false },
124 { "leaf", 0, 0, true, false, false, handle_leaf_attribute,
125 false },
f087ea44
AC
126 { "always_inline",0, 0, true, false, false, handle_always_inline_attribute,
127 false },
62d784f7
KT
128 { "malloc", 0, 0, true, false, false, handle_malloc_attribute,
129 false },
130 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute,
131 false },
132
133 { "vector_size", 1, 1, false, true, false, handle_vector_size_attribute,
134 false },
135 { "vector_type", 0, 0, false, true, false, handle_vector_type_attribute,
136 false },
137 { "may_alias", 0, 0, false, true, false, NULL, false },
a1ab4c31
AC
138
139 /* ??? format and format_arg are heavy and not supported, which actually
140 prevents support for stdio builtins, which we however declare as part
141 of the common builtins.def contents. */
62d784f7
KT
142 { "format", 3, 3, false, true, true, fake_attribute_handler, false },
143 { "format_arg", 1, 1, false, true, true, fake_attribute_handler, false },
a1ab4c31 144
62d784f7 145 { NULL, 0, 0, false, false, false, NULL, false }
a1ab4c31
AC
146};
147
148/* Associates a GNAT tree node to a GCC tree node. It is used in
149 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
150 of `save_gnu_tree' for more info. */
151static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
152
153#define GET_GNU_TREE(GNAT_ENTITY) \
154 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
155
156#define SET_GNU_TREE(GNAT_ENTITY,VAL) \
157 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
158
159#define PRESENT_GNU_TREE(GNAT_ENTITY) \
160 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
161
162/* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
163static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
164
165#define GET_DUMMY_NODE(GNAT_ENTITY) \
166 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
167
168#define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
169 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
170
171#define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
172 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
173
174/* This variable keeps a table for types for each precision so that we only
175 allocate each of them once. Signed and unsigned types are kept separate.
176
177 Note that these types are only used when fold-const requests something
178 special. Perhaps we should NOT share these types; we'll see how it
179 goes later. */
180static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
181
182/* Likewise for float types, but record these by mode. */
183static GTY(()) tree float_types[NUM_MACHINE_MODES];
184
185/* For each binding contour we allocate a binding_level structure to indicate
186 the binding depth. */
187
d1b38208 188struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
a1ab4c31
AC
189 /* The binding level containing this one (the enclosing binding level). */
190 struct gnat_binding_level *chain;
191 /* The BLOCK node for this level. */
192 tree block;
193 /* If nonzero, the setjmp buffer that needs to be updated for any
194 variable-sized definition within this context. */
195 tree jmpbuf_decl;
196};
197
198/* The binding level currently in effect. */
199static GTY(()) struct gnat_binding_level *current_binding_level;
200
201/* A chain of gnat_binding_level structures awaiting reuse. */
202static GTY((deletable)) struct gnat_binding_level *free_binding_level;
203
228ee426
EB
204/* The context to be used for global declarations. */
205static GTY(()) tree global_context;
206
a22b794d
EB
207/* An array of global declarations. */
208static GTY(()) vec<tree, va_gc> *global_decls;
a1ab4c31
AC
209
210/* An array of builtin function declarations. */
9771b263 211static GTY(()) vec<tree, va_gc> *builtin_decls;
a1ab4c31 212
a1ab4c31
AC
213/* A chain of unused BLOCK nodes. */
214static GTY((deletable)) tree free_block_chain;
215
842d4ee2
EB
216/* A hash table of padded types. It is modelled on the generic type
217 hash table in tree.c, which must thus be used as a reference. */
d242408f
TS
218
219struct GTY((for_user)) pad_type_hash {
842d4ee2
EB
220 unsigned long hash;
221 tree type;
222};
223
6c907cff 224struct pad_type_hasher : ggc_cache_ptr_hash<pad_type_hash>
d242408f
TS
225{
226 static inline hashval_t hash (pad_type_hash *t) { return t->hash; }
227 static bool equal (pad_type_hash *a, pad_type_hash *b);
08ec2754 228 static int keep_cache_entry (pad_type_hash *&);
d242408f
TS
229};
230
231static GTY ((cache))
232 hash_table<pad_type_hasher> *pad_type_hash_table;
842d4ee2 233
a1ab4c31
AC
234static tree merge_sizes (tree, tree, tree, bool, bool);
235static tree compute_related_constant (tree, tree);
236static tree split_plus (tree, tree *);
ef4bddc2 237static tree float_type_for_precision (int, machine_mode);
a1ab4c31 238static tree convert_to_fat_pointer (tree, tree);
5c475ba9 239static unsigned int scale_by_factor_of (tree, unsigned int);
a1ab4c31 240static bool potential_alignment_gap (tree, tree, tree);
9a30c7c4
AC
241
242/* A linked list used as a queue to defer the initialization of the
243 DECL_CONTEXT attribute of ..._DECL nodes and of the TYPE_CONTEXT attribute
244 of ..._TYPE nodes. */
245struct deferred_decl_context_node
246{
247 tree decl; /* The ..._DECL node to work on. */
248 Entity_Id gnat_scope; /* The corresponding entity's Scope attribute. */
249 int force_global; /* force_global value when pushing DECL. */
250 vec<tree, va_heap, vl_ptr> types; /* A list of ..._TYPE nodes to propagate the
251 context to. */
252 struct deferred_decl_context_node *next; /* The next queue item. */
253};
254
255static struct deferred_decl_context_node *deferred_decl_context_queue = NULL;
256
257/* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
258 feed it with the elaboration of GNAT_SCOPE. */
259static struct deferred_decl_context_node *
260add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global);
261
262/* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
263 feed it with the DECL_CONTEXT computed as part of N as soon as it is
264 computed. */
265static void add_deferred_type_context (struct deferred_decl_context_node *n,
266 tree type);
a1ab4c31 267\f
842d4ee2 268/* Initialize data structures of the utils.c module. */
a1ab4c31
AC
269
270void
842d4ee2 271init_gnat_utils (void)
a1ab4c31 272{
842d4ee2 273 /* Initialize the association of GNAT nodes to GCC trees. */
766090c2 274 associate_gnat_to_gnu = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);
842d4ee2
EB
275
276 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
766090c2 277 dummy_node_table = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);
842d4ee2
EB
278
279 /* Initialize the hash table of padded types. */
d242408f 280 pad_type_hash_table = hash_table<pad_type_hasher>::create_ggc (512);
a1ab4c31
AC
281}
282
842d4ee2 283/* Destroy data structures of the utils.c module. */
f04b8d69
EB
284
285void
842d4ee2 286destroy_gnat_utils (void)
f04b8d69 287{
842d4ee2 288 /* Destroy the association of GNAT nodes to GCC trees. */
f04b8d69
EB
289 ggc_free (associate_gnat_to_gnu);
290 associate_gnat_to_gnu = NULL;
f04b8d69 291
842d4ee2
EB
292 /* Destroy the association of GNAT nodes to GCC trees as dummies. */
293 ggc_free (dummy_node_table);
294 dummy_node_table = NULL;
295
296 /* Destroy the hash table of padded types. */
d242408f 297 pad_type_hash_table->empty ();
842d4ee2 298 pad_type_hash_table = NULL;
842d4ee2
EB
299}
300\f
a1d8cc63
EB
301/* GNAT_ENTITY is a GNAT tree node for an entity. Associate GNU_DECL, a GCC
302 tree node, with GNAT_ENTITY. If GNU_DECL is not a ..._DECL node, abort.
303 If NO_CHECK is true, the latter check is suppressed.
a1ab4c31 304
a1d8cc63 305 If GNU_DECL is zero, reset a previous association. */
a1ab4c31
AC
306
307void
308save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
309{
310 /* Check that GNAT_ENTITY is not already defined and that it is being set
a1d8cc63 311 to something which is a decl. If that is not the case, this usually
a1ab4c31
AC
312 means GNAT_ENTITY is defined twice, but occasionally is due to some
313 Gigi problem. */
314 gcc_assert (!(gnu_decl
315 && (PRESENT_GNU_TREE (gnat_entity)
316 || (!no_check && !DECL_P (gnu_decl)))));
317
318 SET_GNU_TREE (gnat_entity, gnu_decl);
319}
320
a1d8cc63
EB
321/* GNAT_ENTITY is a GNAT tree node for an entity. Return the GCC tree node
322 that was associated with it. If there is no such tree node, abort.
a1ab4c31
AC
323
324 In some cases, such as delayed elaboration or expressions that need to
325 be elaborated only once, GNAT_ENTITY is really not an entity. */
326
327tree
328get_gnu_tree (Entity_Id gnat_entity)
329{
330 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
331 return GET_GNU_TREE (gnat_entity);
332}
333
334/* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
335
336bool
337present_gnu_tree (Entity_Id gnat_entity)
338{
339 return PRESENT_GNU_TREE (gnat_entity);
340}
341\f
a1ab4c31
AC
342/* Make a dummy type corresponding to GNAT_TYPE. */
343
344tree
345make_dummy_type (Entity_Id gnat_type)
346{
bf0b0e5e 347 Entity_Id gnat_equiv = Gigi_Equivalent_Type (Underlying_Type (gnat_type));
a1ab4c31
AC
348 tree gnu_type;
349
a1ab4c31
AC
350 /* If there was no equivalent type (can only happen when just annotating
351 types) or underlying type, go back to the original type. */
bf0b0e5e
AC
352 if (No (gnat_equiv))
353 gnat_equiv = gnat_type;
a1ab4c31
AC
354
355 /* If it there already a dummy type, use that one. Else make one. */
bf0b0e5e
AC
356 if (PRESENT_DUMMY_NODE (gnat_equiv))
357 return GET_DUMMY_NODE (gnat_equiv);
a1ab4c31
AC
358
359 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
360 an ENUMERAL_TYPE. */
bf0b0e5e
AC
361 gnu_type = make_node (Is_Record_Type (gnat_equiv)
362 ? tree_code_for_record_type (gnat_equiv)
a1ab4c31
AC
363 : ENUMERAL_TYPE);
364 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
365 TYPE_DUMMY_P (gnu_type) = 1;
10069d53
EB
366 TYPE_STUB_DECL (gnu_type)
367 = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
bf0b0e5e 368 if (Is_By_Reference_Type (gnat_equiv))
a0b8b1b7 369 TYPE_BY_REFERENCE_P (gnu_type) = 1;
a1ab4c31 370
bf0b0e5e 371 SET_DUMMY_NODE (gnat_equiv, gnu_type);
a1ab4c31
AC
372
373 return gnu_type;
374}
e3edbd56
EB
375
376/* Return the dummy type that was made for GNAT_TYPE, if any. */
377
378tree
379get_dummy_type (Entity_Id gnat_type)
380{
381 return GET_DUMMY_NODE (gnat_type);
382}
383
384/* Build dummy fat and thin pointer types whose designated type is specified
385 by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter. */
386
387void
388build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
389{
390 tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
391 tree gnu_fat_type, fields, gnu_object_type;
392
393 gnu_template_type = make_node (RECORD_TYPE);
394 TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
395 TYPE_DUMMY_P (gnu_template_type) = 1;
396 gnu_ptr_template = build_pointer_type (gnu_template_type);
397
398 gnu_array_type = make_node (ENUMERAL_TYPE);
399 TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
400 TYPE_DUMMY_P (gnu_array_type) = 1;
401 gnu_ptr_array = build_pointer_type (gnu_array_type);
402
403 gnu_fat_type = make_node (RECORD_TYPE);
404 /* Build a stub DECL to trigger the special processing for fat pointer types
405 in gnat_pushdecl. */
406 TYPE_NAME (gnu_fat_type)
407 = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
408 gnu_fat_type);
409 fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
410 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
411 DECL_CHAIN (fields)
412 = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
413 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
414 finish_fat_pointer_type (gnu_fat_type, fields);
415 SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
416 /* Suppress debug info until after the type is completed. */
417 TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;
418
419 gnu_object_type = make_node (RECORD_TYPE);
420 TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
421 TYPE_DUMMY_P (gnu_object_type) = 1;
422
423 TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
424 TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
425}
a1ab4c31 426\f
c99c0026 427/* Return true if we are in the global binding level. */
a1ab4c31 428
c99c0026 429bool
a1ab4c31
AC
430global_bindings_p (void)
431{
c99c0026 432 return force_global || current_function_decl == NULL_TREE;
a1ab4c31
AC
433}
434
a09d56d8 435/* Enter a new binding level. */
a1ab4c31
AC
436
437void
c6bd4220 438gnat_pushlevel (void)
a1ab4c31
AC
439{
440 struct gnat_binding_level *newlevel = NULL;
441
442 /* Reuse a struct for this binding level, if there is one. */
443 if (free_binding_level)
444 {
445 newlevel = free_binding_level;
446 free_binding_level = free_binding_level->chain;
447 }
448 else
766090c2 449 newlevel = ggc_alloc<gnat_binding_level> ();
a1ab4c31
AC
450
451 /* Use a free BLOCK, if any; otherwise, allocate one. */
452 if (free_block_chain)
453 {
454 newlevel->block = free_block_chain;
455 free_block_chain = BLOCK_CHAIN (free_block_chain);
456 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
457 }
458 else
459 newlevel->block = make_node (BLOCK);
460
461 /* Point the BLOCK we just made to its parent. */
462 if (current_binding_level)
463 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
464
a09d56d8
EB
465 BLOCK_VARS (newlevel->block) = NULL_TREE;
466 BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
a1ab4c31
AC
467 TREE_USED (newlevel->block) = 1;
468
a09d56d8 469 /* Add this level to the front of the chain (stack) of active levels. */
a1ab4c31
AC
470 newlevel->chain = current_binding_level;
471 newlevel->jmpbuf_decl = NULL_TREE;
472 current_binding_level = newlevel;
473}
474
475/* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
476 and point FNDECL to this BLOCK. */
477
478void
479set_current_block_context (tree fndecl)
480{
481 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
482 DECL_INITIAL (fndecl) = current_binding_level->block;
a09d56d8 483 set_block_for_group (current_binding_level->block);
a1ab4c31
AC
484}
485
486/* Set the jmpbuf_decl for the current binding level to DECL. */
487
488void
489set_block_jmpbuf_decl (tree decl)
490{
491 current_binding_level->jmpbuf_decl = decl;
492}
493
494/* Get the jmpbuf_decl, if any, for the current binding level. */
495
496tree
c6bd4220 497get_block_jmpbuf_decl (void)
a1ab4c31
AC
498{
499 return current_binding_level->jmpbuf_decl;
500}
501
a09d56d8 502/* Exit a binding level. Set any BLOCK into the current code group. */
a1ab4c31
AC
503
504void
c6bd4220 505gnat_poplevel (void)
a1ab4c31
AC
506{
507 struct gnat_binding_level *level = current_binding_level;
508 tree block = level->block;
509
510 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
72ac05b0 511 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
a1ab4c31
AC
512
513 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
514 are no variables free the block and merge its subblocks into those of its
a09d56d8 515 parent block. Otherwise, add it to the list of its parent. */
a1ab4c31
AC
516 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
517 ;
518 else if (BLOCK_VARS (block) == NULL_TREE)
519 {
520 BLOCK_SUBBLOCKS (level->chain->block)
61e46a7d
NF
521 = block_chainon (BLOCK_SUBBLOCKS (block),
522 BLOCK_SUBBLOCKS (level->chain->block));
a1ab4c31
AC
523 BLOCK_CHAIN (block) = free_block_chain;
524 free_block_chain = block;
525 }
526 else
527 {
528 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
529 BLOCK_SUBBLOCKS (level->chain->block) = block;
530 TREE_USED (block) = 1;
531 set_block_for_group (block);
532 }
533
534 /* Free this binding structure. */
535 current_binding_level = level->chain;
536 level->chain = free_binding_level;
537 free_binding_level = level;
538}
539
2231f17f
EB
540/* Exit a binding level and discard the associated BLOCK. */
541
542void
543gnat_zaplevel (void)
544{
545 struct gnat_binding_level *level = current_binding_level;
546 tree block = level->block;
547
548 BLOCK_CHAIN (block) = free_block_chain;
549 free_block_chain = block;
550
551 /* Free this binding structure. */
552 current_binding_level = level->chain;
553 level->chain = free_binding_level;
554 free_binding_level = level;
555}
a1ab4c31 556\f
4708440c
EB
557/* Set the context of TYPE and its parallel types (if any) to CONTEXT. */
558
559static void
560gnat_set_type_context (tree type, tree context)
561{
562 tree decl = TYPE_STUB_DECL (type);
563
564 TYPE_CONTEXT (type) = context;
565
566 while (decl && DECL_PARALLEL_TYPE (decl))
567 {
24d4b3d5
AC
568 tree parallel_type = DECL_PARALLEL_TYPE (decl);
569
570 /* Give a context to the parallel types and their stub decl, if any.
571 Some parallel types seems to be present in multiple parallel type
572 chains, so don't mess with their context if they already have one. */
573 if (TYPE_CONTEXT (parallel_type) == NULL_TREE)
574 {
575 if (TYPE_STUB_DECL (parallel_type) != NULL_TREE)
576 DECL_CONTEXT (TYPE_STUB_DECL (parallel_type)) = context;
577 TYPE_CONTEXT (parallel_type) = context;
578 }
579
4708440c
EB
580 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
581 }
582}
583
9a30c7c4
AC
584/* Return the innermost scope, starting at GNAT_NODE, we are be interested in
585 the debug info, or Empty if there is no such scope. If not NULL, set
586 IS_SUBPROGRAM to whether the returned entity is a subprogram. */
587
1d4b96e0 588Entity_Id
9a30c7c4
AC
589get_debug_scope (Node_Id gnat_node, bool *is_subprogram)
590{
591 Entity_Id gnat_entity;
592
593 if (is_subprogram)
594 *is_subprogram = false;
595
1d4b96e0
AC
596 if (Nkind (gnat_node) == N_Defining_Identifier
597 || Nkind (gnat_node) == N_Defining_Operator_Symbol)
9a30c7c4
AC
598 gnat_entity = Scope (gnat_node);
599 else
600 return Empty;
601
602 while (Present (gnat_entity))
603 {
604 switch (Ekind (gnat_entity))
605 {
606 case E_Function:
607 case E_Procedure:
608 if (Present (Protected_Body_Subprogram (gnat_entity)))
609 gnat_entity = Protected_Body_Subprogram (gnat_entity);
610
611 /* If the scope is a subprogram, then just rely on
612 current_function_decl, so that we don't have to defer
613 anything. This is needed because other places rely on the
614 validity of the DECL_CONTEXT attribute of FUNCTION_DECL nodes. */
615 if (is_subprogram)
616 *is_subprogram = true;
617 return gnat_entity;
618
619 case E_Record_Type:
620 case E_Record_Subtype:
621 return gnat_entity;
622
623 default:
624 /* By default, we are not interested in this particular scope: go to
625 the outer one. */
626 break;
627 }
628 gnat_entity = Scope (gnat_entity);
629 }
630 return Empty;
631}
632
633/* If N is NULL, set TYPE's context to CONTEXT. Defer this to the processing of
634 N otherwise. */
635
636static void
637defer_or_set_type_context (tree type,
638 tree context,
639 struct deferred_decl_context_node *n)
640{
641 if (n)
642 add_deferred_type_context (n, type);
643 else
644 gnat_set_type_context (type, context);
645}
646
647/* Return global_context. Create it if needed, first. */
648
649static tree
650get_global_context (void)
651{
652 if (!global_context)
881a5e60
PMR
653 {
654 global_context = build_translation_unit_decl (NULL_TREE);
655 debug_hooks->register_main_translation_unit (global_context);
656 }
9a30c7c4
AC
657 return global_context;
658}
659
228ee426
EB
660/* Record DECL as belonging to the current lexical scope and use GNAT_NODE
661 for location information and flag propagation. */
a1ab4c31
AC
662
663void
664gnat_pushdecl (tree decl, Node_Id gnat_node)
665{
9a30c7c4
AC
666 tree context = NULL_TREE;
667 struct deferred_decl_context_node *deferred_decl_context = NULL;
668
669 /* If explicitely asked to make DECL global or if it's an imported nested
670 object, short-circuit the regular Scope-based context computation. */
671 if (!((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || force_global == 1))
a1ab4c31 672 {
9a30c7c4
AC
673 /* Rely on the GNAT scope, or fallback to the current_function_decl if
674 the GNAT scope reached the global scope, if it reached a subprogram
675 or the declaration is a subprogram or a variable (for them we skip
676 intermediate context types because the subprogram body elaboration
677 machinery and the inliner both expect a subprogram context).
678
679 Falling back to current_function_decl is necessary for implicit
680 subprograms created by gigi, such as the elaboration subprograms. */
681 bool context_is_subprogram = false;
682 const Entity_Id gnat_scope
683 = get_debug_scope (gnat_node, &context_is_subprogram);
684
685 if (Present (gnat_scope)
686 && !context_is_subprogram
687 && TREE_CODE (decl) != FUNCTION_DECL
688 && TREE_CODE (decl) != VAR_DECL)
689 /* Always assume the scope has not been elaborated, thus defer the
690 context propagation to the time its elaboration will be
691 available. */
692 deferred_decl_context
693 = add_deferred_decl_context (decl, gnat_scope, force_global);
694
695 /* External declarations (when force_global > 0) may not be in a
696 local context. */
697 else if (current_function_decl != NULL_TREE && force_global == 0)
698 context = current_function_decl;
a1ab4c31
AC
699 }
700
9a30c7c4
AC
701 /* If either we are forced to be in global mode or if both the GNAT scope and
702 the current_function_decl did not help determining the context, use the
703 global scope. */
704 if (!deferred_decl_context && context == NULL_TREE)
705 context = get_global_context ();
706
707 /* Functions imported in another function are not really nested.
708 For really nested functions mark them initially as needing
709 a static chain for uses of that flag before unnesting;
710 lower_nested_functions will then recompute it. */
711 if (TREE_CODE (decl) == FUNCTION_DECL
712 && !TREE_PUBLIC (decl)
713 && context != NULL_TREE
714 && (TREE_CODE (context) == FUNCTION_DECL
715 || decl_function_context (context) != NULL_TREE))
716 DECL_STATIC_CHAIN (decl) = 1;
717
718 if (!deferred_decl_context)
719 DECL_CONTEXT (decl) = context;
720
228ee426 721 TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));
a1ab4c31
AC
722
723 /* Set the location of DECL and emit a declaration for it. */
e8fa3dcd 724 if (Present (gnat_node) && !renaming_from_generic_instantiation_p (gnat_node))
a1ab4c31 725 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
228ee426 726
a1ab4c31
AC
727 add_decl_expr (decl, gnat_node);
728
729 /* Put the declaration on the list. The list of declarations is in reverse
2231f17f
EB
730 order. The list will be reversed later. Put global declarations in the
731 globals list and local ones in the current block. But skip TYPE_DECLs
732 for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
733 with the debugger and aren't needed anyway. */
734 if (!(TREE_CODE (decl) == TYPE_DECL
735 && TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
a1ab4c31 736 {
9083aacd 737 if (DECL_EXTERNAL (decl))
a1ab4c31 738 {
a1ab4c31 739 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
9771b263 740 vec_safe_push (builtin_decls, decl);
a1ab4c31 741 }
9083aacd 742 else if (global_bindings_p ())
a22b794d 743 vec_safe_push (global_decls, decl);
9083aacd 744 else
a1ab4c31 745 {
a963da4d
EB
746 DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
747 BLOCK_VARS (current_binding_level->block) = decl;
a1ab4c31
AC
748 }
749 }
750
aef308d0 751 /* For the declaration of a type, set its name either if it isn't already
10069d53 752 set or if the previous type name was not derived from a source name.
a1ab4c31 753 We'd rather have the type named with a real name and all the pointer
aef308d0
PMR
754 types to the same object have the same node, except when the names are
755 both derived from source names. */
a1ab4c31
AC
756 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
757 {
758 tree t = TREE_TYPE (decl);
759
79714815
EB
760 /* Array and pointer types aren't tagged types in the C sense so we need
761 to generate a typedef in DWARF for them and make sure it is preserved,
762 unless the type is artificial. */
aef308d0 763 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
79714815
EB
764 && ((TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != POINTER_TYPE)
765 || DECL_ARTIFICIAL (decl)))
766 ;
767 /* For array and pointer types, create the DECL_ORIGINAL_TYPE that will
768 generate the typedef in DWARF. Also do that for fat pointer types
769 because, even though they are tagged types in the C sense, they are
770 still XUP types attached to the base array type at this point. */
aef308d0 771 else if (!DECL_ARTIFICIAL (decl)
79714815
EB
772 && (TREE_CODE (t) == ARRAY_TYPE
773 || TREE_CODE (t) == POINTER_TYPE
774 || TYPE_IS_FAT_POINTER_P (t)))
a1ab4c31 775 {
aef308d0 776 tree tt;
1e039275
EB
777 /* ??? Copy and original type are not supposed to be variant but we
778 really need a variant for the placeholder machinery to work. */
aef308d0
PMR
779 if (TYPE_IS_FAT_POINTER_P (t))
780 tt = build_variant_type_copy (t);
781 else
1e039275
EB
782 {
783 /* TYPE_NEXT_PTR_TO is a chain of main variants. */
784 tt = build_distinct_type_copy (TYPE_MAIN_VARIANT (t));
79714815
EB
785 if (TREE_CODE (t) == POINTER_TYPE)
786 TYPE_NEXT_PTR_TO (TYPE_MAIN_VARIANT (t)) = tt;
1e039275
EB
787 tt = build_qualified_type (tt, TYPE_QUALS (t));
788 }
a1ab4c31 789 TYPE_NAME (tt) = decl;
9a30c7c4
AC
790 defer_or_set_type_context (tt,
791 DECL_CONTEXT (decl),
792 deferred_decl_context);
a1ab4c31
AC
793 TREE_USED (tt) = TREE_USED (t);
794 TREE_TYPE (decl) = tt;
79714815 795 if (TYPE_NAME (t)
aef308d0
PMR
796 && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
797 && DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
40c88b94
EB
798 DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
799 else
800 DECL_ORIGINAL_TYPE (decl) = t;
79714815
EB
801 /* Array types need to have a name so that they can be related to
802 their GNAT encodings. */
803 if (TREE_CODE (t) == ARRAY_TYPE && !TYPE_NAME (t))
804 TYPE_NAME (t) = DECL_NAME (decl);
e3edbd56 805 t = NULL_TREE;
a1ab4c31 806 }
79714815 807 else if (TYPE_NAME (t)
aef308d0
PMR
808 && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
809 && DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
a1ab4c31
AC
810 ;
811 else
812 t = NULL_TREE;
813
79714815
EB
814 /* Propagate the name to all the variants, this is needed for the type
815 qualifiers machinery to work properly (see check_qualified_type).
816 Also propagate the context to them. Note that it will be propagated
817 to all parallel types too thanks to gnat_set_type_context. */
a1ab4c31
AC
818 if (t)
819 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
79714815
EB
820 /* ??? Because of the previous kludge, we can have variants of fat
821 pointer types with different names. */
822 if (!(TYPE_IS_FAT_POINTER_P (t)
823 && TYPE_NAME (t)
824 && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
d4d05b52
EB
825 {
826 TYPE_NAME (t) = decl;
9a30c7c4
AC
827 defer_or_set_type_context (t,
828 DECL_CONTEXT (decl),
829 deferred_decl_context);
d4d05b52 830 }
a1ab4c31
AC
831 }
832}
833\f
842d4ee2
EB
834/* Create a record type that contains a SIZE bytes long field of TYPE with a
835 starting bit position so that it is aligned to ALIGN bits, and leaving at
836 least ROOM bytes free before the field. BASE_ALIGN is the alignment the
0746af5e
EB
837 record is guaranteed to get. GNAT_NODE is used for the position of the
838 associated TYPE_DECL. */
842d4ee2
EB
839
840tree
841make_aligning_type (tree type, unsigned int align, tree size,
0746af5e 842 unsigned int base_align, int room, Node_Id gnat_node)
842d4ee2
EB
843{
844 /* We will be crafting a record type with one field at a position set to be
845 the next multiple of ALIGN past record'address + room bytes. We use a
846 record placeholder to express record'address. */
847 tree record_type = make_node (RECORD_TYPE);
848 tree record = build0 (PLACEHOLDER_EXPR, record_type);
849
850 tree record_addr_st
851 = convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));
852
853 /* The diagram below summarizes the shape of what we manipulate:
854
855 <--------- pos ---------->
856 { +------------+-------------+-----------------+
857 record =>{ |############| ... | field (type) |
858 { +------------+-------------+-----------------+
859 |<-- room -->|<- voffset ->|<---- size ----->|
860 o o
861 | |
862 record_addr vblock_addr
863
864 Every length is in sizetype bytes there, except "pos" which has to be
865 set as a bit position in the GCC tree for the record. */
866 tree room_st = size_int (room);
867 tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
868 tree voffset_st, pos, field;
869
9dba4b55 870 tree name = TYPE_IDENTIFIER (type);
842d4ee2 871
842d4ee2
EB
872 name = concat_name (name, "ALIGN");
873 TYPE_NAME (record_type) = name;
874
875 /* Compute VOFFSET and then POS. The next byte position multiple of some
876 alignment after some address is obtained by "and"ing the alignment minus
877 1 with the two's complement of the address. */
878 voffset_st = size_binop (BIT_AND_EXPR,
879 fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
880 size_int ((align / BITS_PER_UNIT) - 1));
881
882 /* POS = (ROOM + VOFFSET) * BIT_PER_UNIT, in bitsizetype. */
883 pos = size_binop (MULT_EXPR,
884 convert (bitsizetype,
885 size_binop (PLUS_EXPR, room_st, voffset_st)),
886 bitsize_unit_node);
887
888 /* Craft the GCC record representation. We exceptionally do everything
889 manually here because 1) our generic circuitry is not quite ready to
890 handle the complex position/size expressions we are setting up, 2) we
891 have a strong simplifying factor at hand: we know the maximum possible
892 value of voffset, and 3) we have to set/reset at least the sizes in
893 accordance with this maximum value anyway, as we need them to convey
894 what should be "alloc"ated for this type.
895
896 Use -1 as the 'addressable' indication for the field to prevent the
897 creation of a bitfield. We don't need one, it would have damaging
898 consequences on the alignment computation, and create_field_decl would
899 make one without this special argument, for instance because of the
900 complex position expression. */
901 field = create_field_decl (get_identifier ("F"), type, record_type, size,
902 pos, 1, -1);
903 TYPE_FIELDS (record_type) = field;
904
905 TYPE_ALIGN (record_type) = base_align;
906 TYPE_USER_ALIGN (record_type) = 1;
907
908 TYPE_SIZE (record_type)
909 = size_binop (PLUS_EXPR,
910 size_binop (MULT_EXPR, convert (bitsizetype, size),
911 bitsize_unit_node),
912 bitsize_int (align + room * BITS_PER_UNIT));
913 TYPE_SIZE_UNIT (record_type)
914 = size_binop (PLUS_EXPR, size,
915 size_int (room + align / BITS_PER_UNIT));
916
917 SET_TYPE_MODE (record_type, BLKmode);
918 relate_alias_sets (record_type, type, ALIAS_SET_COPY);
919
920 /* Declare it now since it will never be declared otherwise. This is
921 necessary to ensure that its subtrees are properly marked. */
74746d49 922 create_type_decl (name, record_type, true, false, gnat_node);
842d4ee2
EB
923
924 return record_type;
925}
926
927/* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
928 as the field type of a packed record if IN_RECORD is true, or as the
929 component type of a packed array if IN_RECORD is false. See if we can
930 rewrite it either as a type that has a non-BLKmode, which we can pack
931 tighter in the packed record case, or as a smaller type. If so, return
932 the new type. If not, return the original type. */
933
934tree
935make_packable_type (tree type, bool in_record)
936{
ae7e9ddd 937 unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type));
842d4ee2
EB
938 unsigned HOST_WIDE_INT new_size;
939 tree new_type, old_field, field_list = NULL_TREE;
940 unsigned int align;
941
942 /* No point in doing anything if the size is zero. */
943 if (size == 0)
944 return type;
945
946 new_type = make_node (TREE_CODE (type));
947
948 /* Copy the name and flags from the old type to that of the new.
949 Note that we rely on the pointer equality created here for
950 TYPE_NAME to look through conversions in various places. */
951 TYPE_NAME (new_type) = TYPE_NAME (type);
952 TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
953 TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
ee45a32d 954 TYPE_REVERSE_STORAGE_ORDER (new_type) = TYPE_REVERSE_STORAGE_ORDER (type);
842d4ee2
EB
955 if (TREE_CODE (type) == RECORD_TYPE)
956 TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);
957
958 /* If we are in a record and have a small size, set the alignment to
959 try for an integral mode. Otherwise set it to try for a smaller
960 type with BLKmode. */
961 if (in_record && size <= MAX_FIXED_MODE_SIZE)
962 {
963 align = ceil_pow2 (size);
964 TYPE_ALIGN (new_type) = align;
965 new_size = (size + align - 1) & -align;
966 }
967 else
968 {
969 unsigned HOST_WIDE_INT align;
970
971 /* Do not try to shrink the size if the RM size is not constant. */
972 if (TYPE_CONTAINS_TEMPLATE_P (type)
cc269bb6 973 || !tree_fits_uhwi_p (TYPE_ADA_SIZE (type)))
842d4ee2
EB
974 return type;
975
976 /* Round the RM size up to a unit boundary to get the minimal size
977 for a BLKmode record. Give up if it's already the size. */
eb1ce453 978 new_size = tree_to_uhwi (TYPE_ADA_SIZE (type));
842d4ee2
EB
979 new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
980 if (new_size == size)
981 return type;
982
983 align = new_size & -new_size;
984 TYPE_ALIGN (new_type) = MIN (TYPE_ALIGN (type), align);
985 }
986
987 TYPE_USER_ALIGN (new_type) = 1;
988
989 /* Now copy the fields, keeping the position and size as we don't want
990 to change the layout by propagating the packedness downwards. */
991 for (old_field = TYPE_FIELDS (type); old_field;
992 old_field = DECL_CHAIN (old_field))
993 {
994 tree new_field_type = TREE_TYPE (old_field);
995 tree new_field, new_size;
996
997 if (RECORD_OR_UNION_TYPE_P (new_field_type)
998 && !TYPE_FAT_POINTER_P (new_field_type)
cc269bb6 999 && tree_fits_uhwi_p (TYPE_SIZE (new_field_type)))
842d4ee2
EB
1000 new_field_type = make_packable_type (new_field_type, true);
1001
1002 /* However, for the last field in a not already packed record type
1003 that is of an aggregate type, we need to use the RM size in the
1004 packable version of the record type, see finish_record_type. */
1005 if (!DECL_CHAIN (old_field)
1006 && !TYPE_PACKED (type)
1007 && RECORD_OR_UNION_TYPE_P (new_field_type)
1008 && !TYPE_FAT_POINTER_P (new_field_type)
1009 && !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
1010 && TYPE_ADA_SIZE (new_field_type))
1011 new_size = TYPE_ADA_SIZE (new_field_type);
1012 else
1013 new_size = DECL_SIZE (old_field);
1014
1015 new_field
1016 = create_field_decl (DECL_NAME (old_field), new_field_type, new_type,
1017 new_size, bit_position (old_field),
1018 TYPE_PACKED (type),
1019 !DECL_NONADDRESSABLE_P (old_field));
1020
1021 DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (old_field);
1022 SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, old_field);
1023 if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
1024 DECL_QUALIFIER (new_field) = DECL_QUALIFIER (old_field);
1025
1026 DECL_CHAIN (new_field) = field_list;
1027 field_list = new_field;
1028 }
1029
1030 finish_record_type (new_type, nreverse (field_list), 2, false);
1031 relate_alias_sets (new_type, type, ALIAS_SET_COPY);
44e9e3ec
EB
1032 if (TYPE_STUB_DECL (type))
1033 SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
1034 DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));
842d4ee2
EB
1035
1036 /* If this is a padding record, we never want to make the size smaller
1037 than what was specified. For QUAL_UNION_TYPE, also copy the size. */
1038 if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
1039 {
1040 TYPE_SIZE (new_type) = TYPE_SIZE (type);
1041 TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
1042 new_size = size;
1043 }
1044 else
1045 {
1046 TYPE_SIZE (new_type) = bitsize_int (new_size);
1047 TYPE_SIZE_UNIT (new_type)
1048 = size_int ((new_size + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
1049 }
1050
1051 if (!TYPE_CONTAINS_TEMPLATE_P (type))
1052 SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));
1053
1054 compute_record_mode (new_type);
1055
1056 /* Try harder to get a packable type if necessary, for example
1057 in case the record itself contains a BLKmode field. */
1058 if (in_record && TYPE_MODE (new_type) == BLKmode)
1059 SET_TYPE_MODE (new_type,
1060 mode_for_size_tree (TYPE_SIZE (new_type), MODE_INT, 1));
1061
1062 /* If neither the mode nor the size has shrunk, return the old type. */
1063 if (TYPE_MODE (new_type) == BLKmode && new_size >= size)
1064 return type;
1065
1066 return new_type;
1067}
1068
1069/* Given a type TYPE, return a new type whose size is appropriate for SIZE.
1070 If TYPE is the best type, return it. Otherwise, make a new type. We
1071 only support new integral and pointer types. FOR_BIASED is true if
1072 we are making a biased type. */
1073
1074tree
1075make_type_from_size (tree type, tree size_tree, bool for_biased)
1076{
1077 unsigned HOST_WIDE_INT size;
1078 bool biased_p;
1079 tree new_type;
1080
1081 /* If size indicates an error, just return TYPE to avoid propagating
1082 the error. Likewise if it's too large to represent. */
cc269bb6 1083 if (!size_tree || !tree_fits_uhwi_p (size_tree))
842d4ee2
EB
1084 return type;
1085
ae7e9ddd 1086 size = tree_to_uhwi (size_tree);
842d4ee2
EB
1087
1088 switch (TREE_CODE (type))
1089 {
1090 case INTEGER_TYPE:
1091 case ENUMERAL_TYPE:
1092 case BOOLEAN_TYPE:
1093 biased_p = (TREE_CODE (type) == INTEGER_TYPE
1094 && TYPE_BIASED_REPRESENTATION_P (type));
1095
1096 /* Integer types with precision 0 are forbidden. */
1097 if (size == 0)
1098 size = 1;
1099
1100 /* Only do something if the type isn't a packed array type and doesn't
1101 already have the proper size and the size isn't too large. */
1102 if (TYPE_IS_PACKED_ARRAY_TYPE_P (type)
1103 || (TYPE_PRECISION (type) == size && biased_p == for_biased)
1104 || size > LONG_LONG_TYPE_SIZE)
1105 break;
1106
1107 biased_p |= for_biased;
1108 if (TYPE_UNSIGNED (type) || biased_p)
1109 new_type = make_unsigned_type (size);
1110 else
1111 new_type = make_signed_type (size);
1112 TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
1eb58520
AC
1113 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_MIN_VALUE (type));
1114 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_MAX_VALUE (type));
842d4ee2
EB
1115 /* Copy the name to show that it's essentially the same type and
1116 not a subrange type. */
1117 TYPE_NAME (new_type) = TYPE_NAME (type);
1118 TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
1119 SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
1120 return new_type;
1121
1122 case RECORD_TYPE:
1123 /* Do something if this is a fat pointer, in which case we
1124 may need to return the thin pointer. */
1125 if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
1126 {
ef4bddc2 1127 machine_mode p_mode = mode_for_size (size, MODE_INT, 0);
842d4ee2
EB
1128 if (!targetm.valid_pointer_mode (p_mode))
1129 p_mode = ptr_mode;
1130 return
1131 build_pointer_type_for_mode
1132 (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
1133 p_mode, 0);
1134 }
1135 break;
1136
1137 case POINTER_TYPE:
1138 /* Only do something if this is a thin pointer, in which case we
1139 may need to return the fat pointer. */
1140 if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
1141 return
1142 build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
1143 break;
1144
1145 default:
1146 break;
1147 }
1148
1149 return type;
1150}
1151
1152/* See if the data pointed to by the hash table slot is marked. */
1153
08ec2754
RS
1154int
1155pad_type_hasher::keep_cache_entry (pad_type_hash *&t)
842d4ee2 1156{
08ec2754 1157 return ggc_marked_p (t->type);
842d4ee2
EB
1158}
1159
d242408f 1160/* Return true iff the padded types are equivalent. */
842d4ee2 1161
d242408f
TS
1162bool
1163pad_type_hasher::equal (pad_type_hash *t1, pad_type_hash *t2)
842d4ee2 1164{
842d4ee2
EB
1165 tree type1, type2;
1166
1167 if (t1->hash != t2->hash)
1168 return 0;
1169
1170 type1 = t1->type;
1171 type2 = t2->type;
1172
ee45a32d
EB
1173 /* We consider that the padded types are equivalent if they pad the same type
1174 and have the same size, alignment, RM size and storage order. Taking the
1175 mode into account is redundant since it is determined by the others. */
842d4ee2
EB
1176 return
1177 TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
1178 && TYPE_SIZE (type1) == TYPE_SIZE (type2)
1179 && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
ee45a32d
EB
1180 && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2)
1181 && TYPE_REVERSE_STORAGE_ORDER (type1) == TYPE_REVERSE_STORAGE_ORDER (type2);
842d4ee2
EB
1182}
1183
5cb7516d
EB
1184/* Look up the padded TYPE in the hash table and return its canonical version
1185 if it exists; otherwise, insert it into the hash table. */
1186
1187static tree
1188lookup_and_insert_pad_type (tree type)
1189{
1190 hashval_t hashcode;
1191 struct pad_type_hash in, *h;
5cb7516d
EB
1192
1193 hashcode
1194 = iterative_hash_object (TYPE_HASH (TREE_TYPE (TYPE_FIELDS (type))), 0);
1195 hashcode = iterative_hash_expr (TYPE_SIZE (type), hashcode);
1196 hashcode = iterative_hash_hashval_t (TYPE_ALIGN (type), hashcode);
1197 hashcode = iterative_hash_expr (TYPE_ADA_SIZE (type), hashcode);
1198
1199 in.hash = hashcode;
1200 in.type = type;
d242408f 1201 h = pad_type_hash_table->find_with_hash (&in, hashcode);
5cb7516d
EB
1202 if (h)
1203 return h->type;
1204
1205 h = ggc_alloc<pad_type_hash> ();
1206 h->hash = hashcode;
1207 h->type = type;
d242408f 1208 *pad_type_hash_table->find_slot_with_hash (h, hashcode, INSERT) = h;
5cb7516d
EB
1209 return NULL_TREE;
1210}
1211
842d4ee2 1212/* Ensure that TYPE has SIZE and ALIGN. Make and return a new padded type
5cb7516d 1213 if needed. We have already verified that SIZE and ALIGN are large enough.
842d4ee2
EB
1214 GNAT_ENTITY is used to name the resulting record and to issue a warning.
1215 IS_COMPONENT_TYPE is true if this is being done for the component type of
1216 an array. IS_USER_TYPE is true if the original type needs to be completed.
1217 DEFINITION is true if this type is being defined. SET_RM_SIZE is true if
1218 the RM size of the resulting type is to be set to SIZE too. */
1219
1220tree
1221maybe_pad_type (tree type, tree size, unsigned int align,
1222 Entity_Id gnat_entity, bool is_component_type,
1223 bool is_user_type, bool definition, bool set_rm_size)
1224{
1225 tree orig_size = TYPE_SIZE (type);
44e9e3ec 1226 unsigned int orig_align = TYPE_ALIGN (type);
842d4ee2
EB
1227 tree record, field;
1228
1229 /* If TYPE is a padded type, see if it agrees with any size and alignment
1230 we were given. If so, return the original type. Otherwise, strip
1231 off the padding, since we will either be returning the inner type
1232 or repadding it. If no size or alignment is specified, use that of
1233 the original padded type. */
1234 if (TYPE_IS_PADDING_P (type))
1235 {
1236 if ((!size
44e9e3ec
EB
1237 || operand_equal_p (round_up (size, orig_align), orig_size, 0))
1238 && (align == 0 || align == orig_align))
842d4ee2
EB
1239 return type;
1240
1241 if (!size)
44e9e3ec 1242 size = orig_size;
842d4ee2 1243 if (align == 0)
44e9e3ec 1244 align = orig_align;
842d4ee2
EB
1245
1246 type = TREE_TYPE (TYPE_FIELDS (type));
1247 orig_size = TYPE_SIZE (type);
44e9e3ec 1248 orig_align = TYPE_ALIGN (type);
842d4ee2
EB
1249 }
1250
1251 /* If the size is either not being changed or is being made smaller (which
1252 is not done here and is only valid for bitfields anyway), show the size
1253 isn't changing. Likewise, clear the alignment if it isn't being
1254 changed. Then return if we aren't doing anything. */
1255 if (size
1256 && (operand_equal_p (size, orig_size, 0)
1257 || (TREE_CODE (orig_size) == INTEGER_CST
1258 && tree_int_cst_lt (size, orig_size))))
1259 size = NULL_TREE;
1260
44e9e3ec 1261 if (align == orig_align)
842d4ee2
EB
1262 align = 0;
1263
1264 if (align == 0 && !size)
1265 return type;
1266
1267 /* If requested, complete the original type and give it a name. */
1268 if (is_user_type)
1269 create_type_decl (get_entity_name (gnat_entity), type,
74746d49 1270 !Comes_From_Source (gnat_entity),
842d4ee2
EB
1271 !(TYPE_NAME (type)
1272 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1273 && DECL_IGNORED_P (TYPE_NAME (type))),
1274 gnat_entity);
1275
1276 /* We used to modify the record in place in some cases, but that could
1277 generate incorrect debugging information. So make a new record
1278 type and name. */
1279 record = make_node (RECORD_TYPE);
1280 TYPE_PADDING_P (record) = 1;
1281
1282 if (Present (gnat_entity))
1283 TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD");
1284
44e9e3ec 1285 TYPE_ALIGN (record) = align ? align : orig_align;
842d4ee2
EB
1286 TYPE_SIZE (record) = size ? size : orig_size;
1287 TYPE_SIZE_UNIT (record)
1288 = convert (sizetype,
1289 size_binop (CEIL_DIV_EXPR, TYPE_SIZE (record),
1290 bitsize_unit_node));
1291
1292 /* If we are changing the alignment and the input type is a record with
1293 BLKmode and a small constant size, try to make a form that has an
1294 integral mode. This might allow the padding record to also have an
1295 integral mode, which will be much more efficient. There is no point
1296 in doing so if a size is specified unless it is also a small constant
1297 size and it is incorrect to do so if we cannot guarantee that the mode
1298 will be naturally aligned since the field must always be addressable.
1299
1300 ??? This might not always be a win when done for a stand-alone object:
1301 since the nominal and the effective type of the object will now have
1302 different modes, a VIEW_CONVERT_EXPR will be required for converting
1303 between them and it might be hard to overcome afterwards, including
1304 at the RTL level when the stand-alone object is accessed as a whole. */
1305 if (align != 0
1306 && RECORD_OR_UNION_TYPE_P (type)
1307 && TYPE_MODE (type) == BLKmode
1308 && !TYPE_BY_REFERENCE_P (type)
1309 && TREE_CODE (orig_size) == INTEGER_CST
1310 && !TREE_OVERFLOW (orig_size)
1311 && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
1312 && (!size
1313 || (TREE_CODE (size) == INTEGER_CST
1314 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
1315 {
1316 tree packable_type = make_packable_type (type, true);
1317 if (TYPE_MODE (packable_type) != BLKmode
1318 && align >= TYPE_ALIGN (packable_type))
1319 type = packable_type;
1320 }
1321
1322 /* Now create the field with the original size. */
1366ba41
EB
1323 field = create_field_decl (get_identifier ("F"), type, record, orig_size,
1324 bitsize_zero_node, 0, 1);
842d4ee2
EB
1325 DECL_INTERNAL_P (field) = 1;
1326
1327 /* Do not emit debug info until after the auxiliary record is built. */
1328 finish_record_type (record, field, 1, false);
1329
1330 /* Set the RM size if requested. */
1331 if (set_rm_size)
1332 {
5cb7516d
EB
1333 tree canonical_pad_type;
1334
842d4ee2
EB
1335 SET_TYPE_ADA_SIZE (record, size ? size : orig_size);
1336
1337 /* If the padded type is complete and has constant size, we canonicalize
1338 it by means of the hash table. This is consistent with the language
1339 semantics and ensures that gigi and the middle-end have a common view
1340 of these padded types. */
5cb7516d
EB
1341 if (TREE_CONSTANT (TYPE_SIZE (record))
1342 && (canonical_pad_type = lookup_and_insert_pad_type (record)))
842d4ee2 1343 {
5cb7516d
EB
1344 record = canonical_pad_type;
1345 goto built;
842d4ee2
EB
1346 }
1347 }
1348
1349 /* Unless debugging information isn't being written for the input type,
1350 write a record that shows what we are a subtype of and also make a
1351 variable that indicates our size, if still variable. */
1352 if (TREE_CODE (orig_size) != INTEGER_CST
1353 && TYPE_NAME (record)
1354 && TYPE_NAME (type)
1355 && !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1356 && DECL_IGNORED_P (TYPE_NAME (type))))
1357 {
9dba4b55 1358 tree name = TYPE_IDENTIFIER (record);
c1a569ef
EB
1359 tree size_unit = TYPE_SIZE_UNIT (record);
1360
1361 /* A variable that holds the size is required even with no encoding since
1362 it will be referenced by debugging information attributes. At global
1363 level, we need a single variable across all translation units. */
1364 if (size
1365 && TREE_CODE (size) != INTEGER_CST
1366 && (definition || global_bindings_p ()))
1367 {
1368 size_unit
1369 = create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
1370 size_unit, true, global_bindings_p (),
1371 !definition && global_bindings_p (), false,
2056c5ed 1372 false, true, true, NULL, gnat_entity);
c1a569ef
EB
1373 TYPE_SIZE_UNIT (record) = size_unit;
1374 }
1375
1376 tree marker = make_node (RECORD_TYPE);
9dba4b55 1377 tree orig_name = TYPE_IDENTIFIER (type);
842d4ee2
EB
1378
1379 TYPE_NAME (marker) = concat_name (name, "XVS");
1380 finish_record_type (marker,
1381 create_field_decl (orig_name,
1382 build_reference_type (type),
1383 marker, NULL_TREE, NULL_TREE,
1384 0, 0),
1385 0, true);
c1a569ef 1386 TYPE_SIZE_UNIT (marker) = size_unit;
842d4ee2
EB
1387
1388 add_parallel_type (record, marker);
842d4ee2
EB
1389 }
1390
1391 rest_of_record_type_compilation (record);
1392
1393built:
1394 /* If the size was widened explicitly, maybe give a warning. Take the
1395 original size as the maximum size of the input if there was an
1396 unconstrained record involved and round it up to the specified alignment,
1397 if one was specified. But don't do it if we are just annotating types
1398 and the type is tagged, since tagged types aren't fully laid out in this
1399 mode. */
1400 if (!size
1401 || TREE_CODE (size) == COND_EXPR
1402 || TREE_CODE (size) == MAX_EXPR
1403 || No (gnat_entity)
1404 || (type_annotate_only && Is_Tagged_Type (Etype (gnat_entity))))
1405 return record;
1406
1407 if (CONTAINS_PLACEHOLDER_P (orig_size))
1408 orig_size = max_size (orig_size, true);
1409
f42dd37f 1410 if (align && AGGREGATE_TYPE_P (type))
842d4ee2
EB
1411 orig_size = round_up (orig_size, align);
1412
1413 if (!operand_equal_p (size, orig_size, 0)
1414 && !(TREE_CODE (size) == INTEGER_CST
1415 && TREE_CODE (orig_size) == INTEGER_CST
1416 && (TREE_OVERFLOW (size)
1417 || TREE_OVERFLOW (orig_size)
1418 || tree_int_cst_lt (size, orig_size))))
1419 {
1420 Node_Id gnat_error_node = Empty;
1421
1a4cb227
AC
1422 /* For a packed array, post the message on the original array type. */
1423 if (Is_Packed_Array_Impl_Type (gnat_entity))
842d4ee2
EB
1424 gnat_entity = Original_Array_Type (gnat_entity);
1425
1426 if ((Ekind (gnat_entity) == E_Component
1427 || Ekind (gnat_entity) == E_Discriminant)
1428 && Present (Component_Clause (gnat_entity)))
1429 gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
1430 else if (Present (Size_Clause (gnat_entity)))
1431 gnat_error_node = Expression (Size_Clause (gnat_entity));
1432
1433 /* Generate message only for entities that come from source, since
1434 if we have an entity created by expansion, the message will be
1435 generated for some other corresponding source entity. */
1436 if (Comes_From_Source (gnat_entity))
1437 {
1438 if (Present (gnat_error_node))
1439 post_error_ne_tree ("{^ }bits of & unused?",
1440 gnat_error_node, gnat_entity,
1441 size_diffop (size, orig_size));
1442 else if (is_component_type)
1443 post_error_ne_tree ("component of& padded{ by ^ bits}?",
1444 gnat_entity, gnat_entity,
1445 size_diffop (size, orig_size));
1446 }
1447 }
1448
1449 return record;
1450}
ee45a32d
EB
1451
1452/* Return a copy of the padded TYPE but with reverse storage order. */
1453
1454tree
1455set_reverse_storage_order_on_pad_type (tree type)
1456{
1457 tree field, canonical_pad_type;
1458
1459#ifdef ENABLE_CHECKING
1460 /* If the inner type is not scalar then the function does nothing. */
1461 tree inner_type = TREE_TYPE (TYPE_FIELDS (type));
1462 gcc_assert (!AGGREGATE_TYPE_P (inner_type) && !VECTOR_TYPE_P (inner_type));
1463#endif
1464
1465 /* This is required for the canonicalization. */
1466 gcc_assert (TREE_CONSTANT (TYPE_SIZE (type)));
1467
1468 field = copy_node (TYPE_FIELDS (type));
1469 type = copy_type (type);
1470 DECL_CONTEXT (field) = type;
1471 TYPE_FIELDS (type) = field;
1472 TYPE_REVERSE_STORAGE_ORDER (type) = 1;
1473 canonical_pad_type = lookup_and_insert_pad_type (type);
1474 return canonical_pad_type ? canonical_pad_type : type;
1475}
842d4ee2
EB
1476\f
1477/* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
1478 If this is a multi-dimensional array type, do this recursively.
1479
1480 OP may be
1481 - ALIAS_SET_COPY: the new set is made a copy of the old one.
1482 - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
1483 - ALIAS_SET_SUBSET: the new set is made a subset of the old one. */
1484
1485void
1486relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
1487{
1488 /* Remove any padding from GNU_OLD_TYPE. It doesn't matter in the case
1489 of a one-dimensional array, since the padding has the same alias set
1490 as the field type, but if it's a multi-dimensional array, we need to
1491 see the inner types. */
1492 while (TREE_CODE (gnu_old_type) == RECORD_TYPE
1493 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
1494 || TYPE_PADDING_P (gnu_old_type)))
1495 gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));
1496
1497 /* Unconstrained array types are deemed incomplete and would thus be given
1498 alias set 0. Retrieve the underlying array type. */
1499 if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
1500 gnu_old_type
1501 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
1502 if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
1503 gnu_new_type
1504 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));
1505
1506 if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
1507 && TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
1508 && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
1509 relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);
1510
1511 switch (op)
1512 {
1513 case ALIAS_SET_COPY:
1514 /* The alias set shouldn't be copied between array types with different
1515 aliasing settings because this can break the aliasing relationship
1516 between the array type and its element type. */
9abe8b74 1517 if (flag_checking || flag_strict_aliasing)
842d4ee2
EB
1518 gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
1519 && TREE_CODE (gnu_old_type) == ARRAY_TYPE
1520 && TYPE_NONALIASED_COMPONENT (gnu_new_type)
1521 != TYPE_NONALIASED_COMPONENT (gnu_old_type)));
1522
1523 TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
1524 break;
1525
1526 case ALIAS_SET_SUBSET:
1527 case ALIAS_SET_SUPERSET:
1528 {
1529 alias_set_type old_set = get_alias_set (gnu_old_type);
1530 alias_set_type new_set = get_alias_set (gnu_new_type);
1531
1532 /* Do nothing if the alias sets conflict. This ensures that we
1533 never call record_alias_subset several times for the same pair
1534 or at all for alias set 0. */
1535 if (!alias_sets_conflict_p (old_set, new_set))
1536 {
1537 if (op == ALIAS_SET_SUBSET)
1538 record_alias_subset (old_set, new_set);
1539 else
1540 record_alias_subset (new_set, old_set);
1541 }
1542 }
1543 break;
1544
1545 default:
1546 gcc_unreachable ();
1547 }
1548
1549 record_component_aliases (gnu_new_type);
1550}
1551\f
1aeb40dd 1552/* Record TYPE as a builtin type for Ada. NAME is the name of the type.
c1a569ef 1553 ARTIFICIAL_P is true if the type was generated by the compiler. */
a1ab4c31
AC
1554
1555void
1aeb40dd 1556record_builtin_type (const char *name, tree type, bool artificial_p)
a1ab4c31 1557{
c172df28
AH
1558 tree type_decl = build_decl (input_location,
1559 TYPE_DECL, get_identifier (name), type);
1aeb40dd 1560 DECL_ARTIFICIAL (type_decl) = artificial_p;
bc712852 1561 TYPE_ARTIFICIAL (type) = artificial_p;
10069d53 1562 gnat_pushdecl (type_decl, Empty);
a1ab4c31 1563
10069d53
EB
1564 if (debug_hooks->type_decl)
1565 debug_hooks->type_decl (type_decl, false);
a1ab4c31
AC
1566}
1567\f
e3edbd56
EB
1568/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1569 finish constructing the record type as a fat pointer type. */
1570
1571void
1572finish_fat_pointer_type (tree record_type, tree field_list)
1573{
1574 /* Make sure we can put it into a register. */
5da8c011
EB
1575 if (STRICT_ALIGNMENT)
1576 TYPE_ALIGN (record_type) = MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE);
e3edbd56
EB
1577
1578 /* Show what it really is. */
1579 TYPE_FAT_POINTER_P (record_type) = 1;
1580
1581 /* Do not emit debug info for it since the types of its fields may still be
1582 incomplete at this point. */
1583 finish_record_type (record_type, field_list, 0, false);
1584
1585 /* Force type_contains_placeholder_p to return true on it. Although the
1586 PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
1587 type but the representation of the unconstrained array. */
1588 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
1589}
1590
032d1b71 1591/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
a1ab4c31
AC
1592 finish constructing the record or union type. If REP_LEVEL is zero, this
1593 record has no representation clause and so will be entirely laid out here.
1594 If REP_LEVEL is one, this record has a representation clause and has been
1595 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
1596 this record is derived from a parent record and thus inherits its layout;
032d1b71
EB
1597 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
1598 we need to write debug information about this type. */
a1ab4c31
AC
1599
1600void
032d1b71
EB
1601finish_record_type (tree record_type, tree field_list, int rep_level,
1602 bool debug_info_p)
a1ab4c31
AC
1603{
1604 enum tree_code code = TREE_CODE (record_type);
9dba4b55 1605 tree name = TYPE_IDENTIFIER (record_type);
a1ab4c31
AC
1606 tree ada_size = bitsize_zero_node;
1607 tree size = bitsize_zero_node;
1608 bool had_size = TYPE_SIZE (record_type) != 0;
1609 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
1610 bool had_align = TYPE_ALIGN (record_type) != 0;
1611 tree field;
1612
032d1b71 1613 TYPE_FIELDS (record_type) = field_list;
a1ab4c31 1614
10069d53
EB
1615 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
1616 generate debug info and have a parallel type. */
10069d53 1617 TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);
a1ab4c31
AC
1618
1619 /* Globally initialize the record first. If this is a rep'ed record,
1620 that just means some initializations; otherwise, layout the record. */
1621 if (rep_level > 0)
1622 {
1623 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
a1ab4c31
AC
1624
1625 if (!had_size_unit)
1626 TYPE_SIZE_UNIT (record_type) = size_zero_node;
b1fa9126 1627
a1ab4c31
AC
1628 if (!had_size)
1629 TYPE_SIZE (record_type) = bitsize_zero_node;
1630
1631 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
1632 out just like a UNION_TYPE, since the size will be fixed. */
1633 else if (code == QUAL_UNION_TYPE)
1634 code = UNION_TYPE;
1635 }
1636 else
1637 {
1638 /* Ensure there isn't a size already set. There can be in an error
1639 case where there is a rep clause but all fields have errors and
1640 no longer have a position. */
1641 TYPE_SIZE (record_type) = 0;
bb358f1c
EB
1642
1643 /* Ensure we use the traditional GCC layout for bitfields when we need
1644 to pack the record type or have a representation clause. The other
1645 possible layout (Microsoft C compiler), if available, would prevent
1646 efficient packing in almost all cases. */
1647#ifdef TARGET_MS_BITFIELD_LAYOUT
1648 if (TARGET_MS_BITFIELD_LAYOUT && TYPE_PACKED (record_type))
1649 decl_attributes (&record_type,
1650 tree_cons (get_identifier ("gcc_struct"),
1651 NULL_TREE, NULL_TREE),
1652 ATTR_FLAG_TYPE_IN_PLACE);
1653#endif
1654
a1ab4c31
AC
1655 layout_type (record_type);
1656 }
1657
1658 /* At this point, the position and size of each field is known. It was
1659 either set before entry by a rep clause, or by laying out the type above.
1660
1661 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
1662 to compute the Ada size; the GCC size and alignment (for rep'ed records
1663 that are not padding types); and the mode (for rep'ed records). We also
1664 clear the DECL_BIT_FIELD indication for the cases we know have not been
1665 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
1666
1667 if (code == QUAL_UNION_TYPE)
032d1b71 1668 field_list = nreverse (field_list);
a1ab4c31 1669
910ad8de 1670 for (field = field_list; field; field = DECL_CHAIN (field))
a1ab4c31
AC
1671 {
1672 tree type = TREE_TYPE (field);
1673 tree pos = bit_position (field);
1674 tree this_size = DECL_SIZE (field);
1675 tree this_ada_size;
1676
e1e5852c 1677 if (RECORD_OR_UNION_TYPE_P (type)
315cff15 1678 && !TYPE_FAT_POINTER_P (type)
a1ab4c31
AC
1679 && !TYPE_CONTAINS_TEMPLATE_P (type)
1680 && TYPE_ADA_SIZE (type))
1681 this_ada_size = TYPE_ADA_SIZE (type);
1682 else
1683 this_ada_size = this_size;
1684
1685 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
1686 if (DECL_BIT_FIELD (field)
1687 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
1688 {
1689 unsigned int align = TYPE_ALIGN (type);
1690
1691 /* In the general case, type alignment is required. */
1692 if (value_factor_p (pos, align))
1693 {
1694 /* The enclosing record type must be sufficiently aligned.
1695 Otherwise, if no alignment was specified for it and it
1696 has been laid out already, bump its alignment to the
14ecca2e
EB
1697 desired one if this is compatible with its size and
1698 maximum alignment, if any. */
a1ab4c31
AC
1699 if (TYPE_ALIGN (record_type) >= align)
1700 {
1701 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
1702 DECL_BIT_FIELD (field) = 0;
1703 }
1704 else if (!had_align
1705 && rep_level == 0
14ecca2e
EB
1706 && value_factor_p (TYPE_SIZE (record_type), align)
1707 && (!TYPE_MAX_ALIGN (record_type)
1708 || TYPE_MAX_ALIGN (record_type) >= align))
a1ab4c31
AC
1709 {
1710 TYPE_ALIGN (record_type) = align;
1711 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
1712 DECL_BIT_FIELD (field) = 0;
1713 }
1714 }
1715
1716 /* In the non-strict alignment case, only byte alignment is. */
1717 if (!STRICT_ALIGNMENT
1718 && DECL_BIT_FIELD (field)
1719 && value_factor_p (pos, BITS_PER_UNIT))
1720 DECL_BIT_FIELD (field) = 0;
1721 }
1722
c1abd261
EB
1723 /* If we still have DECL_BIT_FIELD set at this point, we know that the
1724 field is technically not addressable. Except that it can actually
1725 be addressed if it is BLKmode and happens to be properly aligned. */
1726 if (DECL_BIT_FIELD (field)
1727 && !(DECL_MODE (field) == BLKmode
1728 && value_factor_p (pos, BITS_PER_UNIT)))
1729 DECL_NONADDRESSABLE_P (field) = 1;
a1ab4c31
AC
1730
1731 /* A type must be as aligned as its most aligned field that is not
1732 a bit-field. But this is already enforced by layout_type. */
1733 if (rep_level > 0 && !DECL_BIT_FIELD (field))
1734 TYPE_ALIGN (record_type)
1735 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
1736
1737 switch (code)
1738 {
1739 case UNION_TYPE:
1740 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
1741 size = size_binop (MAX_EXPR, size, this_size);
1742 break;
1743
1744 case QUAL_UNION_TYPE:
1745 ada_size
1746 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1747 this_ada_size, ada_size);
1748 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1749 this_size, size);
1750 break;
1751
1752 case RECORD_TYPE:
1753 /* Since we know here that all fields are sorted in order of
1754 increasing bit position, the size of the record is one
1755 higher than the ending bit of the last field processed
1756 unless we have a rep clause, since in that case we might
1757 have a field outside a QUAL_UNION_TYPE that has a higher ending
1758 position. So use a MAX in that case. Also, if this field is a
1759 QUAL_UNION_TYPE, we need to take into account the previous size in
1760 the case of empty variants. */
1761 ada_size
1762 = merge_sizes (ada_size, pos, this_ada_size,
1763 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1764 size
1765 = merge_sizes (size, pos, this_size,
1766 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1767 break;
1768
1769 default:
1770 gcc_unreachable ();
1771 }
1772 }
1773
1774 if (code == QUAL_UNION_TYPE)
032d1b71 1775 nreverse (field_list);
a1ab4c31
AC
1776
1777 if (rep_level < 2)
1778 {
1779 /* If this is a padding record, we never want to make the size smaller
1780 than what was specified in it, if any. */
315cff15 1781 if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
a1ab4c31
AC
1782 size = TYPE_SIZE (record_type);
1783
1784 /* Now set any of the values we've just computed that apply. */
315cff15 1785 if (!TYPE_FAT_POINTER_P (record_type)
a1ab4c31
AC
1786 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
1787 SET_TYPE_ADA_SIZE (record_type, ada_size);
1788
1789 if (rep_level > 0)
1790 {
1791 tree size_unit = had_size_unit
1792 ? TYPE_SIZE_UNIT (record_type)
1793 : convert (sizetype,
1794 size_binop (CEIL_DIV_EXPR, size,
1795 bitsize_unit_node));
1796 unsigned int align = TYPE_ALIGN (record_type);
1797
1798 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
1799 TYPE_SIZE_UNIT (record_type)
1800 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
1801
1802 compute_record_mode (record_type);
1803 }
1804 }
1805
14ecca2e
EB
1806 /* Reset the TYPE_MAX_ALIGN field since it's private to gigi. */
1807 TYPE_MAX_ALIGN (record_type) = 0;
1808
032d1b71 1809 if (debug_info_p)
a1ab4c31
AC
1810 rest_of_record_type_compilation (record_type);
1811}
1812
24d4b3d5
AC
1813/* Append PARALLEL_TYPE on the chain of parallel types of TYPE. If
1814 PARRALEL_TYPE has no context and its computation is not deferred yet, also
1815 propagate TYPE's context to PARALLEL_TYPE's or defer its propagation to the
1816 moment TYPE will get a context. */
a5695aa2
EB
1817
1818void
1819add_parallel_type (tree type, tree parallel_type)
1820{
1821 tree decl = TYPE_STUB_DECL (type);
1822
1823 while (DECL_PARALLEL_TYPE (decl))
1824 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
1825
1826 SET_DECL_PARALLEL_TYPE (decl, parallel_type);
24d4b3d5
AC
1827
1828 /* If PARALLEL_TYPE already has a context, we are done. */
1829 if (TYPE_CONTEXT (parallel_type) != NULL_TREE)
1830 return;
1831
1832 /* Otherwise, try to get one from TYPE's context. */
1833 if (TYPE_CONTEXT (type) != NULL_TREE)
1834 /* TYPE already has a context, so simply propagate it to PARALLEL_TYPE. */
1835 gnat_set_type_context (parallel_type, TYPE_CONTEXT (type));
1836
1837 /* ... otherwise TYPE has not context yet. We know it will thanks to
1838 gnat_pushdecl, and then its context will be propagated to PARALLEL_TYPE.
1839 So we have nothing to do in this case. */
a5695aa2
EB
1840}
1841
1842/* Return true if TYPE has a parallel type. */
1843
1844static bool
1845has_parallel_type (tree type)
1846{
1847 tree decl = TYPE_STUB_DECL (type);
1848
1849 return DECL_PARALLEL_TYPE (decl) != NULL_TREE;
1850}
1851
032d1b71
EB
1852/* Wrap up compilation of RECORD_TYPE, i.e. output all the debug information
1853 associated with it. It need not be invoked directly in most cases since
1854 finish_record_type takes care of doing so, but this can be necessary if
1855 a parallel type is to be attached to the record type. */
a1ab4c31
AC
1856
1857void
1858rest_of_record_type_compilation (tree record_type)
1859{
a1ab4c31 1860 bool var_size = false;
fb88e1dd 1861 tree field;
a1ab4c31 1862
fb88e1dd
EB
1863 /* If this is a padded type, the bulk of the debug info has already been
1864 generated for the field's type. */
1865 if (TYPE_IS_PADDING_P (record_type))
1866 return;
1867
a5695aa2
EB
1868 /* If the type already has a parallel type (XVS type), then we're done. */
1869 if (has_parallel_type (record_type))
1870 return;
1871
fb88e1dd 1872 for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
a1ab4c31
AC
1873 {
1874 /* We need to make an XVE/XVU record if any field has variable size,
1875 whether or not the record does. For example, if we have a union,
1876 it may be that all fields, rounded up to the alignment, have the
1877 same size, in which case we'll use that size. But the debug
1878 output routines (except Dwarf2) won't be able to output the fields,
1879 so we need to make the special record. */
1880 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
1881 /* If a field has a non-constant qualifier, the record will have
1882 variable size too. */
fb88e1dd 1883 || (TREE_CODE (record_type) == QUAL_UNION_TYPE
a1ab4c31
AC
1884 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
1885 {
1886 var_size = true;
1887 break;
1888 }
1889 }
1890
fb88e1dd
EB
1891 /* If this record type is of variable size, make a parallel record type that
1892 will tell the debugger how the former is laid out (see exp_dbug.ads). */
1893 if (var_size)
a1ab4c31
AC
1894 {
1895 tree new_record_type
1896 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
1897 ? UNION_TYPE : TREE_CODE (record_type));
9dba4b55 1898 tree orig_name = TYPE_IDENTIFIER (record_type), new_name;
a1ab4c31 1899 tree last_pos = bitsize_zero_node;
0fb2335d 1900 tree old_field, prev_old_field = NULL_TREE;
a1ab4c31 1901
0fb2335d
EB
1902 new_name
1903 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
1904 ? "XVU" : "XVE");
1905 TYPE_NAME (new_record_type) = new_name;
a1ab4c31
AC
1906 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
1907 TYPE_STUB_DECL (new_record_type)
0fb2335d 1908 = create_type_stub_decl (new_name, new_record_type);
a1ab4c31
AC
1909 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
1910 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
1911 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
1912 TYPE_SIZE_UNIT (new_record_type)
1913 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
1914
5c475ba9
EB
1915 /* Now scan all the fields, replacing each field with a new field
1916 corresponding to the new encoding. */
a1ab4c31 1917 for (old_field = TYPE_FIELDS (record_type); old_field;
910ad8de 1918 old_field = DECL_CHAIN (old_field))
a1ab4c31
AC
1919 {
1920 tree field_type = TREE_TYPE (old_field);
1921 tree field_name = DECL_NAME (old_field);
a1ab4c31 1922 tree curpos = bit_position (old_field);
5c475ba9 1923 tree pos, new_field;
a1ab4c31
AC
1924 bool var = false;
1925 unsigned int align = 0;
a1ab4c31 1926
5c475ba9
EB
1927 /* We're going to do some pattern matching below so remove as many
1928 conversions as possible. */
1929 curpos = remove_conversions (curpos, true);
a1ab4c31 1930
5c475ba9 1931 /* See how the position was modified from the last position.
a1ab4c31 1932
5c475ba9
EB
1933 There are two basic cases we support: a value was added
1934 to the last position or the last position was rounded to
1935 a boundary and they something was added. Check for the
1936 first case first. If not, see if there is any evidence
1937 of rounding. If so, round the last position and retry.
a1ab4c31 1938
5c475ba9 1939 If this is a union, the position can be taken as zero. */
a1ab4c31 1940 if (TREE_CODE (new_record_type) == UNION_TYPE)
5c475ba9 1941 pos = bitsize_zero_node;
a1ab4c31
AC
1942 else
1943 pos = compute_related_constant (curpos, last_pos);
1944
5c475ba9
EB
1945 if (!pos
1946 && TREE_CODE (curpos) == MULT_EXPR
cc269bb6 1947 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1)))
a1ab4c31
AC
1948 {
1949 tree offset = TREE_OPERAND (curpos, 0);
ae7e9ddd 1950 align = tree_to_uhwi (TREE_OPERAND (curpos, 1));
5c475ba9
EB
1951 align = scale_by_factor_of (offset, align);
1952 last_pos = round_up (last_pos, align);
1953 pos = compute_related_constant (curpos, last_pos);
a1ab4c31 1954 }
5c475ba9
EB
1955 else if (!pos
1956 && TREE_CODE (curpos) == PLUS_EXPR
cc269bb6 1957 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1))
a1ab4c31 1958 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
5a36c51b
RS
1959 && tree_fits_uhwi_p
1960 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1)))
a1ab4c31 1961 {
5c475ba9
EB
1962 tree offset = TREE_OPERAND (TREE_OPERAND (curpos, 0), 0);
1963 unsigned HOST_WIDE_INT addend
ae7e9ddd 1964 = tree_to_uhwi (TREE_OPERAND (curpos, 1));
a1ab4c31 1965 align
ae7e9ddd 1966 = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1));
5c475ba9
EB
1967 align = scale_by_factor_of (offset, align);
1968 align = MIN (align, addend & -addend);
1969 last_pos = round_up (last_pos, align);
1970 pos = compute_related_constant (curpos, last_pos);
a1ab4c31 1971 }
5c475ba9 1972 else if (potential_alignment_gap (prev_old_field, old_field, pos))
a1ab4c31
AC
1973 {
1974 align = TYPE_ALIGN (field_type);
5c475ba9
EB
1975 last_pos = round_up (last_pos, align);
1976 pos = compute_related_constant (curpos, last_pos);
a1ab4c31
AC
1977 }
1978
1979 /* If we can't compute a position, set it to zero.
1980
5c475ba9
EB
1981 ??? We really should abort here, but it's too much work
1982 to get this correct for all cases. */
a1ab4c31
AC
1983 if (!pos)
1984 pos = bitsize_zero_node;
1985
1986 /* See if this type is variable-sized and make a pointer type
1987 and indicate the indirection if so. Beware that the debug
1988 back-end may adjust the position computed above according
1989 to the alignment of the field type, i.e. the pointer type
1990 in this case, if we don't preventively counter that. */
1991 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
1992 {
1993 field_type = build_pointer_type (field_type);
1994 if (align != 0 && TYPE_ALIGN (field_type) > align)
1995 {
1996 field_type = copy_node (field_type);
1997 TYPE_ALIGN (field_type) = align;
1998 }
1999 var = true;
2000 }
2001
2002 /* Make a new field name, if necessary. */
2003 if (var || align != 0)
2004 {
2005 char suffix[16];
2006
2007 if (align != 0)
2008 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
2009 align / BITS_PER_UNIT);
2010 else
2011 strcpy (suffix, "XVL");
2012
0fb2335d 2013 field_name = concat_name (field_name, suffix);
a1ab4c31
AC
2014 }
2015
da01bfee
EB
2016 new_field
2017 = create_field_decl (field_name, field_type, new_record_type,
2018 DECL_SIZE (old_field), pos, 0, 0);
910ad8de 2019 DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
a1ab4c31
AC
2020 TYPE_FIELDS (new_record_type) = new_field;
2021
2022 /* If old_field is a QUAL_UNION_TYPE, take its size as being
2023 zero. The only time it's not the last field of the record
2024 is when there are other components at fixed positions after
2025 it (meaning there was a rep clause for every field) and we
2026 want to be able to encode them. */
2027 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
2028 (TREE_CODE (TREE_TYPE (old_field))
2029 == QUAL_UNION_TYPE)
2030 ? bitsize_zero_node
2031 : DECL_SIZE (old_field));
2032 prev_old_field = old_field;
2033 }
2034
fb88e1dd 2035 TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));
a1ab4c31 2036
a5695aa2 2037 add_parallel_type (record_type, new_record_type);
a1ab4c31 2038 }
a1ab4c31
AC
2039}
2040
a1ab4c31 2041/* Utility function of above to merge LAST_SIZE, the previous size of a record
1e17ef87
EB
2042 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
2043 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
2044 replace a value of zero with the old size. If HAS_REP is true, we take the
2045 MAX of the end position of this field with LAST_SIZE. In all other cases,
2046 we use FIRST_BIT plus SIZE. Return an expression for the size. */
a1ab4c31
AC
2047
2048static tree
2049merge_sizes (tree last_size, tree first_bit, tree size, bool special,
2050 bool has_rep)
2051{
2052 tree type = TREE_TYPE (last_size);
c6bd4220 2053 tree new_size;
a1ab4c31
AC
2054
2055 if (!special || TREE_CODE (size) != COND_EXPR)
2056 {
c6bd4220 2057 new_size = size_binop (PLUS_EXPR, first_bit, size);
a1ab4c31 2058 if (has_rep)
c6bd4220 2059 new_size = size_binop (MAX_EXPR, last_size, new_size);
a1ab4c31
AC
2060 }
2061
2062 else
c6bd4220
EB
2063 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
2064 integer_zerop (TREE_OPERAND (size, 1))
2065 ? last_size : merge_sizes (last_size, first_bit,
2066 TREE_OPERAND (size, 1),
2067 1, has_rep),
2068 integer_zerop (TREE_OPERAND (size, 2))
2069 ? last_size : merge_sizes (last_size, first_bit,
2070 TREE_OPERAND (size, 2),
2071 1, has_rep));
a1ab4c31
AC
2072
2073 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
2074 when fed through substitute_in_expr) into thinking that a constant
2075 size is not constant. */
c6bd4220
EB
2076 while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
2077 new_size = TREE_OPERAND (new_size, 0);
a1ab4c31 2078
c6bd4220 2079 return new_size;
a1ab4c31
AC
2080}
2081
2082/* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
2083 related by the addition of a constant. Return that constant if so. */
2084
2085static tree
2086compute_related_constant (tree op0, tree op1)
2087{
2088 tree op0_var, op1_var;
2089 tree op0_con = split_plus (op0, &op0_var);
2090 tree op1_con = split_plus (op1, &op1_var);
2091 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
2092
2093 if (operand_equal_p (op0_var, op1_var, 0))
2094 return result;
2095 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
2096 return result;
2097 else
2098 return 0;
2099}
2100
2101/* Utility function of above to split a tree OP which may be a sum, into a
2102 constant part, which is returned, and a variable part, which is stored
2103 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
2104 bitsizetype. */
2105
2106static tree
2107split_plus (tree in, tree *pvar)
2108{
722356ce
EB
2109 /* Strip conversions in order to ease the tree traversal and maximize the
2110 potential for constant or plus/minus discovery. We need to be careful
a1ab4c31
AC
2111 to always return and set *pvar to bitsizetype trees, but it's worth
2112 the effort. */
722356ce 2113 in = remove_conversions (in, false);
a1ab4c31
AC
2114
2115 *pvar = convert (bitsizetype, in);
2116
2117 if (TREE_CODE (in) == INTEGER_CST)
2118 {
2119 *pvar = bitsize_zero_node;
2120 return convert (bitsizetype, in);
2121 }
2122 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
2123 {
2124 tree lhs_var, rhs_var;
2125 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
2126 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
2127
2128 if (lhs_var == TREE_OPERAND (in, 0)
2129 && rhs_var == TREE_OPERAND (in, 1))
2130 return bitsize_zero_node;
2131
2132 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
2133 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
2134 }
2135 else
2136 return bitsize_zero_node;
2137}
2138\f
d47d0a8d
EB
2139/* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
2140 subprogram. If it is VOID_TYPE, then we are dealing with a procedure,
2141 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
2142 PARM_DECL nodes that are the subprogram parameters. CICO_LIST is the
2143 copy-in/copy-out list to be stored into the TYPE_CICO_LIST field.
2144 RETURN_UNCONSTRAINED_P is true if the function returns an unconstrained
2145 object. RETURN_BY_DIRECT_REF_P is true if the function returns by direct
2146 reference. RETURN_BY_INVISI_REF_P is true if the function returns by
2147 invisible reference. */
a1ab4c31
AC
2148
2149tree
2150create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
d47d0a8d
EB
2151 bool return_unconstrained_p, bool return_by_direct_ref_p,
2152 bool return_by_invisi_ref_p)
a1ab4c31 2153{
e5b00edf
NF
2154 /* A list of the data type nodes of the subprogram formal parameters.
2155 This list is generated by traversing the input list of PARM_DECL
2156 nodes. */
9771b263 2157 vec<tree, va_gc> *param_type_list = NULL;
d47d0a8d 2158 tree t, type;
a1ab4c31 2159
910ad8de 2160 for (t = param_decl_list; t; t = DECL_CHAIN (t))
9771b263 2161 vec_safe_push (param_type_list, TREE_TYPE (t));
a1ab4c31 2162
e5b00edf 2163 type = build_function_type_vec (return_type, param_type_list);
a1ab4c31 2164
d47d0a8d
EB
2165 /* TYPE may have been shared since GCC hashes types. If it has a different
2166 CICO_LIST, make a copy. Likewise for the various flags. */
523e82a7
EB
2167 if (!fntype_same_flags_p (type, cico_list, return_unconstrained_p,
2168 return_by_direct_ref_p, return_by_invisi_ref_p))
d47d0a8d
EB
2169 {
2170 type = copy_type (type);
2171 TYPE_CI_CO_LIST (type) = cico_list;
2172 TYPE_RETURN_UNCONSTRAINED_P (type) = return_unconstrained_p;
2173 TYPE_RETURN_BY_DIRECT_REF_P (type) = return_by_direct_ref_p;
2174 TREE_ADDRESSABLE (type) = return_by_invisi_ref_p;
2175 }
a1ab4c31 2176
a1ab4c31
AC
2177 return type;
2178}
2179\f
2180/* Return a copy of TYPE but safe to modify in any way. */
2181
2182tree
2183copy_type (tree type)
2184{
c6bd4220 2185 tree new_type = copy_node (type);
a1ab4c31 2186
90dcfecb
EB
2187 /* Unshare the language-specific data. */
2188 if (TYPE_LANG_SPECIFIC (type))
2189 {
2190 TYPE_LANG_SPECIFIC (new_type) = NULL;
2191 SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
2192 }
2193
2194 /* And the contents of the language-specific slot if needed. */
2195 if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
2196 && TYPE_RM_VALUES (type))
2197 {
2198 TYPE_RM_VALUES (new_type) = NULL_TREE;
2199 SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
2200 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
2201 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
2202 }
2203
a1ab4c31
AC
2204 /* copy_node clears this field instead of copying it, because it is
2205 aliased with TREE_CHAIN. */
c6bd4220 2206 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
a1ab4c31 2207
c6bd4220
EB
2208 TYPE_POINTER_TO (new_type) = 0;
2209 TYPE_REFERENCE_TO (new_type) = 0;
2210 TYPE_MAIN_VARIANT (new_type) = new_type;
2211 TYPE_NEXT_VARIANT (new_type) = 0;
4b7bd260 2212 TYPE_CANONICAL (new_type) = new_type;
a1ab4c31 2213
c6bd4220 2214 return new_type;
a1ab4c31
AC
2215}
2216\f
c1abd261
EB
2217/* Return a subtype of sizetype with range MIN to MAX and whose
2218 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2219 of the associated TYPE_DECL. */
a1ab4c31
AC
2220
2221tree
2222create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
2223{
2224 /* First build a type for the desired range. */
523e82a7 2225 tree type = build_nonshared_range_type (sizetype, min, max);
a1ab4c31 2226
523e82a7 2227 /* Then set the index type. */
a1ab4c31 2228 SET_TYPE_INDEX_TYPE (type, index);
74746d49 2229 create_type_decl (NULL_TREE, type, true, false, gnat_node);
c1abd261 2230
a1ab4c31
AC
2231 return type;
2232}
84fb43a1
EB
2233
2234/* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2235 sizetype is used. */
2236
2237tree
2238create_range_type (tree type, tree min, tree max)
2239{
2240 tree range_type;
2241
2242 if (type == NULL_TREE)
2243 type = sizetype;
2244
2245 /* First build a type with the base range. */
523e82a7
EB
2246 range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
2247 TYPE_MAX_VALUE (type));
84fb43a1
EB
2248
2249 /* Then set the actual range. */
1eb58520
AC
2250 SET_TYPE_RM_MIN_VALUE (range_type, min);
2251 SET_TYPE_RM_MAX_VALUE (range_type, max);
84fb43a1
EB
2252
2253 return range_type;
2254}
a1ab4c31 2255\f
6249559b
EB
2256/* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of TYPE.
2257 NAME gives the name of the type to be used in the declaration. */
10069d53
EB
2258
2259tree
6249559b 2260create_type_stub_decl (tree name, tree type)
10069d53 2261{
6249559b 2262 tree type_decl = build_decl (input_location, TYPE_DECL, name, type);
10069d53 2263 DECL_ARTIFICIAL (type_decl) = 1;
bc712852 2264 TYPE_ARTIFICIAL (type) = 1;
10069d53
EB
2265 return type_decl;
2266}
2267
6249559b
EB
2268/* Return a TYPE_DECL node for TYPE. NAME gives the name of the type to be
2269 used in the declaration. ARTIFICIAL_P is true if the declaration was
2270 generated by the compiler. DEBUG_INFO_P is true if we need to write
2271 debug information about this type. GNAT_NODE is used for the position
2272 of the decl. */
a1ab4c31
AC
2273
2274tree
6249559b
EB
2275create_type_decl (tree name, tree type, bool artificial_p, bool debug_info_p,
2276 Node_Id gnat_node)
a1ab4c31 2277{
a1ab4c31 2278 enum tree_code code = TREE_CODE (type);
6249559b
EB
2279 bool is_named
2280 = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
10069d53 2281 tree type_decl;
a1ab4c31 2282
10069d53
EB
2283 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
2284 gcc_assert (!TYPE_IS_DUMMY_P (type));
a1ab4c31 2285
10069d53
EB
2286 /* If the type hasn't been named yet, we're naming it; preserve an existing
2287 TYPE_STUB_DECL that has been attached to it for some purpose. */
6249559b 2288 if (!is_named && TYPE_STUB_DECL (type))
10069d53
EB
2289 {
2290 type_decl = TYPE_STUB_DECL (type);
6249559b 2291 DECL_NAME (type_decl) = name;
10069d53
EB
2292 }
2293 else
6249559b 2294 type_decl = build_decl (input_location, TYPE_DECL, name, type);
a1ab4c31 2295
10069d53 2296 DECL_ARTIFICIAL (type_decl) = artificial_p;
bc712852 2297 TYPE_ARTIFICIAL (type) = artificial_p;
58c8f770
EB
2298
2299 /* Add this decl to the current binding level. */
10069d53 2300 gnat_pushdecl (type_decl, gnat_node);
58c8f770 2301
aef308d0
PMR
2302 /* If we're naming the type, equate the TYPE_STUB_DECL to the name. This
2303 causes the name to be also viewed as a "tag" by the debug back-end, with
2304 the advantage that no DW_TAG_typedef is emitted for artificial "tagged"
2305 types in DWARF.
2306
2307 Note that if "type" is used as a DECL_ORIGINAL_TYPE, it may be referenced
2308 from multiple contexts, and "type_decl" references a copy of it: in such a
2309 case, do not mess TYPE_STUB_DECL: we do not want to re-use the TYPE_DECL
2310 with the mechanism above. */
6249559b 2311 if (!is_named && type != DECL_ORIGINAL_TYPE (type_decl))
10069d53
EB
2312 TYPE_STUB_DECL (type) = type_decl;
2313
50741117
EB
2314 /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
2315 back-end doesn't support, and for others if we don't need to. */
a1ab4c31
AC
2316 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
2317 DECL_IGNORED_P (type_decl) = 1;
a1ab4c31
AC
2318
2319 return type_decl;
2320}
10069d53 2321\f
a1ab4c31
AC
2322/* Return a VAR_DECL or CONST_DECL node.
2323
6249559b
EB
2324 NAME gives the name of the variable. ASM_NAME is its assembler name
2325 (if provided). TYPE is its data type (a GCC ..._TYPE node). INIT is
a1ab4c31
AC
2326 the GCC tree for an optional initial expression; NULL_TREE if none.
2327
2328 CONST_FLAG is true if this variable is constant, in which case we might
2329 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
2330
2331 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
2332 definition to be made visible outside of the current compilation unit, for
2333 instance variable definitions in a package specification.
2334
1e17ef87 2335 EXTERN_FLAG is true when processing an external variable declaration (as
a1ab4c31
AC
2336 opposed to a definition: no storage is to be allocated for the variable).
2337
2056c5ed
EB
2338 STATIC_FLAG is only relevant when not at top level and indicates whether
2339 to always allocate storage to the variable.
2340
2341 VOLATILE_FLAG is true if this variable is declared as volatile.
a1ab4c31 2342
c1a569ef
EB
2343 ARTIFICIAL_P is true if the variable was generated by the compiler.
2344
2345 DEBUG_INFO_P is true if we need to write debug information for it.
2346
2ade427a
EB
2347 ATTR_LIST is the list of attributes to be attached to the variable.
2348
a1ab4c31
AC
2349 GNAT_NODE is used for the position of the decl. */
2350
2351tree
6249559b
EB
2352create_var_decl (tree name, tree asm_name, tree type, tree init,
2353 bool const_flag, bool public_flag, bool extern_flag,
2056c5ed
EB
2354 bool static_flag, bool volatile_flag, bool artificial_p,
2355 bool debug_info_p, struct attrib *attr_list,
2356 Node_Id gnat_node, bool const_decl_allowed_p)
a1ab4c31 2357{
5fe48b3d
EB
2358 /* Whether the object has static storage duration, either explicitly or by
2359 virtue of being declared at the global level. */
2360 const bool static_storage = static_flag || global_bindings_p ();
2361
2362 /* Whether the initializer is constant: for an external object or an object
2363 with static storage duration, we check that the initializer is a valid
2364 constant expression for initializing a static variable; otherwise, we
2365 only check that it is constant. */
2366 const bool init_const
6249559b
EB
2367 = (init
2368 && gnat_types_compatible_p (type, TREE_TYPE (init))
5fe48b3d 2369 && (extern_flag || static_storage
6249559b 2370 ? initializer_constant_valid_p (init, TREE_TYPE (init))
5fe48b3d 2371 != NULL_TREE
6249559b 2372 : TREE_CONSTANT (init)));
a1ab4c31
AC
2373
2374 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
5fe48b3d 2375 case the initializer may be used in lieu of the DECL node (as done in
a1ab4c31 2376 Identifier_to_gnu). This is useful to prevent the need of elaboration
5fe48b3d
EB
2377 code when an identifier for which such a DECL is made is in turn used
2378 as an initializer. We used to rely on CONST_DECL vs VAR_DECL for this,
2379 but extra constraints apply to this choice (see below) and they are not
2380 relevant to the distinction we wish to make. */
2381 const bool constant_p = const_flag && init_const;
a1ab4c31
AC
2382
2383 /* The actual DECL node. CONST_DECL was initially intended for enumerals
2384 and may be used for scalars in general but not for aggregates. */
2385 tree var_decl
c172df28
AH
2386 = build_decl (input_location,
2387 (constant_p && const_decl_allowed_p
a1ab4c31 2388 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
6249559b 2389 name, type);
a1ab4c31
AC
2390
2391 /* If this is external, throw away any initializations (they will be done
2392 elsewhere) unless this is a constant for which we would like to remain
2393 able to get the initializer. If we are defining a global here, leave a
2394 constant initialization and save any variable elaborations for the
2395 elaboration routine. If we are just annotating types, throw away the
2396 initialization if it isn't a constant. */
2397 if ((extern_flag && !constant_p)
6249559b
EB
2398 || (type_annotate_only && init && !TREE_CONSTANT (init)))
2399 init = NULL_TREE;
a1ab4c31 2400
5fe48b3d
EB
2401 /* At the global level, a non-constant initializer generates elaboration
2402 statements. Check that such statements are allowed, that is to say,
2403 not violating a No_Elaboration_Code restriction. */
6249559b 2404 if (init && !init_const && global_bindings_p ())
a1ab4c31 2405 Check_Elaboration_Code_Allowed (gnat_node);
3b9e8343 2406
c1a569ef 2407 /* Attach the initializer, if any. */
6249559b 2408 DECL_INITIAL (var_decl) = init;
c1a569ef
EB
2409
2410 /* Directly set some flags. */
2411 DECL_ARTIFICIAL (var_decl) = artificial_p;
8b7b0c36 2412 DECL_EXTERNAL (var_decl) = extern_flag;
a1ab4c31
AC
2413
2414 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
2415 try to fiddle with DECL_COMMON. However, on platforms that don't
2416 support global BSS sections, uninitialized global variables would
2417 go in DATA instead, thus increasing the size of the executable. */
2418 if (!flag_no_common
2419 && TREE_CODE (var_decl) == VAR_DECL
3b9e8343 2420 && TREE_PUBLIC (var_decl)
a1ab4c31
AC
2421 && !have_global_bss_p ())
2422 DECL_COMMON (var_decl) = 1;
a1ab4c31 2423
c1a569ef
EB
2424 /* Do not emit debug info for a CONST_DECL if optimization isn't enabled,
2425 since we will create an associated variable. Likewise for an external
2426 constant whose initializer is not absolute, because this would mean a
2427 global relocation in a read-only section which runs afoul of the PE-COFF
2428 run-time relocation mechanism. */
2429 if (!debug_info_p
2430 || (TREE_CODE (var_decl) == CONST_DECL && !optimize)
2431 || (extern_flag
2432 && constant_p
6249559b
EB
2433 && init
2434 && initializer_constant_valid_p (init, TREE_TYPE (init))
c1a569ef 2435 != null_pointer_node))
5225a138
EB
2436 DECL_IGNORED_P (var_decl) = 1;
2437
2056c5ed
EB
2438 TREE_CONSTANT (var_decl) = constant_p;
2439 TREE_READONLY (var_decl) = const_flag;
2440
2441 /* The object is public if it is external or if it is declared public
2442 and has static storage duration. */
2443 TREE_PUBLIC (var_decl) = extern_flag || (public_flag && static_storage);
2444
2445 /* We need to allocate static storage for an object with static storage
2446 duration if it isn't external. */
2447 TREE_STATIC (var_decl) = !extern_flag && static_storage;
2448
2449 TREE_SIDE_EFFECTS (var_decl)
2450 = TREE_THIS_VOLATILE (var_decl)
2451 = TYPE_VOLATILE (type) | volatile_flag;
5fe48b3d 2452
a1ab4c31
AC
2453 if (TREE_SIDE_EFFECTS (var_decl))
2454 TREE_ADDRESSABLE (var_decl) = 1;
2455
74746d49
EB
2456 /* ??? Some attributes cannot be applied to CONST_DECLs. */
2457 if (TREE_CODE (var_decl) == VAR_DECL)
2458 process_attributes (&var_decl, &attr_list, true, gnat_node);
2459
2460 /* Add this decl to the current binding level. */
2461 gnat_pushdecl (var_decl, gnat_node);
2462
a22b794d 2463 if (TREE_CODE (var_decl) == VAR_DECL && asm_name)
a1ab4c31 2464 {
a22b794d
EB
2465 /* Let the target mangle the name if this isn't a verbatim asm. */
2466 if (*IDENTIFIER_POINTER (asm_name) != '*')
2467 asm_name = targetm.mangle_decl_assembler_name (var_decl, asm_name);
74746d49 2468
a22b794d 2469 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
a1ab4c31 2470 }
a1ab4c31
AC
2471
2472 return var_decl;
2473}
2474\f
2475/* Return true if TYPE, an aggregate type, contains (or is) an array. */
2476
2477static bool
2478aggregate_type_contains_array_p (tree type)
2479{
2480 switch (TREE_CODE (type))
2481 {
2482 case RECORD_TYPE:
2483 case UNION_TYPE:
2484 case QUAL_UNION_TYPE:
2485 {
2486 tree field;
910ad8de 2487 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
a1ab4c31
AC
2488 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
2489 && aggregate_type_contains_array_p (TREE_TYPE (field)))
2490 return true;
2491 return false;
2492 }
2493
2494 case ARRAY_TYPE:
2495 return true;
2496
2497 default:
2498 gcc_unreachable ();
2499 }
2500}
2501
6249559b
EB
2502/* Return a FIELD_DECL node. NAME is the field's name, TYPE is its type and
2503 RECORD_TYPE is the type of the enclosing record. If SIZE is nonzero, it
2504 is the specified size of the field. If POS is nonzero, it is the bit
2505 position. PACKED is 1 if the enclosing record is packed, -1 if it has
2506 Component_Alignment of Storage_Unit. If ADDRESSABLE is nonzero, it
62f9f3ce
EB
2507 means we are allowed to take the address of the field; if it is negative,
2508 we should not make a bitfield, which is used by make_aligning_type. */
a1ab4c31
AC
2509
2510tree
6249559b
EB
2511create_field_decl (tree name, tree type, tree record_type, tree size, tree pos,
2512 int packed, int addressable)
a1ab4c31 2513{
6249559b 2514 tree field_decl = build_decl (input_location, FIELD_DECL, name, type);
a1ab4c31
AC
2515
2516 DECL_CONTEXT (field_decl) = record_type;
6249559b 2517 TREE_READONLY (field_decl) = TYPE_READONLY (type);
a1ab4c31
AC
2518
2519 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
2520 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
2521 Likewise for an aggregate without specified position that contains an
2522 array, because in this case slices of variable length of this array
2523 must be handled by GCC and variable-sized objects need to be aligned
2524 to at least a byte boundary. */
6249559b 2525 if (packed && (TYPE_MODE (type) == BLKmode
a1ab4c31 2526 || (!pos
6249559b
EB
2527 && AGGREGATE_TYPE_P (type)
2528 && aggregate_type_contains_array_p (type))))
a1ab4c31
AC
2529 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
2530
2531 /* If a size is specified, use it. Otherwise, if the record type is packed
2532 compute a size to use, which may differ from the object's natural size.
2533 We always set a size in this case to trigger the checks for bitfield
2534 creation below, which is typically required when no position has been
2535 specified. */
2536 if (size)
2537 size = convert (bitsizetype, size);
2538 else if (packed == 1)
2539 {
6249559b
EB
2540 size = rm_size (type);
2541 if (TYPE_MODE (type) == BLKmode)
62f9f3ce 2542 size = round_up (size, BITS_PER_UNIT);
a1ab4c31
AC
2543 }
2544
2545 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
2546 specified for two reasons: first if the size differs from the natural
2547 size. Second, if the alignment is insufficient. There are a number of
2548 ways the latter can be true.
2549
2550 We never make a bitfield if the type of the field has a nonconstant size,
2551 because no such entity requiring bitfield operations should reach here.
2552
2553 We do *preventively* make a bitfield when there might be the need for it
2554 but we don't have all the necessary information to decide, as is the case
2555 of a field with no specified position in a packed record.
2556
2557 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
2558 in layout_decl or finish_record_type to clear the bit_field indication if
2559 it is in fact not needed. */
2560 if (addressable >= 0
2561 && size
2562 && TREE_CODE (size) == INTEGER_CST
6249559b
EB
2563 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2564 && (!tree_int_cst_equal (size, TYPE_SIZE (type))
2565 || (pos && !value_factor_p (pos, TYPE_ALIGN (type)))
a1ab4c31
AC
2566 || packed
2567 || (TYPE_ALIGN (record_type) != 0
6249559b 2568 && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))))
a1ab4c31
AC
2569 {
2570 DECL_BIT_FIELD (field_decl) = 1;
2571 DECL_SIZE (field_decl) = size;
2572 if (!packed && !pos)
feec4372
EB
2573 {
2574 if (TYPE_ALIGN (record_type) != 0
6249559b 2575 && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))
feec4372
EB
2576 DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
2577 else
6249559b 2578 DECL_ALIGN (field_decl) = TYPE_ALIGN (type);
feec4372 2579 }
a1ab4c31
AC
2580 }
2581
2582 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
2583
2584 /* Bump the alignment if need be, either for bitfield/packing purposes or
2585 to satisfy the type requirements if no such consideration applies. When
2586 we get the alignment from the type, indicate if this is from an explicit
2587 user request, which prevents stor-layout from lowering it later on. */
2588 {
d9223014 2589 unsigned int bit_align
a1ab4c31 2590 = (DECL_BIT_FIELD (field_decl) ? 1
6249559b 2591 : packed && TYPE_MODE (type) != BLKmode ? BITS_PER_UNIT : 0);
a1ab4c31
AC
2592
2593 if (bit_align > DECL_ALIGN (field_decl))
2594 DECL_ALIGN (field_decl) = bit_align;
6249559b 2595 else if (!bit_align && TYPE_ALIGN (type) > DECL_ALIGN (field_decl))
a1ab4c31 2596 {
6249559b
EB
2597 DECL_ALIGN (field_decl) = TYPE_ALIGN (type);
2598 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (type);
a1ab4c31
AC
2599 }
2600 }
2601
2602 if (pos)
2603 {
2604 /* We need to pass in the alignment the DECL is known to have.
2605 This is the lowest-order bit set in POS, but no more than
2606 the alignment of the record, if one is specified. Note
2607 that an alignment of 0 is taken as infinite. */
2608 unsigned int known_align;
2609
cc269bb6 2610 if (tree_fits_uhwi_p (pos))
ae7e9ddd 2611 known_align = tree_to_uhwi (pos) & - tree_to_uhwi (pos);
a1ab4c31
AC
2612 else
2613 known_align = BITS_PER_UNIT;
2614
2615 if (TYPE_ALIGN (record_type)
2616 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
2617 known_align = TYPE_ALIGN (record_type);
2618
2619 layout_decl (field_decl, known_align);
2620 SET_DECL_OFFSET_ALIGN (field_decl,
cc269bb6 2621 tree_fits_uhwi_p (pos) ? BIGGEST_ALIGNMENT
a1ab4c31
AC
2622 : BITS_PER_UNIT);
2623 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
2624 &DECL_FIELD_BIT_OFFSET (field_decl),
2625 DECL_OFFSET_ALIGN (field_decl), pos);
a1ab4c31
AC
2626 }
2627
2628 /* In addition to what our caller says, claim the field is addressable if we
2629 know that its type is not suitable.
2630
2631 The field may also be "technically" nonaddressable, meaning that even if
2632 we attempt to take the field's address we will actually get the address
2633 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
2634 value we have at this point is not accurate enough, so we don't account
2635 for this here and let finish_record_type decide. */
6249559b 2636 if (!addressable && !type_for_nonaliased_component_p (type))
a1ab4c31
AC
2637 addressable = 1;
2638
2639 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
2640
2641 return field_decl;
2642}
2643\f
6249559b
EB
2644/* Return a PARM_DECL node. NAME is the name of the parameter and TYPE is
2645 its type. READONLY is true if the parameter is readonly (either an In
2646 parameter or an address of a pass-by-ref parameter). */
a1ab4c31
AC
2647
2648tree
6249559b 2649create_param_decl (tree name, tree type, bool readonly)
a1ab4c31 2650{
6249559b 2651 tree param_decl = build_decl (input_location, PARM_DECL, name, type);
a1ab4c31 2652
a8e05f92
EB
2653 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
2654 can lead to various ABI violations. */
2655 if (targetm.calls.promote_prototypes (NULL_TREE)
6249559b
EB
2656 && INTEGRAL_TYPE_P (type)
2657 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node))
a1ab4c31
AC
2658 {
2659 /* We have to be careful about biased types here. Make a subtype
2660 of integer_type_node with the proper biasing. */
6249559b
EB
2661 if (TREE_CODE (type) == INTEGER_TYPE
2662 && TYPE_BIASED_REPRESENTATION_P (type))
a1ab4c31 2663 {
84fb43a1
EB
2664 tree subtype
2665 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
c1abd261
EB
2666 TREE_TYPE (subtype) = integer_type_node;
2667 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
6249559b
EB
2668 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (type));
2669 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (type));
2670 type = subtype;
a1ab4c31
AC
2671 }
2672 else
6249559b 2673 type = integer_type_node;
a1ab4c31
AC
2674 }
2675
6249559b 2676 DECL_ARG_TYPE (param_decl) = type;
a1ab4c31
AC
2677 TREE_READONLY (param_decl) = readonly;
2678 return param_decl;
2679}
2680\f
74746d49
EB
2681/* Process the attributes in ATTR_LIST for NODE, which is either a DECL or
2682 a TYPE. If IN_PLACE is true, the tree pointed to by NODE should not be
2683 changed. GNAT_NODE is used for the position of error messages. */
a1ab4c31 2684
74746d49
EB
2685void
2686process_attributes (tree *node, struct attrib **attr_list, bool in_place,
2687 Node_Id gnat_node)
a1ab4c31 2688{
74746d49
EB
2689 struct attrib *attr;
2690
2691 for (attr = *attr_list; attr; attr = attr->next)
2692 switch (attr->type)
a1ab4c31
AC
2693 {
2694 case ATTR_MACHINE_ATTRIBUTE:
74746d49
EB
2695 Sloc_to_locus (Sloc (gnat_node), &input_location);
2696 decl_attributes (node, tree_cons (attr->name, attr->args, NULL_TREE),
2697 in_place ? ATTR_FLAG_TYPE_IN_PLACE : 0);
a1ab4c31
AC
2698 break;
2699
2700 case ATTR_LINK_ALIAS:
74746d49 2701 if (!DECL_EXTERNAL (*node))
a1ab4c31 2702 {
74746d49
EB
2703 TREE_STATIC (*node) = 1;
2704 assemble_alias (*node, attr->name);
a1ab4c31
AC
2705 }
2706 break;
2707
2708 case ATTR_WEAK_EXTERNAL:
2709 if (SUPPORTS_WEAK)
74746d49 2710 declare_weak (*node);
a1ab4c31
AC
2711 else
2712 post_error ("?weak declarations not supported on this target",
74746d49 2713 attr->error_point);
a1ab4c31
AC
2714 break;
2715
2716 case ATTR_LINK_SECTION:
677f3fa8 2717 if (targetm_common.have_named_sections)
a1ab4c31 2718 {
0ab75824 2719 set_decl_section_name (*node, IDENTIFIER_POINTER (attr->name));
74746d49 2720 DECL_COMMON (*node) = 0;
a1ab4c31
AC
2721 }
2722 else
2723 post_error ("?section attributes are not supported for this target",
74746d49 2724 attr->error_point);
a1ab4c31
AC
2725 break;
2726
2727 case ATTR_LINK_CONSTRUCTOR:
74746d49
EB
2728 DECL_STATIC_CONSTRUCTOR (*node) = 1;
2729 TREE_USED (*node) = 1;
a1ab4c31
AC
2730 break;
2731
2732 case ATTR_LINK_DESTRUCTOR:
74746d49
EB
2733 DECL_STATIC_DESTRUCTOR (*node) = 1;
2734 TREE_USED (*node) = 1;
a1ab4c31 2735 break;
40a14772
TG
2736
2737 case ATTR_THREAD_LOCAL_STORAGE:
56363ffd 2738 set_decl_tls_model (*node, decl_default_tls_model (*node));
74746d49 2739 DECL_COMMON (*node) = 0;
40a14772 2740 break;
a1ab4c31 2741 }
74746d49
EB
2742
2743 *attr_list = NULL;
a1ab4c31 2744}
a1ab4c31
AC
2745
2746/* Return true if VALUE is a known to be a multiple of FACTOR, which must be
2747 a power of 2. */
2748
2749bool
2750value_factor_p (tree value, HOST_WIDE_INT factor)
2751{
cc269bb6 2752 if (tree_fits_uhwi_p (value))
ae7e9ddd 2753 return tree_to_uhwi (value) % factor == 0;
a1ab4c31
AC
2754
2755 if (TREE_CODE (value) == MULT_EXPR)
2756 return (value_factor_p (TREE_OPERAND (value, 0), factor)
2757 || value_factor_p (TREE_OPERAND (value, 1), factor));
2758
2759 return false;
2760}
2761
e8fa3dcd
PMR
2762/* Return whether GNAT_NODE is a defining identifier for a renaming that comes
2763 from the parameter association for the instantiation of a generic. We do
2764 not want to emit source location for them: the code generated for their
2765 initialization is likely to disturb debugging. */
2766
2767bool
2768renaming_from_generic_instantiation_p (Node_Id gnat_node)
2769{
2770 if (Nkind (gnat_node) != N_Defining_Identifier
2771 || !IN (Ekind (gnat_node), Object_Kind)
2772 || Comes_From_Source (gnat_node)
2773 || !Present (Renamed_Object (gnat_node)))
2774 return false;
2775
2776 /* Get the object declaration of the renamed object, if any and if the
2777 renamed object is a mere identifier. */
2778 gnat_node = Renamed_Object (gnat_node);
2779 if (Nkind (gnat_node) != N_Identifier)
2780 return false;
2781
2782 gnat_node = Entity (gnat_node);
2783 if (!Present (Parent (gnat_node)))
2784 return false;
2785
2786 gnat_node = Parent (gnat_node);
2787 return
2788 (Present (gnat_node)
2789 && Nkind (gnat_node) == N_Object_Declaration
2790 && Present (Corresponding_Generic_Association (gnat_node)));
2791}
2792
9a30c7c4
AC
2793/* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
2794 feed it with the elaboration of GNAT_SCOPE. */
2795
2796static struct deferred_decl_context_node *
2797add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global)
2798{
2799 struct deferred_decl_context_node *new_node;
2800
2801 new_node
2802 = (struct deferred_decl_context_node * ) xmalloc (sizeof (*new_node));
2803 new_node->decl = decl;
2804 new_node->gnat_scope = gnat_scope;
2805 new_node->force_global = force_global;
2806 new_node->types.create (1);
2807 new_node->next = deferred_decl_context_queue;
2808 deferred_decl_context_queue = new_node;
2809 return new_node;
2810}
2811
2812/* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
2813 feed it with the DECL_CONTEXT computed as part of N as soon as it is
2814 computed. */
2815
2816static void
2817add_deferred_type_context (struct deferred_decl_context_node *n, tree type)
2818{
2819 n->types.safe_push (type);
2820}
2821
2822/* Get the GENERIC node corresponding to GNAT_SCOPE, if available. Return
2823 NULL_TREE if it is not available. */
2824
2825static tree
2826compute_deferred_decl_context (Entity_Id gnat_scope)
2827{
2828 tree context;
2829
2830 if (present_gnu_tree (gnat_scope))
2831 context = get_gnu_tree (gnat_scope);
2832 else
2833 return NULL_TREE;
2834
2835 if (TREE_CODE (context) == TYPE_DECL)
2836 {
2837 const tree context_type = TREE_TYPE (context);
2838
2839 /* Skip dummy types: only the final ones can appear in the context
2840 chain. */
2841 if (TYPE_DUMMY_P (context_type))
2842 return NULL_TREE;
2843
2844 /* ..._TYPE nodes are more useful than TYPE_DECL nodes in the context
2845 chain. */
2846 else
2847 context = context_type;
2848 }
2849
2850 return context;
2851}
2852
2853/* Try to process all deferred nodes in the queue. Keep in the queue the ones
2854 that cannot be processed yet, remove the other ones. If FORCE is true,
2855 force the processing for all nodes, use the global context when nodes don't
2856 have a GNU translation. */
2857
2858void
2859process_deferred_decl_context (bool force)
2860{
2861 struct deferred_decl_context_node **it = &deferred_decl_context_queue;
2862 struct deferred_decl_context_node *node;
2863
2864 while (*it != NULL)
2865 {
2866 bool processed = false;
2867 tree context = NULL_TREE;
2868 Entity_Id gnat_scope;
2869
2870 node = *it;
2871
2872 /* If FORCE, get the innermost elaborated scope. Otherwise, just try to
2873 get the first scope. */
2874 gnat_scope = node->gnat_scope;
2875 while (Present (gnat_scope))
2876 {
2877 context = compute_deferred_decl_context (gnat_scope);
2878 if (!force || context != NULL_TREE)
2879 break;
2880 gnat_scope = get_debug_scope (gnat_scope, NULL);
2881 }
2882
2883 /* Imported declarations must not be in a local context (i.e. not inside
2884 a function). */
2885 if (context != NULL_TREE && node->force_global > 0)
2886 {
2887 tree ctx = context;
2888
2889 while (ctx != NULL_TREE)
2890 {
2891 gcc_assert (TREE_CODE (ctx) != FUNCTION_DECL);
2892 ctx = (DECL_P (ctx))
2893 ? DECL_CONTEXT (ctx)
2894 : TYPE_CONTEXT (ctx);
2895 }
2896 }
2897
2898 /* If FORCE, we want to get rid of all nodes in the queue: in case there
2899 was no elaborated scope, use the global context. */
2900 if (force && context == NULL_TREE)
2901 context = get_global_context ();
2902
2903 if (context != NULL_TREE)
2904 {
2905 tree t;
2906 int i;
2907
2908 DECL_CONTEXT (node->decl) = context;
2909
2910 /* Propagate it to the TYPE_CONTEXT attributes of the requested
2911 ..._TYPE nodes. */
2912 FOR_EACH_VEC_ELT (node->types, i, t)
2913 {
24d4b3d5 2914 gnat_set_type_context (t, context);
9a30c7c4
AC
2915 }
2916 processed = true;
2917 }
2918
2919 /* If this node has been successfuly processed, remove it from the
2920 queue. Then move to the next node. */
2921 if (processed)
2922 {
2923 *it = node->next;
2924 node->types.release ();
2925 free (node);
2926 }
2927 else
2928 it = &node->next;
2929 }
2930}
2931
2932
5c475ba9
EB
2933/* Return VALUE scaled by the biggest power-of-2 factor of EXPR. */
2934
2935static unsigned int
2936scale_by_factor_of (tree expr, unsigned int value)
2937{
3b5d86ec
PMR
2938 unsigned HOST_WIDE_INT addend = 0;
2939 unsigned HOST_WIDE_INT factor = 1;
2940
2941 /* Peel conversions around EXPR and try to extract bodies from function
2942 calls: it is possible to get the scale factor from size functions. */
5c475ba9 2943 expr = remove_conversions (expr, true);
3b5d86ec
PMR
2944 if (TREE_CODE (expr) == CALL_EXPR)
2945 expr = maybe_inline_call_in_expr (expr);
2946
2947 /* Sometimes we get PLUS_EXPR (BIT_AND_EXPR (..., X), Y), where Y is a
2948 multiple of the scale factor we are looking for. */
2949 if (TREE_CODE (expr) == PLUS_EXPR
2950 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2951 && tree_fits_uhwi_p (TREE_OPERAND (expr, 1)))
2952 {
2953 addend = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
2954 expr = TREE_OPERAND (expr, 0);
2955 }
5c475ba9
EB
2956
2957 /* An expression which is a bitwise AND with a mask has a power-of-2 factor
2958 corresponding to the number of trailing zeros of the mask. */
2959 if (TREE_CODE (expr) == BIT_AND_EXPR
2960 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST)
2961 {
2962 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
2963 unsigned int i = 0;
2964
2965 while ((mask & 1) == 0 && i < HOST_BITS_PER_WIDE_INT)
2966 {
2967 mask >>= 1;
3b5d86ec 2968 factor *= 2;
5c475ba9
EB
2969 i++;
2970 }
2971 }
2972
3b5d86ec
PMR
2973 /* If the addend is not a multiple of the factor we found, give up. In
2974 theory we could find a smaller common factor but it's useless for our
2975 needs. This situation arises when dealing with a field F1 with no
2976 alignment requirement but that is following a field F2 with such
2977 requirements. As long as we have F2's offset, we don't need alignment
2978 information to compute F1's. */
2979 if (addend % factor != 0)
2980 factor = 1;
2981
2982 return factor * value;
5c475ba9
EB
2983}
2984
7d7fcb08 2985/* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
a1ab4c31
AC
2986 unless we can prove these 2 fields are laid out in such a way that no gap
2987 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
2988 is the distance in bits between the end of PREV_FIELD and the starting
2989 position of CURR_FIELD. It is ignored if null. */
2990
2991static bool
2992potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
2993{
2994 /* If this is the first field of the record, there cannot be any gap */
2995 if (!prev_field)
2996 return false;
2997
78df6221 2998 /* If the previous field is a union type, then return false: The only
a1ab4c31
AC
2999 time when such a field is not the last field of the record is when
3000 there are other components at fixed positions after it (meaning there
3001 was a rep clause for every field), in which case we don't want the
3002 alignment constraint to override them. */
3003 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
3004 return false;
3005
3006 /* If the distance between the end of prev_field and the beginning of
3007 curr_field is constant, then there is a gap if the value of this
3008 constant is not null. */
cc269bb6 3009 if (offset && tree_fits_uhwi_p (offset))
a1ab4c31
AC
3010 return !integer_zerop (offset);
3011
3012 /* If the size and position of the previous field are constant,
3013 then check the sum of this size and position. There will be a gap
3014 iff it is not multiple of the current field alignment. */
cc269bb6
RS
3015 if (tree_fits_uhwi_p (DECL_SIZE (prev_field))
3016 && tree_fits_uhwi_p (bit_position (prev_field)))
ae7e9ddd
RS
3017 return ((tree_to_uhwi (bit_position (prev_field))
3018 + tree_to_uhwi (DECL_SIZE (prev_field)))
a1ab4c31
AC
3019 % DECL_ALIGN (curr_field) != 0);
3020
3021 /* If both the position and size of the previous field are multiples
3022 of the current field alignment, there cannot be any gap. */
3023 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
3024 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
3025 return false;
3026
3027 /* Fallback, return that there may be a potential gap */
3028 return true;
3029}
3030
6249559b
EB
3031/* Return a LABEL_DECL with NAME. GNAT_NODE is used for the position of
3032 the decl. */
a1ab4c31
AC
3033
3034tree
6249559b 3035create_label_decl (tree name, Node_Id gnat_node)
a1ab4c31 3036{
88a94e2b 3037 tree label_decl
6249559b 3038 = build_decl (input_location, LABEL_DECL, name, void_type_node);
a1ab4c31 3039
88a94e2b
EB
3040 DECL_MODE (label_decl) = VOIDmode;
3041
3042 /* Add this decl to the current binding level. */
3043 gnat_pushdecl (label_decl, gnat_node);
a1ab4c31
AC
3044
3045 return label_decl;
3046}
3047\f
6249559b
EB
3048/* Return a FUNCTION_DECL node. NAME is the name of the subprogram, ASM_NAME
3049 its assembler name, TYPE its type (a FUNCTION_TYPE node), PARAM_DECL_LIST
3050 the list of its parameters (a list of PARM_DECL nodes chained through the
3051 DECL_CHAIN field).
a1ab4c31 3052
2ade427a
EB
3053 INLINE_STATUS describes the inline flags to be set on the FUNCTION_DECL.
3054
3055 CONST_FLAG, PUBLIC_FLAG, EXTERN_FLAG, VOLATILE_FLAG are used to set the
3056 appropriate flags on the FUNCTION_DECL.
c1a569ef
EB
3057
3058 ARTIFICIAL_P is true if the subprogram was generated by the compiler.
3059
3060 DEBUG_INFO_P is true if we need to write debug information for it.
3061
2ade427a
EB
3062 ATTR_LIST is the list of attributes to be attached to the subprogram.
3063
c1a569ef 3064 GNAT_NODE is used for the position of the decl. */
a1ab4c31
AC
3065
3066tree
6249559b 3067create_subprog_decl (tree name, tree asm_name, tree type, tree param_decl_list,
2056c5ed
EB
3068 enum inline_status_t inline_status, bool const_flag,
3069 bool public_flag, bool extern_flag, bool volatile_flag,
3070 bool artificial_p, bool debug_info_p,
6249559b 3071 struct attrib *attr_list, Node_Id gnat_node)
a1ab4c31 3072{
6249559b
EB
3073 tree subprog_decl = build_decl (input_location, FUNCTION_DECL, name, type);
3074 tree result_decl
3075 = build_decl (input_location, RESULT_DECL, NULL_TREE, TREE_TYPE (type));
7d7fcb08 3076 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
a1ab4c31 3077
c1a569ef 3078 DECL_ARTIFICIAL (subprog_decl) = artificial_p;
7d7fcb08 3079 DECL_EXTERNAL (subprog_decl) = extern_flag;
0e24192c
EB
3080
3081 switch (inline_status)
3082 {
3083 case is_suppressed:
3084 DECL_UNINLINABLE (subprog_decl) = 1;
3085 break;
3086
3087 case is_disabled:
3088 break;
3089
f087ea44
AC
3090 case is_required:
3091 if (Back_End_Inlining)
1eb58520
AC
3092 decl_attributes (&subprog_decl,
3093 tree_cons (get_identifier ("always_inline"),
3094 NULL_TREE, NULL_TREE),
3095 ATTR_FLAG_TYPE_IN_PLACE);
3096
f087ea44
AC
3097 /* ... fall through ... */
3098
0e24192c
EB
3099 case is_enabled:
3100 DECL_DECLARED_INLINE_P (subprog_decl) = 1;
c1a569ef 3101 DECL_NO_INLINE_WARNING_P (subprog_decl) = artificial_p;
0e24192c
EB
3102 break;
3103
3104 default:
3105 gcc_unreachable ();
3106 }
7d7fcb08 3107
c1a569ef
EB
3108 if (!debug_info_p)
3109 DECL_IGNORED_P (subprog_decl) = 1;
3110
2056c5ed 3111 TREE_READONLY (subprog_decl) = TYPE_READONLY (type) | const_flag;
7d7fcb08 3112 TREE_PUBLIC (subprog_decl) = public_flag;
2056c5ed
EB
3113 TREE_SIDE_EFFECTS (subprog_decl)
3114 = TREE_THIS_VOLATILE (subprog_decl)
3115 = TYPE_VOLATILE (type) | volatile_flag;
a1ab4c31 3116
d47d0a8d
EB
3117 DECL_ARTIFICIAL (result_decl) = 1;
3118 DECL_IGNORED_P (result_decl) = 1;
6249559b 3119 DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (type);
d47d0a8d 3120 DECL_RESULT (subprog_decl) = result_decl;
a1ab4c31 3121
2b50232a
EB
3122 process_attributes (&subprog_decl, &attr_list, true, gnat_node);
3123
3124 /* Add this decl to the current binding level. */
3125 gnat_pushdecl (subprog_decl, gnat_node);
3126
a1ab4c31
AC
3127 if (asm_name)
3128 {
2b50232a
EB
3129 /* Let the target mangle the name if this isn't a verbatim asm. */
3130 if (*IDENTIFIER_POINTER (asm_name) != '*')
3131 asm_name = targetm.mangle_decl_assembler_name (subprog_decl, asm_name);
3132
a1ab4c31
AC
3133 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
3134
3135 /* The expand_main_function circuitry expects "main_identifier_node" to
3136 designate the DECL_NAME of the 'main' entry point, in turn expected
3137 to be declared as the "main" function literally by default. Ada
3138 program entry points are typically declared with a different name
3139 within the binder generated file, exported as 'main' to satisfy the
cfbb663c 3140 system expectations. Force main_identifier_node in this case. */
a1ab4c31 3141 if (asm_name == main_identifier_node)
cfbb663c 3142 DECL_NAME (subprog_decl) = main_identifier_node;
a1ab4c31
AC
3143 }
3144
a1ab4c31
AC
3145 /* Output the assembler code and/or RTL for the declaration. */
3146 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
3147
3148 return subprog_decl;
3149}
3150\f
3151/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
3152 body. This routine needs to be invoked before processing the declarations
3153 appearing in the subprogram. */
3154
3155void
3156begin_subprog_body (tree subprog_decl)
3157{
3158 tree param_decl;
3159
a1ab4c31
AC
3160 announce_function (subprog_decl);
3161
0ae44446
JR
3162 /* This function is being defined. */
3163 TREE_STATIC (subprog_decl) = 1;
3164
e2d13a4a
EB
3165 /* The failure of this assertion will likely come from a wrong context for
3166 the subprogram body, e.g. another procedure for a procedure declared at
3167 library level. */
3168 gcc_assert (current_function_decl == decl_function_context (subprog_decl));
3169
58c8f770
EB
3170 current_function_decl = subprog_decl;
3171
a1ab4c31
AC
3172 /* Enter a new binding level and show that all the parameters belong to
3173 this function. */
3174 gnat_pushlevel ();
a09d56d8 3175
a1ab4c31 3176 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
910ad8de 3177 param_decl = DECL_CHAIN (param_decl))
a1ab4c31
AC
3178 DECL_CONTEXT (param_decl) = subprog_decl;
3179
3180 make_decl_rtl (subprog_decl);
a1ab4c31
AC
3181}
3182
71196d4e 3183/* Finish translating the current subprogram and set its BODY. */
a1ab4c31
AC
3184
3185void
a406865a 3186end_subprog_body (tree body)
a1ab4c31
AC
3187{
3188 tree fndecl = current_function_decl;
3189
bd9c7fb9 3190 /* Attach the BLOCK for this level to the function and pop the level. */
a1ab4c31
AC
3191 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
3192 DECL_INITIAL (fndecl) = current_binding_level->block;
3193 gnat_poplevel ();
3194
a1ab4c31
AC
3195 /* Mark the RESULT_DECL as being in this subprogram. */
3196 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
3197
a963da4d
EB
3198 /* The body should be a BIND_EXPR whose BLOCK is the top-level one. */
3199 if (TREE_CODE (body) == BIND_EXPR)
3200 {
3201 BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
3202 DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
3203 }
3204
a1ab4c31
AC
3205 DECL_SAVED_TREE (fndecl) = body;
3206
228ee426 3207 current_function_decl = decl_function_context (fndecl);
71196d4e
EB
3208}
3209
3210/* Wrap up compilation of SUBPROG_DECL, a subprogram body. */
a1ab4c31 3211
71196d4e
EB
3212void
3213rest_of_subprog_body_compilation (tree subprog_decl)
3214{
a1ab4c31
AC
3215 /* We cannot track the location of errors past this point. */
3216 error_gnat_node = Empty;
3217
3218 /* If we're only annotating types, don't actually compile this function. */
3219 if (type_annotate_only)
3220 return;
3221
a406865a 3222 /* Dump functions before gimplification. */
71196d4e 3223 dump_function (TDI_original, subprog_decl);
a406865a 3224
228ee426 3225 if (!decl_function_context (subprog_decl))
3dafb85c 3226 cgraph_node::finalize_function (subprog_decl, false);
a1ab4c31
AC
3227 else
3228 /* Register this function with cgraph just far enough to get it
3229 added to our parent's nested function list. */
037e5573 3230 (void) cgraph_node::get_create (subprog_decl);
a1ab4c31
AC
3231}
3232
a1ab4c31
AC
/* Record DECL, a builtin function decl, in the current binding scope with
   no associated GNAT node, and return it (langhook contract).  */

tree
gnat_builtin_function (tree decl)
{
  /* Empty: this decl is compiler-generated, there is no GNAT node.  */
  gnat_pushdecl (decl, Empty);
  return decl;
}
3239
3240/* Return an integer type with the number of bits of precision given by
3241 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
3242 it is a signed type. */
3243
3244tree
3245gnat_type_for_size (unsigned precision, int unsignedp)
3246{
3247 tree t;
3248 char type_name[20];
3249
3250 if (precision <= 2 * MAX_BITS_PER_WORD
3251 && signed_and_unsigned_types[precision][unsignedp])
3252 return signed_and_unsigned_types[precision][unsignedp];
3253
3254 if (unsignedp)
3255 t = make_unsigned_type (precision);
3256 else
3257 t = make_signed_type (precision);
3258
3259 if (precision <= 2 * MAX_BITS_PER_WORD)
3260 signed_and_unsigned_types[precision][unsignedp] = t;
3261
3262 if (!TYPE_NAME (t))
3263 {
c3d79c60 3264 sprintf (type_name, "%sSIGNED_%u", unsignedp ? "UN" : "", precision);
a1ab4c31
AC
3265 TYPE_NAME (t) = get_identifier (type_name);
3266 }
3267
3268 return t;
3269}
3270
3271/* Likewise for floating-point types. */
3272
3273static tree
ef4bddc2 3274float_type_for_precision (int precision, machine_mode mode)
a1ab4c31
AC
3275{
3276 tree t;
3277 char type_name[20];
3278
3279 if (float_types[(int) mode])
3280 return float_types[(int) mode];
3281
3282 float_types[(int) mode] = t = make_node (REAL_TYPE);
3283 TYPE_PRECISION (t) = precision;
3284 layout_type (t);
3285
3286 gcc_assert (TYPE_MODE (t) == mode);
3287 if (!TYPE_NAME (t))
3288 {
3289 sprintf (type_name, "FLOAT_%d", precision);
3290 TYPE_NAME (t) = get_identifier (type_name);
3291 }
3292
3293 return t;
3294}
3295
3296/* Return a data type that has machine mode MODE. UNSIGNEDP selects
3297 an unsigned type; otherwise a signed type is returned. */
3298
3299tree
ef4bddc2 3300gnat_type_for_mode (machine_mode mode, int unsignedp)
a1ab4c31
AC
3301{
3302 if (mode == BLKmode)
3303 return NULL_TREE;
2799d18c
EB
3304
3305 if (mode == VOIDmode)
a1ab4c31 3306 return void_type_node;
2799d18c
EB
3307
3308 if (COMPLEX_MODE_P (mode))
a1ab4c31 3309 return NULL_TREE;
2799d18c
EB
3310
3311 if (SCALAR_FLOAT_MODE_P (mode))
a1ab4c31 3312 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2799d18c
EB
3313
3314 if (SCALAR_INT_MODE_P (mode))
a1ab4c31 3315 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2799d18c
EB
3316
3317 if (VECTOR_MODE_P (mode))
3318 {
ef4bddc2 3319 machine_mode inner_mode = GET_MODE_INNER (mode);
2799d18c
EB
3320 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
3321 if (inner_type)
3322 return build_vector_type_for_mode (inner_type, mode);
3323 }
3324
3325 return NULL_TREE;
a1ab4c31
AC
3326}
3327
3328/* Return the unsigned version of a TYPE_NODE, a scalar type. */
3329
3330tree
3331gnat_unsigned_type (tree type_node)
3332{
3333 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
3334
3335 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
3336 {
3337 type = copy_node (type);
3338 TREE_TYPE (type) = type_node;
3339 }
3340 else if (TREE_TYPE (type_node)
3341 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
3342 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
3343 {
3344 type = copy_node (type);
3345 TREE_TYPE (type) = TREE_TYPE (type_node);
3346 }
3347
3348 return type;
3349}
3350
3351/* Return the signed version of a TYPE_NODE, a scalar type. */
3352
3353tree
3354gnat_signed_type (tree type_node)
3355{
3356 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
3357
3358 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
3359 {
3360 type = copy_node (type);
3361 TREE_TYPE (type) = type_node;
3362 }
3363 else if (TREE_TYPE (type_node)
3364 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
3365 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
3366 {
3367 type = copy_node (type);
3368 TREE_TYPE (type) = TREE_TYPE (type_node);
3369 }
3370
3371 return type;
3372}
3373
/* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
   transparently converted to each other.  */

int
gnat_types_compatible_p (tree t1, tree t2)
{
  enum tree_code code;

  /* This is the default criterion.  */
  if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
    return 1;

  /* We only check structural equivalence here.  */
  if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
    return 0;

  /* Vector types are also compatible if they have the same number of subparts
     and the same form of (scalar) element type.  */
  if (code == VECTOR_TYPE
      && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
      && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
      && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
    return 1;

  /* Array types are also compatible if they are constrained and have the same
     domain(s), the same component type and the same scalar storage order.
     The domain test accepts either pointer-identical domains or domains
     whose constant bounds compare equal; the component test accepts either
     identical components or nested arrays that are recursively compatible.  */
  if (code == ARRAY_TYPE
      && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
	  || (TYPE_DOMAIN (t1)
	      && TYPE_DOMAIN (t2)
	      && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
				     TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
	      && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
				     TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
      && (TREE_TYPE (t1) == TREE_TYPE (t2)
	  || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
	      && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))))
      && TYPE_REVERSE_STORAGE_ORDER (t1) == TYPE_REVERSE_STORAGE_ORDER (t2))
    return 1;

  return 0;
}
523e82a7 3416
71196d4e
EB
3417/* Return true if EXPR is a useless type conversion. */
3418
3419bool
3420gnat_useless_type_conversion (tree expr)
3421{
3422 if (CONVERT_EXPR_P (expr)
3423 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
3424 || TREE_CODE (expr) == NON_LVALUE_EXPR)
3425 return gnat_types_compatible_p (TREE_TYPE (expr),
3426 TREE_TYPE (TREE_OPERAND (expr, 0)));
3427
3428 return false;
3429}
3430
523e82a7
EB
3431/* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
3432
3433bool
3434fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
3435 bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
3436{
3437 return TYPE_CI_CO_LIST (t) == cico_list
3438 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
3439 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
3440 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
3441}
a1ab4c31
AC
3442\f
/* EXP is an expression for the size of an object.  If this size contains
   discriminant references, replace them with the maximum (if MAX_P) or
   minimum (if !MAX_P) possible value of the discriminant.  */

tree
max_size (tree exp, bool max_p)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
    case tcc_constant:
      return exp;

    case tcc_vl_exp:
      if (code == CALL_EXPR)
	{
	  tree t, *argarray;
	  int n, i;

	  /* Try inlining the call first, then recurse into the result.  */
	  t = maybe_inline_call_in_expr (exp);
	  if (t)
	    return max_size (t, max_p);

	  /* Otherwise rebuild the call with max/min-substituted arguments.  */
	  n = call_expr_nargs (exp);
	  gcc_assert (n > 0);
	  argarray = XALLOCAVEC (tree, n);
	  for (i = 0; i < n; i++)
	    argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
	  return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
	}
      break;

    case tcc_reference:
      /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
	 modify.  Otherwise, we treat it like a variable.  */
      if (CONTAINS_PLACEHOLDER_P (exp))
	{
	  tree val_type = TREE_TYPE (TREE_OPERAND (exp, 1));
	  tree val = (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type));
	  return max_size (convert (get_base_type (val_type), val), true);
	}

      return exp;

    case tcc_comparison:
      /* A comparison contributes at most 1 to a size expression.  */
      return max_p ? size_one_node : size_zero_node;

    case tcc_unary:
      if (code == NON_LVALUE_EXPR)
	return max_size (TREE_OPERAND (exp, 0), max_p);

      /* Negation flips which extreme of the operand we want.  */
      return fold_build1 (code, type,
			  max_size (TREE_OPERAND (exp, 0),
				    code == NEGATE_EXPR ? !max_p : max_p));

    case tcc_binary:
      {
	tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
	/* Subtraction flips which extreme of the RHS we want.  */
	tree rhs = max_size (TREE_OPERAND (exp, 1),
			     code == MINUS_EXPR ? !max_p : max_p);

	/* Special-case wanting the maximum value of a MIN_EXPR.
	   In that case, if one side overflows, return the other.  */
	if (max_p && code == MIN_EXPR)
	  {
	    if (TREE_CODE (rhs) == INTEGER_CST && TREE_OVERFLOW (rhs))
	      return lhs;

	    if (TREE_CODE (lhs) == INTEGER_CST && TREE_OVERFLOW (lhs))
	      return rhs;
	  }

	/* Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
	   overflowing and the RHS a variable.  */
	if ((code == MINUS_EXPR || code == PLUS_EXPR)
	    && TREE_CODE (lhs) == INTEGER_CST
	    && TREE_OVERFLOW (lhs)
	    && TREE_CODE (rhs) != INTEGER_CST)
	  return lhs;

	/* If we are going to subtract a "negative" value in an unsigned type,
	   do the operation as an addition of the negated value, in order to
	   avoid creating a spurious overflow below.  */
	if (code == MINUS_EXPR
	    && TYPE_UNSIGNED (type)
	    && TREE_CODE (rhs) == INTEGER_CST
	    && !TREE_OVERFLOW (rhs)
	    && tree_int_cst_sign_bit (rhs) != 0)
	  {
	    rhs = fold_build1 (NEGATE_EXPR, type, rhs);
	    code = PLUS_EXPR;
	  }

	/* We need to detect overflows so we call size_binop here.  */
	return size_binop (code, lhs, rhs);
      }

    case tcc_expression:
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  /* A SAVE_EXPR must be left untouched to stay shared.  */
	  if (code == SAVE_EXPR)
	    return exp;

	  return fold_build1 (code, type,
			      max_size (TREE_OPERAND (exp, 0), max_p));

	case 2:
	  /* Only the value operand of a COMPOUND_EXPR matters here.  */
	  if (code == COMPOUND_EXPR)
	    return max_size (TREE_OPERAND (exp, 1), max_p);

	  return fold_build2 (code, type,
			      max_size (TREE_OPERAND (exp, 0), max_p),
			      max_size (TREE_OPERAND (exp, 1), max_p));

	case 3:
	  /* For a conditional, take the extremum of the two arms since we
	     cannot tell which one will be selected.  */
	  if (code == COND_EXPR)
	    return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
				max_size (TREE_OPERAND (exp, 1), max_p),
				max_size (TREE_OPERAND (exp, 2), max_p));

	default:
	  break;
	}

      /* Other tree classes cannot happen.  */
    default:
      break;
    }

  gcc_unreachable ();
}
3578\f
/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
   EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
   Return a constructor for the template.  */

tree
build_template (tree template_type, tree array_type, tree expr)
{
  vec<constructor_elt, va_gc> *template_elts = NULL;
  tree bound_list = NULL_TREE;
  tree field;

  /* Strip padding and justified-modular wrappers to get at the array.  */
  while (TREE_CODE (array_type) == RECORD_TYPE
	 && (TYPE_PADDING_P (array_type)
	     || TYPE_JUSTIFIED_MODULAR_P (array_type)))
    array_type = TREE_TYPE (TYPE_FIELDS (array_type));

  if (TREE_CODE (array_type) == ARRAY_TYPE
      || (TREE_CODE (array_type) == INTEGER_TYPE
	  && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
    bound_list = TYPE_ACTUAL_BOUNDS (array_type);

  /* First make the list for a CONSTRUCTOR for the template.  Go down the
     field list of the template instead of the type chain because this
     array might be an Ada array of arrays and we can't tell where the
     nested arrays stop being the underlying object.

     Note that the loop update advances FIELD by two (a min/max pair per
     dimension) while stepping either the bound list or the nested array
     type, whichever drives this template.  */

  for (field = TYPE_FIELDS (template_type); field;
       (bound_list
	? (bound_list = TREE_CHAIN (bound_list))
	: (array_type = TREE_TYPE (array_type))),
       field = DECL_CHAIN (DECL_CHAIN (field)))
    {
      tree bounds, min, max;

      /* If we have a bound list, get the bounds from there.  Likewise
	 for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
	 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
	 This will give us a maximum range.  */
      if (bound_list)
	bounds = TREE_VALUE (bound_list);
      else if (TREE_CODE (array_type) == ARRAY_TYPE)
	bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
      else if (expr && TREE_CODE (expr) == PARM_DECL
	       && DECL_BY_COMPONENT_PTR_P (expr))
	bounds = TREE_TYPE (field);
      else
	gcc_unreachable ();

      min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
      max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));

      /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
	 substitute it from OBJECT.  */
      min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
      max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);

      /* One (min, max) pair of fields per array dimension.  */
      CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
      CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
    }

  return gnat_build_constructor (template_type, template_elts);
}
3641\f
e63b36bd
EB
3642/* Return true if TYPE is suitable for the element type of a vector. */
3643
3644static bool
3645type_for_vector_element_p (tree type)
3646{
ef4bddc2 3647 machine_mode mode;
e63b36bd
EB
3648
3649 if (!INTEGRAL_TYPE_P (type)
3650 && !SCALAR_FLOAT_TYPE_P (type)
3651 && !FIXED_POINT_TYPE_P (type))
3652 return false;
3653
3654 mode = TYPE_MODE (type);
3655 if (GET_MODE_CLASS (mode) != MODE_INT
3656 && !SCALAR_FLOAT_MODE_P (mode)
3657 && !ALL_SCALAR_FIXED_POINT_MODE_P (mode))
3658 return false;
3659
3660 return true;
3661}
3662
3663/* Return a vector type given the SIZE and the INNER_TYPE, or NULL_TREE if
3664 this is not possible. If ATTRIBUTE is non-zero, we are processing the
3665 attribute declaration and want to issue error messages on failure. */
3666
3667static tree
3668build_vector_type_for_size (tree inner_type, tree size, tree attribute)
3669{
3670 unsigned HOST_WIDE_INT size_int, inner_size_int;
3671 int nunits;
3672
3673 /* Silently punt on variable sizes. We can't make vector types for them,
3674 need to ignore them on front-end generated subtypes of unconstrained
3675 base types, and this attribute is for binding implementors, not end
3676 users, so we should never get there from legitimate explicit uses. */
3677 if (!tree_fits_uhwi_p (size))
3678 return NULL_TREE;
3679 size_int = tree_to_uhwi (size);
3680
3681 if (!type_for_vector_element_p (inner_type))
3682 {
3683 if (attribute)
3684 error ("invalid element type for attribute %qs",
3685 IDENTIFIER_POINTER (attribute));
3686 return NULL_TREE;
3687 }
3688 inner_size_int = tree_to_uhwi (TYPE_SIZE_UNIT (inner_type));
3689
3690 if (size_int % inner_size_int)
3691 {
3692 if (attribute)
3693 error ("vector size not an integral multiple of component size");
3694 return NULL_TREE;
3695 }
3696
3697 if (size_int == 0)
3698 {
3699 if (attribute)
3700 error ("zero vector size");
3701 return NULL_TREE;
3702 }
3703
3704 nunits = size_int / inner_size_int;
3705 if (nunits & (nunits - 1))
3706 {
3707 if (attribute)
3708 error ("number of components of vector not a power of two");
3709 return NULL_TREE;
3710 }
3711
3712 return build_vector_type (inner_type, nunits);
3713}
3714
3715/* Return a vector type whose representative array type is ARRAY_TYPE, or
3716 NULL_TREE if this is not possible. If ATTRIBUTE is non-zero, we are
3717 processing the attribute and want to issue error messages on failure. */
3718
3719static tree
3720build_vector_type_for_array (tree array_type, tree attribute)
3721{
3722 tree vector_type = build_vector_type_for_size (TREE_TYPE (array_type),
3723 TYPE_SIZE_UNIT (array_type),
3724 attribute);
3725 if (!vector_type)
3726 return NULL_TREE;
3727
3728 TYPE_REPRESENTATIVE_ARRAY (vector_type) = array_type;
3729 return vector_type;
3730}
3731\f
928dfa4b
EB
3732/* Build a type to be used to represent an aliased object whose nominal type
3733 is an unconstrained array. This consists of a RECORD_TYPE containing a
3734 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
3735 If ARRAY_TYPE is that of an unconstrained array, this is used to represent
3736 an arbitrary unconstrained object. Use NAME as the name of the record.
3737 DEBUG_INFO_P is true if we need to write debug information for the type. */
a1ab4c31
AC
3738
3739tree
928dfa4b
EB
3740build_unc_object_type (tree template_type, tree object_type, tree name,
3741 bool debug_info_p)
a1ab4c31 3742{
24d4b3d5 3743 tree decl;
a1ab4c31 3744 tree type = make_node (RECORD_TYPE);
da01bfee
EB
3745 tree template_field
3746 = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
3747 NULL_TREE, NULL_TREE, 0, 1);
3748 tree array_field
3749 = create_field_decl (get_identifier ("ARRAY"), object_type, type,
3750 NULL_TREE, NULL_TREE, 0, 1);
a1ab4c31
AC
3751
3752 TYPE_NAME (type) = name;
3753 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
910ad8de 3754 DECL_CHAIN (template_field) = array_field;
928dfa4b
EB
3755 finish_record_type (type, template_field, 0, true);
3756
3757 /* Declare it now since it will never be declared otherwise. This is
3758 necessary to ensure that its subtrees are properly marked. */
24d4b3d5
AC
3759 decl = create_type_decl (name, type, true, debug_info_p, Empty);
3760
3761 /* template_type will not be used elsewhere than here, so to keep the debug
3762 info clean and in order to avoid scoping issues, make decl its
3763 context. */
3764 gnat_set_type_context (template_type, decl);
a1ab4c31
AC
3765
3766 return type;
3767}
3768
/* Same, taking a thin or fat pointer type instead of a template type.  */

tree
build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
				tree name, bool debug_info_p)
{
  tree template_type;

  gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));

  /* For a fat pointer, the template type is the type designated by the
     second field (the chain of the first); for a thin pointer, it is the
     type of the first field of the designated record.  */
  template_type
    = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
       ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
       : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));

  return
    build_unc_object_type (template_type, object_type, name, debug_info_p);
}
a1ab4c31 3787\f
229077b0
EB
/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
   In the normal case this is just two adjustments, but we have more to
   do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE.  */

void
update_pointer_to (tree old_type, tree new_type)
{
  tree ptr = TYPE_POINTER_TO (old_type);
  tree ref = TYPE_REFERENCE_TO (old_type);
  tree t;

  /* If this is the main variant, process all the other variants first.  */
  if (TYPE_MAIN_VARIANT (old_type) == old_type)
    for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
      update_pointer_to (t, new_type);

  /* If no pointers and no references, we are done.  */
  if (!ptr && !ref)
    return;

  /* Merge the old type qualifiers in the new type.

     Each old variant has qualifiers for specific reasons, and the new
     designated type as well.  Each set of qualifiers represents useful
     information grabbed at some point, and merging the two simply unifies
     these inputs into the final type description.

     Consider for instance a volatile type frozen after an access to constant
     type designating it; after the designated type's freeze, we get here with
     a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
     when the access type was processed.  We will make a volatile and readonly
     designated type, because that's what it really is.

     We might also get here for a non-dummy OLD_TYPE variant with different
     qualifiers than those of NEW_TYPE, for instance in some cases of pointers
     to private record type elaboration (see the comments around the call to
     this routine in gnat_to_gnu_entity <E_Access_Type>).  We have to merge
     the qualifiers in those cases too, to avoid accidentally discarding the
     initial set, and will often end up with OLD_TYPE == NEW_TYPE then.  */
  new_type
    = build_qualified_type (new_type,
			    TYPE_QUALS (old_type) | TYPE_QUALS (new_type));

  /* If old type and new type are identical, there is nothing to do.  */
  if (old_type == new_type)
    return;

  /* Otherwise, first handle the simple case.  */
  if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
    {
      tree new_ptr, new_ref;

      /* If pointer or reference already points to new type, nothing to do.
	 This can happen as update_pointer_to can be invoked multiple times
	 on the same couple of types because of the type variants.  */
      if ((ptr && TREE_TYPE (ptr) == new_type)
	  || (ref && TREE_TYPE (ref) == new_type))
	return;

      /* Chain PTR and its variants at the end.  */
      new_ptr = TYPE_POINTER_TO (new_type);
      if (new_ptr)
	{
	  while (TYPE_NEXT_PTR_TO (new_ptr))
	    new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
	  TYPE_NEXT_PTR_TO (new_ptr) = ptr;
	}
      else
	TYPE_POINTER_TO (new_type) = ptr;

      /* Now adjust them.  Also retarget the cached null-bounds fat pointer
	 constant, if present on a variant.  */
      for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
	for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
	  {
	    TREE_TYPE (t) = new_type;
	    if (TYPE_NULL_BOUNDS (t))
	      TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
	  }

      /* Chain REF and its variants at the end.  */
      new_ref = TYPE_REFERENCE_TO (new_type);
      if (new_ref)
	{
	  while (TYPE_NEXT_REF_TO (new_ref))
	    new_ref = TYPE_NEXT_REF_TO (new_ref);
	  TYPE_NEXT_REF_TO (new_ref) = ref;
	}
      else
	TYPE_REFERENCE_TO (new_type) = ref;

      /* Now adjust them.  */
      for (; ref; ref = TYPE_NEXT_REF_TO (ref))
	for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
	  TREE_TYPE (t) = new_type;

      TYPE_POINTER_TO (old_type) = NULL_TREE;
      TYPE_REFERENCE_TO (old_type) = NULL_TREE;
    }

  /* Now deal with the unconstrained array case.  In this case the pointer
     is actually a record where both fields are pointers to dummy nodes.
     Turn them into pointers to the correct types using update_pointer_to.
     Likewise for the pointer to the object record (thin pointer).  */
  else
    {
      tree new_ptr = TYPE_POINTER_TO (new_type);

      gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));

      /* If PTR already points to NEW_TYPE, nothing to do.  This can happen
	 since update_pointer_to can be invoked multiple times on the same
	 couple of types because of the type variants.  */
      if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
	return;

      /* First field: pointer to the array data.  */
      update_pointer_to
	(TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
	 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));

      /* Second field: pointer to the bounds (template).  */
      update_pointer_to
	(TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
	 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));

      update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
			 TYPE_OBJECT_RECORD_TYPE (new_type));

      TYPE_POINTER_TO (old_type) = NULL_TREE;
    }
}
3917\f
8df2e902
EB
/* Convert EXPR, a pointer to a constrained array, into a pointer to an
   unconstrained one.  This involves making or finding a template.  */

static tree
convert_to_fat_pointer (tree type, tree expr)
{
  /* TYPE's second field points to the template (bounds) type; its first
     field is the pointer-to-array type.  */
  tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
  tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
  tree etype = TREE_TYPE (expr);
  tree template_addr;
  vec<constructor_elt, va_gc> *v;
  vec_alloc (v, 2);

  /* If EXPR is null, make a fat pointer that contains a null pointer to the
     array (compare_fat_pointers ensures that this is the full discriminant)
     and a valid pointer to the bounds.  This latter property is necessary
     since the compiler can hoist the load of the bounds done through it.  */
  if (integer_zerop (expr))
    {
      tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
      tree null_bounds, t;

      /* Reuse the cached null-bounds object, or build and cache one.  */
      if (TYPE_NULL_BOUNDS (ptr_template_type))
	null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
      else
	{
	  /* The template type can still be dummy at this point so we build an
	     empty constructor.  The middle-end will fill it in with zeros.  */
	  t = build_constructor (template_type, NULL);
	  TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
	  null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
	  SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
	}

      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
			      fold_convert (p_array_type, null_pointer_node));
      CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
      t = build_constructor (type, v);
      /* Do not set TREE_CONSTANT so as to force T to static memory.  */
      TREE_CONSTANT (t) = 0;
      TREE_STATIC (t) = 1;

      return t;
    }

  /* If EXPR is a thin pointer, make template and data from the record.  */
  if (TYPE_IS_THIN_POINTER_P (etype))
    {
      tree field = TYPE_FIELDS (TREE_TYPE (etype));

      /* Protect EXPR against multiple evaluation below.  */
      expr = gnat_protect_expr (expr);

      /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
	 the thin pointer value has been shifted so we shift it back to get
	 the template address.  */
      if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)))
	{
	  template_addr
	    = build_binary_op (POINTER_PLUS_EXPR, etype, expr,
			       fold_build1 (NEGATE_EXPR, sizetype,
					    byte_position
					    (DECL_CHAIN (field))));
	  template_addr
	    = fold_convert (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))),
			    template_addr);
	}

      /* Otherwise we explicitly take the address of the fields.  */
      else
	{
	  expr = build_unary_op (INDIRECT_REF, NULL_TREE, expr);
	  template_addr
	    = build_unary_op (ADDR_EXPR, NULL_TREE,
			      build_component_ref (expr, field, false));
	  expr = build_unary_op (ADDR_EXPR, NULL_TREE,
				 build_component_ref (expr, DECL_CHAIN (field),
						      false));
	}
    }

  /* Otherwise, build the constructor for the template.  */
  else
    template_addr
      = build_unary_op (ADDR_EXPR, NULL_TREE,
			build_template (template_type, TREE_TYPE (etype),
					expr));

  /* The final result is a constructor for the fat pointer.

     If EXPR is an argument of a foreign convention subprogram, the type it
     points to is directly the component type.  In this case, the expression
     type may not match the corresponding FIELD_DECL type at this point, so we
     call "convert" here to fix that up if necessary.  This type consistency is
     required, for instance because it ensures that possible later folding of
     COMPONENT_REFs against this constructor always yields something of the
     same type as the initial reference.

     Note that the call to "build_template" above is still fine because it
     will only refer to the provided TEMPLATE_TYPE in this case.  */
  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), convert (p_array_type, expr));
  CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), template_addr);
  return gnat_build_constructor (type, v);
}
4021\f
a1ab4c31
AC
4022/* Create an expression whose value is that of EXPR,
4023 converted to type TYPE. The TREE_TYPE of the value
4024 is always TYPE. This function implements all reasonable
4025 conversions; callers should filter out those that are
4026 not permitted by the language being compiled. */
4027
4028tree
4029convert (tree type, tree expr)
4030{
a1ab4c31
AC
4031 tree etype = TREE_TYPE (expr);
4032 enum tree_code ecode = TREE_CODE (etype);
c34f3839 4033 enum tree_code code = TREE_CODE (type);
a1ab4c31 4034
c34f3839
EB
4035 /* If the expression is already of the right type, we are done. */
4036 if (etype == type)
a1ab4c31
AC
4037 return expr;
4038
4039 /* If both input and output have padding and are of variable size, do this
4040 as an unchecked conversion. Likewise if one is a mere variant of the
4041 other, so we avoid a pointless unpad/repad sequence. */
4042 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
315cff15 4043 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
a1ab4c31
AC
4044 && (!TREE_CONSTANT (TYPE_SIZE (type))
4045 || !TREE_CONSTANT (TYPE_SIZE (etype))
842d4ee2 4046 || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
a1ab4c31
AC
4047 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
4048 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
4049 ;
4050
f88facfe
EB
4051 /* If the output type has padding, convert to the inner type and make a
4052 constructor to build the record, unless a variable size is involved. */
315cff15 4053 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
a1ab4c31 4054 {
9771b263 4055 vec<constructor_elt, va_gc> *v;
0e228dd9 4056
a1ab4c31
AC
4057 /* If we previously converted from another type and our type is
4058 of variable size, remove the conversion to avoid the need for
f88facfe 4059 variable-sized temporaries. Likewise for a conversion between
a1ab4c31
AC
4060 original and packable version. */
4061 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4062 && (!TREE_CONSTANT (TYPE_SIZE (type))
4063 || (ecode == RECORD_TYPE
4064 && TYPE_NAME (etype)
4065 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
4066 expr = TREE_OPERAND (expr, 0);
4067
4068 /* If we are just removing the padding from expr, convert the original
4069 object if we have variable size in order to avoid the need for some
f88facfe 4070 variable-sized temporaries. Likewise if the padding is a variant
a1ab4c31
AC
4071 of the other, so we avoid a pointless unpad/repad sequence. */
4072 if (TREE_CODE (expr) == COMPONENT_REF
a1ab4c31
AC
4073 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
4074 && (!TREE_CONSTANT (TYPE_SIZE (type))
842d4ee2
EB
4075 || TYPE_MAIN_VARIANT (type)
4076 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))
a1ab4c31
AC
4077 || (ecode == RECORD_TYPE
4078 && TYPE_NAME (etype)
4079 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
4080 return convert (type, TREE_OPERAND (expr, 0));
4081
431cfac1
EB
4082 /* If the inner type is of self-referential size and the expression type
4083 is a record, do this as an unchecked conversion. But first pad the
4084 expression if possible to have the same size on both sides. */
c34f3839 4085 if (ecode == RECORD_TYPE
f88facfe 4086 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
431cfac1 4087 {
980a0501 4088 if (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST)
431cfac1 4089 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
980a0501
EB
4090 false, false, false, true),
4091 expr);
431cfac1
EB
4092 return unchecked_convert (type, expr, false);
4093 }
a1ab4c31 4094
f88facfe
EB
4095 /* If we are converting between array types with variable size, do the
4096 final conversion as an unchecked conversion, again to avoid the need
4097 for some variable-sized temporaries. If valid, this conversion is
4098 very likely purely technical and without real effects. */
c34f3839 4099 if (ecode == ARRAY_TYPE
f88facfe
EB
4100 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
4101 && !TREE_CONSTANT (TYPE_SIZE (etype))
4102 && !TREE_CONSTANT (TYPE_SIZE (type)))
4103 return unchecked_convert (type,
4104 convert (TREE_TYPE (TYPE_FIELDS (type)),
4105 expr),
4106 false);
4107
9771b263 4108 vec_alloc (v, 1);
0e228dd9
NF
4109 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4110 convert (TREE_TYPE (TYPE_FIELDS (type)), expr));
4111 return gnat_build_constructor (type, v);
a1ab4c31
AC
4112 }
4113
4114 /* If the input type has padding, remove it and convert to the output type.
4115 The conditions ordering is arranged to ensure that the output type is not
4116 a padding type here, as it is not clear whether the conversion would
4117 always be correct if this was to happen. */
315cff15 4118 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
a1ab4c31
AC
4119 {
4120 tree unpadded;
4121
4122 /* If we have just converted to this padded type, just get the
4123 inner expression. */
2117b9bb
EB
4124 if (TREE_CODE (expr) == CONSTRUCTOR)
4125 unpadded = CONSTRUCTOR_ELT (expr, 0)->value;
a1ab4c31
AC
4126
4127 /* Otherwise, build an explicit component reference. */
4128 else
64235766 4129 unpadded = build_component_ref (expr, TYPE_FIELDS (etype), false);
a1ab4c31
AC
4130
4131 return convert (type, unpadded);
4132 }
4133
4134 /* If the input is a biased type, adjust first. */
4135 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4136 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
1eb58520 4137 fold_convert (TREE_TYPE (etype), expr),
a1ab4c31 4138 fold_convert (TREE_TYPE (etype),
1eb58520 4139 TYPE_MIN_VALUE (etype))));
a1ab4c31
AC
4140
4141 /* If the input is a justified modular type, we need to extract the actual
4142 object before converting it to any other type with the exceptions of an
4143 unconstrained array or of a mere type variant. It is useful to avoid the
4144 extraction and conversion in the type variant case because it could end
4145	 up replacing a VAR_DECL expr by a constructor and we might be about to
4146 take the address of the result. */
4147 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
4148 && code != UNCONSTRAINED_ARRAY_TYPE
4149 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
64235766
EB
4150 return
4151 convert (type, build_component_ref (expr, TYPE_FIELDS (etype), false));
a1ab4c31
AC
4152
4153 /* If converting to a type that contains a template, convert to the data
4154 type and then build the template. */
4155 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
4156 {
910ad8de 4157 tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
9771b263
DN
4158 vec<constructor_elt, va_gc> *v;
4159 vec_alloc (v, 2);
a1ab4c31
AC
4160
4161 /* If the source already has a template, get a reference to the
4162 associated array only, as we are going to rebuild a template
4163 for the target type anyway. */
4164 expr = maybe_unconstrained_array (expr);
4165
0e228dd9
NF
4166 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4167 build_template (TREE_TYPE (TYPE_FIELDS (type)),
4168 obj_type, NULL_TREE));
73a1a803
EB
4169 if (expr)
4170 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
4171 convert (obj_type, expr));
0e228dd9 4172 return gnat_build_constructor (type, v);
a1ab4c31
AC
4173 }
4174
a1c7d797 4175 /* There are some cases of expressions that we process specially. */
a1ab4c31
AC
4176 switch (TREE_CODE (expr))
4177 {
4178 case ERROR_MARK:
4179 return expr;
4180
4181 case NULL_EXPR:
4182 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4183 conversion in gnat_expand_expr. NULL_EXPR does not represent
4184	 an actual value, so no conversion is needed.  */
4185 expr = copy_node (expr);
4186 TREE_TYPE (expr) = type;
4187 return expr;
4188
4189 case STRING_CST:
4190 /* If we are converting a STRING_CST to another constrained array type,
4191 just make a new one in the proper type. */
4192 if (code == ecode && AGGREGATE_TYPE_P (etype)
4193 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
4194 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
4195 {
4196 expr = copy_node (expr);
4197 TREE_TYPE (expr) = type;
4198 return expr;
4199 }
4200 break;
4201
7948ae37 4202 case VECTOR_CST:
44e9e3ec 4203 /* If we are converting a VECTOR_CST to a mere type variant, just make
7948ae37
OH
4204 a new one in the proper type. */
4205 if (code == ecode && gnat_types_compatible_p (type, etype))
4206 {
4207 expr = copy_node (expr);
4208 TREE_TYPE (expr) = type;
4209 return expr;
4210 }
4211
a1ab4c31 4212 case CONSTRUCTOR:
44e9e3ec
EB
4213 /* If we are converting a CONSTRUCTOR to a mere type variant, or to
4214 another padding type around the same type, just make a new one in
4215 the proper type. */
4216 if (code == ecode
4217 && (gnat_types_compatible_p (type, etype)
4218 || (code == RECORD_TYPE
4219 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4220 && TREE_TYPE (TYPE_FIELDS (type))
4221 == TREE_TYPE (TYPE_FIELDS (etype)))))
a1ab4c31
AC
4222 {
4223 expr = copy_node (expr);
4224 TREE_TYPE (expr) = type;
9771b263 4225 CONSTRUCTOR_ELTS (expr) = vec_safe_copy (CONSTRUCTOR_ELTS (expr));
a1ab4c31
AC
4226 return expr;
4227 }
4228
cb3d597d
EB
4229 /* Likewise for a conversion between original and packable version, or
4230 conversion between types of the same size and with the same list of
4231 fields, but we have to work harder to preserve type consistency. */
a1ab4c31
AC
4232 if (code == ecode
4233 && code == RECORD_TYPE
cb3d597d
EB
4234 && (TYPE_NAME (type) == TYPE_NAME (etype)
4235 || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
4236
a1ab4c31 4237 {
9771b263
DN
4238 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4239 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4240 vec<constructor_elt, va_gc> *v;
4241 vec_alloc (v, len);
a1ab4c31
AC
4242 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4243 unsigned HOST_WIDE_INT idx;
4244 tree index, value;
4245
db868e1e
OH
4246 /* Whether we need to clear TREE_CONSTANT et al. on the output
4247 constructor when we convert in place. */
4248 bool clear_constant = false;
4249
a1ab4c31
AC
4250 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
4251 {
44e9e3ec
EB
4252 /* Skip the missing fields in the CONSTRUCTOR. */
4253 while (efield && field && !SAME_FIELD_P (efield, index))
4254 {
4255 efield = DECL_CHAIN (efield);
4256 field = DECL_CHAIN (field);
4257 }
cb3d597d 4258 /* The field must be the same. */
44e9e3ec 4259 if (!(efield && field && SAME_FIELD_P (efield, field)))
a1ab4c31 4260 break;
44e9e3ec
EB
4261 constructor_elt elt
4262 = {field, convert (TREE_TYPE (field), value)};
9771b263 4263 v->quick_push (elt);
db868e1e
OH
4264
4265 /* If packing has made this field a bitfield and the input
4266 value couldn't be emitted statically any more, we need to
4267 clear TREE_CONSTANT on our output. */
ced57283
EB
4268 if (!clear_constant
4269 && TREE_CONSTANT (expr)
db868e1e
OH
4270 && !CONSTRUCTOR_BITFIELD_P (efield)
4271 && CONSTRUCTOR_BITFIELD_P (field)
4272 && !initializer_constant_valid_for_bitfield_p (value))
4273 clear_constant = true;
4274
910ad8de
NF
4275 efield = DECL_CHAIN (efield);
4276 field = DECL_CHAIN (field);
a1ab4c31
AC
4277 }
4278
db868e1e
OH
4279 /* If we have been able to match and convert all the input fields
4280 to their output type, convert in place now. We'll fallback to a
4281 view conversion downstream otherwise. */
a1ab4c31
AC
4282 if (idx == len)
4283 {
4284 expr = copy_node (expr);
4285 TREE_TYPE (expr) = type;
4286 CONSTRUCTOR_ELTS (expr) = v;
db868e1e 4287 if (clear_constant)
ced57283 4288 TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
a1ab4c31
AC
4289 return expr;
4290 }
4291 }
7948ae37
OH
4292
4293 /* Likewise for a conversion between array type and vector type with a
4294 compatible representative array. */
4295 else if (code == VECTOR_TYPE
4296 && ecode == ARRAY_TYPE
4297 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4298 etype))
4299 {
9771b263
DN
4300 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4301 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4302 vec<constructor_elt, va_gc> *v;
7948ae37
OH
4303 unsigned HOST_WIDE_INT ix;
4304 tree value;
4305
4306 /* Build a VECTOR_CST from a *constant* array constructor. */
4307 if (TREE_CONSTANT (expr))
4308 {
4309 bool constant_p = true;
4310
4311 /* Iterate through elements and check if all constructor
4312 elements are *_CSTs. */
4313 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4314 if (!CONSTANT_CLASS_P (value))
4315 {
4316 constant_p = false;
4317 break;
4318 }
4319
4320 if (constant_p)
4321 return build_vector_from_ctor (type,
4322 CONSTRUCTOR_ELTS (expr));
4323 }
4324
4325 /* Otherwise, build a regular vector constructor. */
9771b263 4326 vec_alloc (v, len);
7948ae37
OH
4327 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4328 {
f32682ca 4329 constructor_elt elt = {NULL_TREE, value};
9771b263 4330 v->quick_push (elt);
7948ae37
OH
4331 }
4332 expr = copy_node (expr);
4333 TREE_TYPE (expr) = type;
4334 CONSTRUCTOR_ELTS (expr) = v;
4335 return expr;
4336 }
a1ab4c31
AC
4337 break;
4338
4339 case UNCONSTRAINED_ARRAY_REF:
a1c7d797
EB
4340 /* First retrieve the underlying array. */
4341 expr = maybe_unconstrained_array (expr);
4342 etype = TREE_TYPE (expr);
4343 ecode = TREE_CODE (etype);
4344 break;
a1ab4c31
AC
4345
4346 case VIEW_CONVERT_EXPR:
4347 {
4348 /* GCC 4.x is very sensitive to type consistency overall, and view
4349 conversions thus are very frequent. Even though just "convert"ing
4350 the inner operand to the output type is fine in most cases, it
4351 might expose unexpected input/output type mismatches in special
4352 circumstances so we avoid such recursive calls when we can. */
4353 tree op0 = TREE_OPERAND (expr, 0);
4354
4355 /* If we are converting back to the original type, we can just
4356 lift the input conversion. This is a common occurrence with
4357 switches back-and-forth amongst type variants. */
4358 if (type == TREE_TYPE (op0))
4359 return op0;
4360
7948ae37
OH
4361 /* Otherwise, if we're converting between two aggregate or vector
4362 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4363 target type in place or to just convert the inner expression. */
4364 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4365 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
a1ab4c31
AC
4366 {
4367 /* If we are converting between mere variants, we can just
4368 substitute the VIEW_CONVERT_EXPR in place. */
4369 if (gnat_types_compatible_p (type, etype))
4370 return build1 (VIEW_CONVERT_EXPR, type, op0);
4371
4372 /* Otherwise, we may just bypass the input view conversion unless
4373 one of the types is a fat pointer, which is handled by
4374 specialized code below which relies on exact type matching. */
315cff15
EB
4375 else if (!TYPE_IS_FAT_POINTER_P (type)
4376 && !TYPE_IS_FAT_POINTER_P (etype))
a1ab4c31
AC
4377 return convert (type, op0);
4378 }
ad1d36ba
EB
4379
4380 break;
a1ab4c31 4381 }
a1ab4c31 4382
a1ab4c31
AC
4383 default:
4384 break;
4385 }
4386
4387 /* Check for converting to a pointer to an unconstrained array. */
315cff15 4388 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
a1ab4c31
AC
4389 return convert_to_fat_pointer (type, expr);
4390
7948ae37
OH
4391 /* If we are converting between two aggregate or vector types that are mere
4392 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4393 to a vector type from its representative array type. */
4394 else if ((code == ecode
4395 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4396 && gnat_types_compatible_p (type, etype))
4397 || (code == VECTOR_TYPE
4398 && ecode == ARRAY_TYPE
4399 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4400 etype)))
a1ab4c31
AC
4401 return build1 (VIEW_CONVERT_EXPR, type, expr);
4402
76af763d
EB
4403 /* If we are converting between tagged types, try to upcast properly. */
4404 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4405 && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type))
4406 {
4407 tree child_etype = etype;
4408 do {
4409 tree field = TYPE_FIELDS (child_etype);
4410 if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
64235766 4411 return build_component_ref (expr, field, false);
76af763d
EB
4412 child_etype = TREE_TYPE (field);
4413 } while (TREE_CODE (child_etype) == RECORD_TYPE);
4414 }
4415
bb1f7929
EB
4416 /* If we are converting from a smaller form of record type back to it, just
4417 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4418 size on both sides. */
4419 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4420 && smaller_form_type_p (etype, type))
4421 {
4422 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4423 false, false, false, true),
4424 expr);
4425 return build1 (VIEW_CONVERT_EXPR, type, expr);
4426 }
4427
a1ab4c31 4428 /* In all other cases of related types, make a NOP_EXPR. */
86060344 4429 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
a1ab4c31
AC
4430 return fold_convert (type, expr);
4431
4432 switch (code)
4433 {
4434 case VOID_TYPE:
4435 return fold_build1 (CONVERT_EXPR, type, expr);
4436
a1ab4c31
AC
4437 case INTEGER_TYPE:
4438 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4439 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4440 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4441 return unchecked_convert (type, expr, false);
4442 else if (TYPE_BIASED_REPRESENTATION_P (type))
4443 return fold_convert (type,
4444 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4445 convert (TREE_TYPE (type), expr),
1eb58520
AC
4446 convert (TREE_TYPE (type),
4447 TYPE_MIN_VALUE (type))));
a1ab4c31
AC
4448
4449 /* ... fall through ... */
4450
4451 case ENUMERAL_TYPE:
01ddebf2 4452 case BOOLEAN_TYPE:
a1ab4c31
AC
4453 /* If we are converting an additive expression to an integer type
4454 with lower precision, be wary of the optimization that can be
4455 applied by convert_to_integer. There are 2 problematic cases:
4456 - if the first operand was originally of a biased type,
4457 because we could be recursively called to convert it
4458 to an intermediate type and thus rematerialize the
4459 additive operator endlessly,
4460 - if the expression contains a placeholder, because an
4461 intermediate conversion that changes the sign could
4462 be inserted and thus introduce an artificial overflow
4463 at compile time when the placeholder is substituted. */
4464 if (code == INTEGER_TYPE
4465 && ecode == INTEGER_TYPE
4466 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4467 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4468 {
4469 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4470
4471 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4472 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4473 || CONTAINS_PLACEHOLDER_P (expr))
4474 return build1 (NOP_EXPR, type, expr);
4475 }
4476
4477 return fold (convert_to_integer (type, expr));
4478
4479 case POINTER_TYPE:
4480 case REFERENCE_TYPE:
0d7de0e1 4481 /* If converting between two thin pointers, adjust if needed to account
2b45154d
EB
4482 for differing offsets from the base pointer, depending on whether
4483 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type. */
315cff15 4484 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
a1ab4c31 4485 {
2b45154d
EB
4486 tree etype_pos
4487 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)) != NULL_TREE
4488 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype))))
4489 : size_zero_node;
4490 tree type_pos
4491 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != NULL_TREE
4492 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type))))
4493 : size_zero_node;
4494 tree byte_diff = size_diffop (type_pos, etype_pos);
0d7de0e1 4495
a1ab4c31 4496 expr = build1 (NOP_EXPR, type, expr);
a1ab4c31
AC
4497 if (integer_zerop (byte_diff))
4498 return expr;
4499
4500 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
0d7de0e1 4501 fold_convert (sizetype, byte_diff));
a1ab4c31
AC
4502 }
4503
0d7de0e1
EB
4504 /* If converting fat pointer to normal or thin pointer, get the pointer
4505 to the array and then convert it. */
4506 if (TYPE_IS_FAT_POINTER_P (etype))
64235766 4507 expr = build_component_ref (expr, TYPE_FIELDS (etype), false);
a1ab4c31
AC
4508
4509 return fold (convert_to_pointer (type, expr));
4510
4511 case REAL_TYPE:
4512 return fold (convert_to_real (type, expr));
4513
4514 case RECORD_TYPE:
4515 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
0e228dd9 4516 {
9771b263
DN
4517 vec<constructor_elt, va_gc> *v;
4518 vec_alloc (v, 1);
0e228dd9
NF
4519
4520 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4521 convert (TREE_TYPE (TYPE_FIELDS (type)),
4522 expr));
4523 return gnat_build_constructor (type, v);
4524 }
a1ab4c31
AC
4525
4526 /* ... fall through ... */
4527
4528 case ARRAY_TYPE:
4529 /* In these cases, assume the front-end has validated the conversion.
4530 If the conversion is valid, it will be a bit-wise conversion, so
4531 it can be viewed as an unchecked conversion. */
4532 return unchecked_convert (type, expr, false);
4533
4534 case UNION_TYPE:
4535    /* This is either a conversion between a tagged type and some
4536 subtype, which we have to mark as a UNION_TYPE because of
4537 overlapping fields or a conversion of an Unchecked_Union. */
4538 return unchecked_convert (type, expr, false);
4539
4540 case UNCONSTRAINED_ARRAY_TYPE:
7948ae37
OH
4541 /* If the input is a VECTOR_TYPE, convert to the representative
4542 array type first. */
4543 if (ecode == VECTOR_TYPE)
4544 {
4545 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
4546 etype = TREE_TYPE (expr);
4547 ecode = TREE_CODE (etype);
4548 }
4549
a1ab4c31
AC
4550 /* If EXPR is a constrained array, take its address, convert it to a
4551 fat pointer, and then dereference it. Likewise if EXPR is a
4552 record containing both a template and a constrained array.
4553 Note that a record representing a justified modular type
4554 always represents a packed constrained array. */
4555 if (ecode == ARRAY_TYPE
4556 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4557 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4558 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4559 return
4560 build_unary_op
4561 (INDIRECT_REF, NULL_TREE,
4562 convert_to_fat_pointer (TREE_TYPE (type),
4563 build_unary_op (ADDR_EXPR,
4564 NULL_TREE, expr)));
4565
4566 /* Do something very similar for converting one unconstrained
4567 array to another. */
4568 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4569 return
4570 build_unary_op (INDIRECT_REF, NULL_TREE,
4571 convert (TREE_TYPE (type),
4572 build_unary_op (ADDR_EXPR,
4573 NULL_TREE, expr)));
4574 else
4575 gcc_unreachable ();
4576
4577 case COMPLEX_TYPE:
4578 return fold (convert_to_complex (type, expr));
4579
4580 default:
4581 gcc_unreachable ();
4582 }
4583}
15bf7d19
EB
4584
/* Create an expression whose value is that of EXPR converted to the common
   index type, which is sizetype.  EXPR is supposed to be in the base type
   of the GNAT index type.  Calling it is equivalent to doing

     convert (sizetype, expr)

   but we try to distribute the type conversion with the knowledge that EXPR
   cannot overflow in its type.  This is a best-effort approach and we fall
   back to the above expression as soon as difficulties are encountered.

   This is necessary to overcome issues that arise when the GNAT base index
   type and the GCC common index type (sizetype) don't have the same size,
   which is quite frequent on 64-bit architectures.  In this case, and if
   the GNAT base index type is signed but the iteration type of the loop has
   been forced to unsigned, the loop scalar evolution engine cannot compute
   a simple evolution for the general induction variables associated with the
   array indices, because it will preserve the wrap-around semantics in the
   unsigned type of their "inner" part.  As a result, many loop optimizations
   are blocked.

   The solution is to use a special (basic) induction variable that is at
   least as large as sizetype, and to express the aforementioned general
   induction variables in terms of this induction variable, eliminating
   the problematic intermediate truncation to the GNAT base index type.
   This is possible as long as the original expression doesn't overflow
   and if the middle-end hasn't introduced artificial overflows in the
   course of the various simplification it can make to the expression.  */

tree
convert_to_index_type (tree expr)
{
  enum tree_code code = TREE_CODE (expr);
  tree type = TREE_TYPE (expr);

  /* If the type is unsigned, overflow is allowed so we cannot be sure that
     EXPR doesn't overflow.  Keep it simple if optimization is disabled.  */
  if (TYPE_UNSIGNED (type) || !optimize)
    return convert (sizetype, expr);

  switch (code)
    {
    case VAR_DECL:
      /* The main effect of the function: replace a loop parameter with its
	 associated special induction variable.  */
      if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
	expr = DECL_INDUCTION_VAR (expr);
      break;

    CASE_CONVERT:
      {
	tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
	/* Bail out as soon as we suspect some sort of type frobbing: only
	   look through conversions that change neither the precision nor
	   the signedness of the operand.  */
	if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
	    || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
	  break;
      }

      /* ... fall through ... */

    case NON_LVALUE_EXPR:
      /* Rebuild the conversion in sizetype, distributing ours inward.  */
      return fold_build1 (code, sizetype,
			  convert_to_index_type (TREE_OPERAND (expr, 0)));

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      /* Distribute the conversion into both operands of the arithmetic
	 operation and redo it in sizetype, where it cannot wrap around
	 since the original expression is assumed not to overflow.  */
      return fold_build2 (code, sizetype,
			  convert_to_index_type (TREE_OPERAND (expr, 0)),
			  convert_to_index_type (TREE_OPERAND (expr, 1)));

    case COMPOUND_EXPR:
      /* Only the value operand (the second one) needs converting; the
	 first operand is evaluated purely for its side effects.  */
      return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
			  convert_to_index_type (TREE_OPERAND (expr, 1)));

    case COND_EXPR:
      /* Convert both arms of the conditional; the condition itself keeps
	 its own type.  */
      return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
			  convert_to_index_type (TREE_OPERAND (expr, 1)),
			  convert_to_index_type (TREE_OPERAND (expr, 2)));

    default:
      break;
    }

  /* Fallback: a plain conversion, possibly of the induction variable
     substituted in the VAR_DECL case above.  */
  return convert (sizetype, expr);
}
a1ab4c31
AC
4670\f
4671/* Remove all conversions that are done in EXP. This includes converting
4672 from a padded type or to a justified modular type. If TRUE_ADDRESS
4673 is true, always return the address of the containing object even if
4674 the address is not bit-aligned. */
4675
4676tree
4677remove_conversions (tree exp, bool true_address)
4678{
4679 switch (TREE_CODE (exp))
4680 {
4681 case CONSTRUCTOR:
4682 if (true_address
4683 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4684 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4685 return
2117b9bb 4686 remove_conversions (CONSTRUCTOR_ELT (exp, 0)->value, true);
a1ab4c31
AC
4687 break;
4688
4689 case COMPONENT_REF:
315cff15 4690 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
a1ab4c31
AC
4691 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4692 break;
4693
a1ab4c31 4694 CASE_CONVERT:
722356ce
EB
4695 case VIEW_CONVERT_EXPR:
4696 case NON_LVALUE_EXPR:
a1ab4c31
AC
4697 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4698
4699 default:
4700 break;
4701 }
4702
4703 return exp;
4704}
4705\f
/* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
   refers to the underlying array.  If it has TYPE_CONTAINS_TEMPLATE_P,
   likewise return an expression pointing to the underlying array.
   May return NULL_TREE in the template case when the array initializer
   is a box (see below); callers must be prepared for that.  */

tree
maybe_unconstrained_array (tree exp)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE (type))
    {
    case UNCONSTRAINED_ARRAY_TYPE:
      if (code == UNCONSTRAINED_ARRAY_REF)
	{
	  /* Save these flags now: they live on the UNCONSTRAINED_ARRAY_REF
	     and must be transferred to the INDIRECT_REF built below.  */
	  const bool read_only = TREE_READONLY (exp);
	  const bool no_trap = TREE_THIS_NOTRAP (exp);

	  /* Operand 0 is the fat pointer underlying the reference.  */
	  exp = TREE_OPERAND (exp, 0);
	  type = TREE_TYPE (exp);

	  if (TREE_CODE (exp) == COND_EXPR)
	    {
	      /* Distribute the dereference into both arms of the
		 conditional, each time fetching the first field of the
		 fat pointer (the pointer to the array data).  */
	      tree op1
		= build_unary_op (INDIRECT_REF, NULL_TREE,
				  build_component_ref (TREE_OPERAND (exp, 1),
						       TYPE_FIELDS (type),
						       false));
	      tree op2
		= build_unary_op (INDIRECT_REF, NULL_TREE,
				  build_component_ref (TREE_OPERAND (exp, 2),
						       TYPE_FIELDS (type),
						       false));

	      exp = build3 (COND_EXPR,
			    TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
			    TREE_OPERAND (exp, 0), op1, op2);
	    }
	  else
	    {
	      /* Dereference the pointer-to-array field of the fat pointer
		 and restore the flags saved above on the result.  */
	      exp = build_unary_op (INDIRECT_REF, NULL_TREE,
				    build_component_ref (exp,
							 TYPE_FIELDS (type),
							 false));
	      TREE_READONLY (exp) = read_only;
	      TREE_THIS_NOTRAP (exp) = no_trap;
	    }
	}

      /* For a NULL_EXPR, just rebuild it with the underlying array type
	 extracted from the fat/thin pointer type.  */
      else if (code == NULL_EXPR)
	exp = build1 (NULL_EXPR,
		      TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
		      TREE_OPERAND (exp, 0));
      break;

    case RECORD_TYPE:
      /* If this is a padded type and it contains a template, convert to the
	 unpadded type first.  */
      if (TYPE_PADDING_P (type)
	  && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
	  && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
	{
	  exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
	  /* Refresh the cached code/type: EXP was just rebuilt.  */
	  code = TREE_CODE (exp);
	  type = TREE_TYPE (exp);
	}

      if (TYPE_CONTAINS_TEMPLATE_P (type))
	{
	  /* If the array initializer is a box, return NULL_TREE.  A
	     constructor with fewer than 2 elements lacks the array field,
	     which is the second field of the record (after the template).  */
	  if (code == CONSTRUCTOR && CONSTRUCTOR_NELTS (exp) < 2)
	    return NULL_TREE;

	  /* Fetch the second field, i.e. the array following the
	     template.  */
	  exp = build_component_ref (exp, DECL_CHAIN (TYPE_FIELDS (type)),
				     false);
	  type = TREE_TYPE (exp);

	  /* If the array type is padded, convert to the unpadded type.  */
	  if (TYPE_IS_PADDING_P (type))
	    exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
	}
      break;

    default:
      break;
    }

  return exp;
}
4795\f
afcea859 4796/* Return true if EXPR is an expression that can be folded as an operand
84fb43a1 4797 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
afcea859
EB
4798
4799static bool
4800can_fold_for_view_convert_p (tree expr)
4801{
4802 tree t1, t2;
4803
4804 /* The folder will fold NOP_EXPRs between integral types with the same
4805 precision (in the middle-end's sense). We cannot allow it if the
4806 types don't have the same precision in the Ada sense as well. */
4807 if (TREE_CODE (expr) != NOP_EXPR)
4808 return true;
4809
4810 t1 = TREE_TYPE (expr);
4811 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
4812
4813 /* Defer to the folder for non-integral conversions. */
4814 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
4815 return true;
4816
4817 /* Only fold conversions that preserve both precisions. */
4818 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
4819 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
4820 return true;
4821
4822 return false;
4823}
4824
a1ab4c31 4825/* Return an expression that does an unchecked conversion of EXPR to TYPE.
afcea859
EB
4826 If NOTRUNC_P is true, truncation operations should be suppressed.
4827
4828 Special care is required with (source or target) integral types whose
4829 precision is not equal to their size, to make sure we fetch or assign
4830 the value bits whose location might depend on the endianness, e.g.
4831
4832 Rmsize : constant := 8;
4833 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
4834
4835 type Bit_Array is array (1 .. Rmsize) of Boolean;
4836 pragma Pack (Bit_Array);
4837
4838 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
4839
4840 Value : Int := 2#1000_0001#;
4841 Vbits : Bit_Array := To_Bit_Array (Value);
4842
4843 we expect the 8 bits at Vbits'Address to always contain Value, while
4844 their original location depends on the endianness, at Value'Address
84fb43a1 4845 on a little-endian architecture but not on a big-endian one. */
a1ab4c31
AC
4846
4847tree
4848unchecked_convert (tree type, tree expr, bool notrunc_p)
4849{
4850 tree etype = TREE_TYPE (expr);
c34f3839
EB
4851 enum tree_code ecode = TREE_CODE (etype);
4852 enum tree_code code = TREE_CODE (type);
e63b36bd 4853 tree tem;
980a0501 4854 int c;
a1ab4c31 4855
c34f3839 4856 /* If the expression is already of the right type, we are done. */
a1ab4c31
AC
4857 if (etype == type)
4858 return expr;
4859
026c3cfd 4860 /* If both types are integral just do a normal conversion.
a1ab4c31 4861 Likewise for a conversion to an unconstrained array. */
1eb58520 4862 if (((INTEGRAL_TYPE_P (type)
0d7de0e1 4863 || (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
c34f3839 4864 || (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
1eb58520 4865 && (INTEGRAL_TYPE_P (etype)
315cff15 4866 || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
c34f3839
EB
4867 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
4868 || code == UNCONSTRAINED_ARRAY_TYPE)
a1ab4c31 4869 {
c34f3839 4870 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
a1ab4c31
AC
4871 {
4872 tree ntype = copy_type (etype);
a1ab4c31
AC
4873 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
4874 TYPE_MAIN_VARIANT (ntype) = ntype;
4875 expr = build1 (NOP_EXPR, ntype, expr);
4876 }
4877
c34f3839 4878 if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
a1ab4c31 4879 {
afcea859 4880 tree rtype = copy_type (type);
a1ab4c31
AC
4881 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
4882 TYPE_MAIN_VARIANT (rtype) = rtype;
afcea859
EB
4883 expr = convert (rtype, expr);
4884 expr = build1 (NOP_EXPR, type, expr);
a1ab4c31 4885 }
afcea859
EB
4886 else
4887 expr = convert (type, expr);
a1ab4c31
AC
4888 }
4889
afcea859 4890 /* If we are converting to an integral type whose precision is not equal
ee45a32d
EB
4891 to its size, first unchecked convert to a record type that contains a
4892 field of the given precision. Then extract the result from the field.
4893
4894 There is a subtlety if the source type is an aggregate type with reverse
4895 storage order because its representation is not contiguous in the native
4896 storage order, i.e. a direct unchecked conversion to an integral type
4897 with N bits of precision cannot read the first N bits of the aggregate
4898 type. To overcome it, we do an unchecked conversion to an integral type
4899 with reverse storage order and return the resulting value. This also
4900 ensures that the result of the unchecked conversion doesn't depend on
4901 the endianness of the target machine, but only on the storage order of
4902 the aggregate type.
4903
4904 Finally, for the sake of consistency, we do the unchecked conversion
4905 to an integral type with reverse storage order as soon as the source
4906 type is an aggregate type with reverse storage order, even if there
4907 are no considerations of precision or size involved. */
980a0501
EB
4908 else if (INTEGRAL_TYPE_P (type)
4909 && TYPE_RM_SIZE (type)
ee45a32d
EB
4910 && (0 != compare_tree_int (TYPE_RM_SIZE (type),
4911 GET_MODE_BITSIZE (TYPE_MODE (type)))
4912 || (AGGREGATE_TYPE_P (etype)
4913 && TYPE_REVERSE_STORAGE_ORDER (etype))))
a1ab4c31
AC
4914 {
4915 tree rec_type = make_node (RECORD_TYPE);
416de7d5
EB
4916 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
4917 tree field_type, field;
4918
ee45a32d
EB
4919 if (AGGREGATE_TYPE_P (etype))
4920 TYPE_REVERSE_STORAGE_ORDER (rec_type)
4921 = TYPE_REVERSE_STORAGE_ORDER (etype);
4922
416de7d5
EB
4923 if (TYPE_UNSIGNED (type))
4924 field_type = make_unsigned_type (prec);
4925 else
4926 field_type = make_signed_type (prec);
4927 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));
4928
4929 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
44e9e3ec 4930 NULL_TREE, bitsize_zero_node, 1, 0);
a1ab4c31 4931
44e9e3ec 4932 finish_record_type (rec_type, field, 1, false);
a1ab4c31
AC
4933
4934 expr = unchecked_convert (rec_type, expr, notrunc_p);
64235766 4935 expr = build_component_ref (expr, field, false);
416de7d5 4936 expr = fold_build1 (NOP_EXPR, type, expr);
a1ab4c31
AC
4937 }
4938
416de7d5
EB
4939 /* Similarly if we are converting from an integral type whose precision is
4940 not equal to its size, first copy into a field of the given precision
ee45a32d
EB
4941 and unchecked convert the record type.
4942
4943 The same considerations as above apply if the target type is an aggregate
4944 type with reverse storage order and we also proceed similarly. */
980a0501
EB
4945 else if (INTEGRAL_TYPE_P (etype)
4946 && TYPE_RM_SIZE (etype)
ee45a32d
EB
4947 && (0 != compare_tree_int (TYPE_RM_SIZE (etype),
4948 GET_MODE_BITSIZE (TYPE_MODE (etype)))
4949 || (AGGREGATE_TYPE_P (type)
4950 && TYPE_REVERSE_STORAGE_ORDER (type))))
a1ab4c31
AC
4951 {
4952 tree rec_type = make_node (RECORD_TYPE);
416de7d5 4953 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
9771b263
DN
4954 vec<constructor_elt, va_gc> *v;
4955 vec_alloc (v, 1);
416de7d5
EB
4956 tree field_type, field;
4957
ee45a32d
EB
4958 if (AGGREGATE_TYPE_P (type))
4959 TYPE_REVERSE_STORAGE_ORDER (rec_type)
4960 = TYPE_REVERSE_STORAGE_ORDER (type);
4961
416de7d5
EB
4962 if (TYPE_UNSIGNED (etype))
4963 field_type = make_unsigned_type (prec);
4964 else
4965 field_type = make_signed_type (prec);
4966 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));
4967
4968 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
44e9e3ec 4969 NULL_TREE, bitsize_zero_node, 1, 0);
a1ab4c31 4970
44e9e3ec 4971 finish_record_type (rec_type, field, 1, false);
a1ab4c31 4972
416de7d5 4973 expr = fold_build1 (NOP_EXPR, field_type, expr);
0e228dd9
NF
4974 CONSTRUCTOR_APPEND_ELT (v, field, expr);
4975 expr = gnat_build_constructor (rec_type, v);
a1ab4c31
AC
4976 expr = unchecked_convert (type, expr, notrunc_p);
4977 }
4978
980a0501
EB
4979 /* If we are converting from a scalar type to a type with a different size,
4980 we need to pad to have the same size on both sides.
4981
4982 ??? We cannot do it unconditionally because unchecked conversions are
4983 used liberally by the front-end to implement polymorphism, e.g. in:
4984
4985 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
4986 return p___size__4 (p__object!(S191s.all));
4987
4988 so we skip all expressions that are references. */
4989 else if (!REFERENCE_CLASS_P (expr)
4990 && !AGGREGATE_TYPE_P (etype)
4991 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
4992 && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
4993 {
4994 if (c < 0)
4995 {
4996 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4997 false, false, false, true),
4998 expr);
4999 expr = unchecked_convert (type, expr, notrunc_p);
5000 }
5001 else
5002 {
5003 tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
5004 false, false, false, true);
5005 expr = unchecked_convert (rec_type, expr, notrunc_p);
64235766 5006 expr = build_component_ref (expr, TYPE_FIELDS (rec_type), false);
980a0501
EB
5007 }
5008 }
5009
7948ae37
OH
5010 /* We have a special case when we are converting between two unconstrained
5011 array types. In that case, take the address, convert the fat pointer
5012 types, and dereference. */
c34f3839 5013 else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
a1ab4c31
AC
5014 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
5015 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
5016 build_unary_op (ADDR_EXPR, NULL_TREE,
5017 expr)));
7948ae37
OH
5018
5019 /* Another special case is when we are converting to a vector type from its
5020 representative array type; this a regular conversion. */
c34f3839
EB
5021 else if (code == VECTOR_TYPE
5022 && ecode == ARRAY_TYPE
7948ae37
OH
5023 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
5024 etype))
5025 expr = convert (type, expr);
5026
e63b36bd
EB
5027 /* And, if the array type is not the representative, we try to build an
5028 intermediate vector type of which the array type is the representative
5029 and to do the unchecked conversion between the vector types, in order
5030 to enable further simplifications in the middle-end. */
5031 else if (code == VECTOR_TYPE
5032 && ecode == ARRAY_TYPE
5033 && (tem = build_vector_type_for_array (etype, NULL_TREE)))
5034 {
5035 expr = convert (tem, expr);
5036 return unchecked_convert (type, expr, notrunc_p);
5037 }
5038
44e9e3ec
EB
5039 /* If we are converting a CONSTRUCTOR to a more aligned RECORD_TYPE, bump
5040 the alignment of the CONSTRUCTOR to speed up the copy operation. */
5041 else if (TREE_CODE (expr) == CONSTRUCTOR
5042 && code == RECORD_TYPE
5043 && TYPE_ALIGN (etype) < TYPE_ALIGN (type))
5044 {
5045 expr = convert (maybe_pad_type (etype, NULL_TREE, TYPE_ALIGN (type),
5046 Empty, false, false, false, true),
5047 expr);
5048 return unchecked_convert (type, expr, notrunc_p);
5049 }
5050
5051 /* Otherwise, just build a VIEW_CONVERT_EXPR of the expression. */
a1ab4c31
AC
5052 else
5053 {
5054 expr = maybe_unconstrained_array (expr);
5055 etype = TREE_TYPE (expr);
c34f3839 5056 ecode = TREE_CODE (etype);
afcea859
EB
5057 if (can_fold_for_view_convert_p (expr))
5058 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
5059 else
5060 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
a1ab4c31
AC
5061 }
5062
afcea859
EB
5063 /* If the result is an integral type whose precision is not equal to its
5064 size, sign- or zero-extend the result. We need not do this if the input
5065 is an integral type of the same precision and signedness or if the output
a1ab4c31
AC
5066 is a biased type or if both the input and output are unsigned. */
5067 if (!notrunc_p
5068 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
c34f3839 5069 && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
a1ab4c31
AC
5070 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
5071 GET_MODE_BITSIZE (TYPE_MODE (type)))
5072 && !(INTEGRAL_TYPE_P (etype)
5073 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
5074 && operand_equal_p (TYPE_RM_SIZE (type),
5075 (TYPE_RM_SIZE (etype) != 0
5076 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
5077 0))
5078 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
5079 {
c34f3839
EB
5080 tree base_type
5081 = gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
a1ab4c31
AC
5082 tree shift_expr
5083 = convert (base_type,
5084 size_binop (MINUS_EXPR,
5085 bitsize_int
5086 (GET_MODE_BITSIZE (TYPE_MODE (type))),
5087 TYPE_RM_SIZE (type)));
5088 expr
5089 = convert (type,
5090 build_binary_op (RSHIFT_EXPR, base_type,
5091 build_binary_op (LSHIFT_EXPR, base_type,
5092 convert (base_type, expr),
5093 shift_expr),
5094 shift_expr));
5095 }
5096
5097 /* An unchecked conversion should never raise Constraint_Error. The code
5098 below assumes that GCC's conversion routines overflow the same way that
5099 the underlying hardware does. This is probably true. In the rare case
5100 when it is false, we can rely on the fact that such conversions are
5101 erroneous anyway. */
5102 if (TREE_CODE (expr) == INTEGER_CST)
5103 TREE_OVERFLOW (expr) = 0;
5104
5105 /* If the sizes of the types differ and this is an VIEW_CONVERT_EXPR,
5106 show no longer constant. */
5107 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
5108 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
5109 OEP_ONLY_CONST))
5110 TREE_CONSTANT (expr) = 0;
5111
5112 return expr;
5113}
5114\f
/* Return the appropriate GCC tree code for the specified GNAT_TYPE,
   the latter being a record type as predicated by Is_Record_Type.  */

enum tree_code
tree_code_for_record_type (Entity_Id gnat_type)
{
  Node_Id component_list, component;

  /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
     fields are all in the variant part.  Otherwise, return RECORD_TYPE.  */
  if (!Is_Unchecked_Union (gnat_type))
    return RECORD_TYPE;

  /* Walk the components of the base type's declaration: any component
     found outside the variant part forces RECORD_TYPE.  */
  gnat_type = Implementation_Base_Type (gnat_type);
  component_list
    = Component_List (Type_Definition (Declaration_Node (gnat_type)));

  for (component = First_Non_Pragma (Component_Items (component_list));
       Present (component);
       component = Next_Non_Pragma (component))
    if (Ekind (Defining_Entity (component)) == E_Component)
      return RECORD_TYPE;

  /* All non-discriminant fields live in the variant part, so the type can
     be represented as a C-style union.  */
  return UNION_TYPE;
}
5140
caa9d12a
EB
5141/* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5142 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5143 according to the presence of an alignment clause on the type or, if it
5144 is an array, on the component type. */
5145
5146bool
5147is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
5148{
5149 gnat_type = Underlying_Type (gnat_type);
5150
5151 *align_clause = Present (Alignment_Clause (gnat_type));
5152
5153 if (Is_Array_Type (gnat_type))
5154 {
5155 gnat_type = Underlying_Type (Component_Type (gnat_type));
5156 if (Present (Alignment_Clause (gnat_type)))
5157 *align_clause = true;
5158 }
5159
5160 if (!Is_Floating_Point_Type (gnat_type))
5161 return false;
5162
5163 if (UI_To_Int (Esize (gnat_type)) != 64)
5164 return false;
5165
5166 return true;
5167}
5168
5169/* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5170 size is greater or equal to 64 bits, or an array of such a type. Set
5171 ALIGN_CLAUSE according to the presence of an alignment clause on the
5172 type or, if it is an array, on the component type. */
5173
5174bool
5175is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
5176{
5177 gnat_type = Underlying_Type (gnat_type);
5178
5179 *align_clause = Present (Alignment_Clause (gnat_type));
5180
5181 if (Is_Array_Type (gnat_type))
5182 {
5183 gnat_type = Underlying_Type (Component_Type (gnat_type));
5184 if (Present (Alignment_Clause (gnat_type)))
5185 *align_clause = true;
5186 }
5187
5188 if (!Is_Scalar_Type (gnat_type))
5189 return false;
5190
5191 if (UI_To_Int (Esize (gnat_type)) < 64)
5192 return false;
5193
5194 return true;
5195}
5196
a1ab4c31
AC
5197/* Return true if GNU_TYPE is suitable as the type of a non-aliased
5198 component of an aggregate type. */
5199
5200bool
5201type_for_nonaliased_component_p (tree gnu_type)
5202{
5203 /* If the type is passed by reference, we may have pointers to the
5204 component so it cannot be made non-aliased. */
5205 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
5206 return false;
5207
5208 /* We used to say that any component of aggregate type is aliased
5209 because the front-end may take 'Reference of it. The front-end
5210 has been enhanced in the meantime so as to use a renaming instead
5211 in most cases, but the back-end can probably take the address of
5212 such a component too so we go for the conservative stance.
5213
5214 For instance, we might need the address of any array type, even
5215 if normally passed by copy, to construct a fat pointer if the
5216 component is used as an actual for an unconstrained formal.
5217
5218 Likewise for record types: even if a specific record subtype is
5219 passed by copy, the parent type might be passed by ref (e.g. if
5220 it's of variable size) and we might take the address of a child
5221 component to pass to a parent formal. We have no way to check
5222 for such conditions here. */
5223 if (AGGREGATE_TYPE_P (gnu_type))
5224 return false;
5225
5226 return true;
5227}
5228
bb1f7929
EB
5229/* Return true if TYPE is a smaller form of ORIG_TYPE. */
5230
5231bool
5232smaller_form_type_p (tree type, tree orig_type)
5233{
5234 tree size, osize;
5235
5236 /* We're not interested in variants here. */
5237 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
5238 return false;
5239
5240 /* Like a variant, a packable version keeps the original TYPE_NAME. */
5241 if (TYPE_NAME (type) != TYPE_NAME (orig_type))
5242 return false;
5243
5244 size = TYPE_SIZE (type);
5245 osize = TYPE_SIZE (orig_type);
5246
5247 if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
5248 return false;
5249
5250 return tree_int_cst_lt (size, osize) != 0;
5251}
5252
/* Perform final processing on global declarations.  */

/* Dummy VAR_DECL used to anchor global types-used information; kept in GC
   space because it is referenced from the types_used_by_var_decl table.  */
static GTY (()) tree dummy_global;

void
gnat_write_global_declarations (void)
{
  unsigned int i;
  tree iter;

  /* If we have declared types as used at the global level, insert them in
     the global hash table.  We use a dummy variable for this purpose, but
     we need to build it unconditionally to avoid -fcompare-debug issues.  */
  if (first_global_object_name)
    {
      struct varpool_node *node;
      char *label;

      ASM_FORMAT_PRIVATE_NAME (label, first_global_object_name, 0);
      dummy_global
	= build_decl (BUILTINS_LOCATION, VAR_DECL, get_identifier (label),
		      void_type_node);
      /* Mark it as a hard register so no storage is ever emitted for it,
	 while still forcing the varpool node to be output.  */
      DECL_HARD_REGISTER (dummy_global) = 1;
      TREE_STATIC (dummy_global) = 1;
      node = varpool_node::get_create (dummy_global);
      node->definition = 1;
      node->force_output = 1;

      /* Transfer every pending type into the global table, keyed by the
	 dummy variable.  */
      if (types_used_by_cur_var_decl)
	while (!types_used_by_cur_var_decl->is_empty ())
	  {
	    tree t = types_used_by_cur_var_decl->pop ();
	    types_used_by_var_decl_insert (t, dummy_global);
	  }
    }

  /* Output debug information for all global type declarations first.  This
     ensures that global types whose compilation hasn't been finalized yet,
     for example pointers to Taft amendment types, have their compilation
     finalized in the right context.  */
  FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
    if (TREE_CODE (iter) == TYPE_DECL && !DECL_IGNORED_P (iter))
      debug_hooks->type_decl (iter, false);

  /* Then output the global variables.  We need to do that after the debug
     information for global types is emitted so that they are finalized.  */
  FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
    if (TREE_CODE (iter) == VAR_DECL)
      rest_of_decl_compilation (iter, true, 0);

  /* Output the imported modules/declarations.  In GNAT, these only
     materialize subprograms.  */
  FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
    if (TREE_CODE (iter) == IMPORTED_DECL && !DECL_IGNORED_P (iter))
      debug_hooks->imported_module_or_decl (iter, DECL_NAME (iter),
					    DECL_CONTEXT (iter), 0);
}
5310
5311/* ************************************************************************
5312 * * GCC builtins support *
5313 * ************************************************************************ */
5314
5315/* The general scheme is fairly simple:
5316
5317 For each builtin function/type to be declared, gnat_install_builtins calls
aef308d0 5318 internal facilities which eventually get to gnat_pushdecl, which in turn
a1ab4c31
AC
5319 tracks the so declared builtin function decls in the 'builtin_decls' global
5320 datastructure. When an Intrinsic subprogram declaration is processed, we
5321 search this global datastructure to retrieve the associated BUILT_IN DECL
5322 node. */
5323
5324/* Search the chain of currently available builtin declarations for a node
5325 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
5326 found, if any, or NULL_TREE otherwise. */
5327tree
5328builtin_decl_for (tree name)
5329{
5330 unsigned i;
5331 tree decl;
5332
9771b263 5333 FOR_EACH_VEC_SAFE_ELT (builtin_decls, i, decl)
a1ab4c31
AC
5334 if (DECL_NAME (decl) == name)
5335 return decl;
5336
5337 return NULL_TREE;
5338}
5339
5340/* The code below eventually exposes gnat_install_builtins, which declares
5341 the builtin types and functions we might need, either internally or as
5342 user accessible facilities.
5343
5344 ??? This is a first implementation shot, still in rough shape. It is
5345 heavily inspired from the "C" family implementation, with chunks copied
5346 verbatim from there.
5347
ba464315 5348 Two obvious improvement candidates are:
a1ab4c31
AC
5349 o Use a more efficient name/decl mapping scheme
5350 o Devise a middle-end infrastructure to avoid having to copy
5351 pieces between front-ends. */
5352
5353/* ----------------------------------------------------------------------- *
5354 * BUILTIN ELEMENTARY TYPES *
5355 * ----------------------------------------------------------------------- */
5356
/* Standard data types to be used in builtin argument declarations.  */

enum c_tree_index
{
  CTI_SIGNED_SIZE_TYPE, /* For format checking only.  */
  CTI_STRING_TYPE,
  CTI_CONST_STRING_TYPE,

  CTI_MAX
};

/* Trees indexed by c_tree_index, mirroring the C front-end's globals.  */
static tree c_global_trees[CTI_MAX];

#define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
#define string_type_node c_global_trees[CTI_STRING_TYPE]
#define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]

/* ??? In addition to some attribute handlers, we currently don't support a
   (small) number of builtin-types, which in turn inhibits support for a
   number of builtin functions.  */
#define wint_type_node    void_type_node
#define intmax_type_node  void_type_node
#define uintmax_type_node void_type_node
5380
5381/* Build the void_list_node (void_type_node having been created). */
5382
5383static tree
5384build_void_list_node (void)
5385{
5386 tree t = build_tree_list (NULL_TREE, void_type_node);
5387 return t;
5388}
5389
5390/* Used to help initialize the builtin-types.def table. When a type of
5391 the correct size doesn't exist, use error_mark_node instead of NULL.
5392 The later results in segfaults even when a decl using the type doesn't
5393 get invoked. */
5394
5395static tree
5396builtin_type_for_size (int size, bool unsignedp)
5397{
ced57283 5398 tree type = gnat_type_for_size (size, unsignedp);
a1ab4c31
AC
5399 return type ? type : error_mark_node;
5400}
5401
/* Build/push the elementary type decls that builtin functions/types
   will need.  */

static void
install_builtin_elementary_types (void)
{
  /* Signed counterpart of size_t, used for format checking.  */
  signed_size_type_node = gnat_signed_type (size_type_node);
  /* NOTE(review): pid_t is simply mapped to int here — assumes the target's
     pid_t fits in int; confirm against the builtins that use it.  */
  pid_type_node = integer_type_node;
  void_list_node = build_void_list_node ();

  /* char * and const char *, used pervasively by string builtins.  */
  string_type_node = build_pointer_type (char_type_node);
  const_string_type_node
    = build_pointer_type (build_qualified_type
			  (char_type_node, TYPE_QUAL_CONST));
}
5417
5418/* ----------------------------------------------------------------------- *
5419 * BUILTIN FUNCTION TYPES *
5420 * ----------------------------------------------------------------------- */
5421
/* Now, builtin function types per se.  */

/* Each DEF_* macro below temporarily expands to just the enumerator name,
   so including builtin-types.def generates one enumerator per builtin
   function type (the classic X-macro technique).  */
enum c_builtin_type
{
#define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
#define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
#define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
#define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
#define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
#define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
#define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
#define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6) NAME,
#define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6, ARG7) NAME,
#define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6, ARG7, ARG8) NAME,
#define DEF_FUNCTION_TYPE_9(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6, ARG7, ARG8, ARG9) NAME,
#define DEF_FUNCTION_TYPE_10(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			     ARG6, ARG7, ARG8, ARG9, ARG10) NAME,
#define DEF_FUNCTION_TYPE_11(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			     ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) NAME,
#define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
#define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
#define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
#define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
#define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
#define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
  NAME,
#define DEF_FUNCTION_TYPE_VAR_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
				ARG6) NAME,
#define DEF_FUNCTION_TYPE_VAR_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
				ARG6, ARG7) NAME,
#define DEF_POINTER_TYPE(NAME, TYPE) NAME,
#include "builtin-types.def"
#undef DEF_PRIMITIVE_TYPE
#undef DEF_FUNCTION_TYPE_0
#undef DEF_FUNCTION_TYPE_1
#undef DEF_FUNCTION_TYPE_2
#undef DEF_FUNCTION_TYPE_3
#undef DEF_FUNCTION_TYPE_4
#undef DEF_FUNCTION_TYPE_5
#undef DEF_FUNCTION_TYPE_6
#undef DEF_FUNCTION_TYPE_7
#undef DEF_FUNCTION_TYPE_8
#undef DEF_FUNCTION_TYPE_9
#undef DEF_FUNCTION_TYPE_10
#undef DEF_FUNCTION_TYPE_11
#undef DEF_FUNCTION_TYPE_VAR_0
#undef DEF_FUNCTION_TYPE_VAR_1
#undef DEF_FUNCTION_TYPE_VAR_2
#undef DEF_FUNCTION_TYPE_VAR_3
#undef DEF_FUNCTION_TYPE_VAR_4
#undef DEF_FUNCTION_TYPE_VAR_5
#undef DEF_FUNCTION_TYPE_VAR_6
#undef DEF_FUNCTION_TYPE_VAR_7
#undef DEF_POINTER_TYPE
  BT_LAST
};

typedef enum c_builtin_type builtin_type;

/* A temporary array used in communication with def_fn_type.  */
static GTY(()) tree builtin_types[(int) BT_LAST + 1];
5487
5488/* A helper function for install_builtin_types. Build function type
5489 for DEF with return type RET and N arguments. If VAR is true, then the
5490 function should be variadic after those N arguments.
5491
5492 Takes special care not to ICE if any of the types involved are
5493 error_mark_node, which indicates that said type is not in fact available
5494 (see builtin_type_for_size). In which case the function type as a whole
5495 should be error_mark_node. */
5496
5497static void
5498def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
5499{
e5b00edf
NF
5500 tree t;
5501 tree *args = XALLOCAVEC (tree, n);
a1ab4c31
AC
5502 va_list list;
5503 int i;
5504
5505 va_start (list, n);
5506 for (i = 0; i < n; ++i)
5507 {
c6bd4220 5508 builtin_type a = (builtin_type) va_arg (list, int);
a1ab4c31
AC
5509 t = builtin_types[a];
5510 if (t == error_mark_node)
5511 goto egress;
e5b00edf 5512 args[i] = t;
a1ab4c31 5513 }
a1ab4c31 5514
a1ab4c31
AC
5515 t = builtin_types[ret];
5516 if (t == error_mark_node)
5517 goto egress;
e5b00edf
NF
5518 if (var)
5519 t = build_varargs_function_type_array (t, n, args);
5520 else
5521 t = build_function_type_array (t, n, args);
a1ab4c31
AC
5522
5523 egress:
5524 builtin_types[def] = t;
0edf1bb2 5525 va_end (list);
a1ab4c31
AC
5526}
5527
/* Build the builtin function types and install them in the builtin_types
   array for later use in builtin function decls.  */

static void
install_builtin_function_types (void)
{
  tree va_list_ref_type_node;
  tree va_list_arg_type_node;

  /* An array-typed va_list decays to a pointer when passed as an argument,
     so use the pointed-to element type in that case.  */
  if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
    {
      va_list_arg_type_node = va_list_ref_type_node =
	build_pointer_type (TREE_TYPE (va_list_type_node));
    }
  else
    {
      va_list_arg_type_node = va_list_type_node;
      va_list_ref_type_node = build_reference_type (va_list_type_node);
    }

  /* Each DEF_* macro now expands to the code that builds and records the
     corresponding function type, so including builtin-types.def fills in
     the builtin_types array (second leg of the X-macro technique; the
     enum above is the first).  */
#define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
  builtin_types[ENUM] = VALUE;
#define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
  def_fn_type (ENUM, RETURN, 0, 0);
#define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
  def_fn_type (ENUM, RETURN, 0, 1, ARG1);
#define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
  def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
#define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
  def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
#define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
  def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
#define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
  def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
#define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6) \
  def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
#define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6, ARG7) \
  def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
#define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6, ARG7, ARG8) \
  def_fn_type (ENUM, RETURN, 0, 8, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
	       ARG7, ARG8);
#define DEF_FUNCTION_TYPE_9(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6, ARG7, ARG8, ARG9) \
  def_fn_type (ENUM, RETURN, 0, 9, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
	       ARG7, ARG8, ARG9);
#define DEF_FUNCTION_TYPE_10(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
			     ARG6, ARG7, ARG8, ARG9, ARG10) \
  def_fn_type (ENUM, RETURN, 0, 10, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
	       ARG7, ARG8, ARG9, ARG10);
#define DEF_FUNCTION_TYPE_11(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
			     ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) \
  def_fn_type (ENUM, RETURN, 0, 11, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
	       ARG7, ARG8, ARG9, ARG10, ARG11);
#define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
  def_fn_type (ENUM, RETURN, 1, 0);
#define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
  def_fn_type (ENUM, RETURN, 1, 1, ARG1);
#define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
  def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
#define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
  def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
#define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
  def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
#define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
  def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
#define DEF_FUNCTION_TYPE_VAR_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
				ARG6) \
  def_fn_type (ENUM, RETURN, 1, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
#define DEF_FUNCTION_TYPE_VAR_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
				ARG6, ARG7) \
  def_fn_type (ENUM, RETURN, 1, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
#define DEF_POINTER_TYPE(ENUM, TYPE) \
  builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);

#include "builtin-types.def"

#undef DEF_PRIMITIVE_TYPE
#undef DEF_FUNCTION_TYPE_0
#undef DEF_FUNCTION_TYPE_1
#undef DEF_FUNCTION_TYPE_2
#undef DEF_FUNCTION_TYPE_3
#undef DEF_FUNCTION_TYPE_4
#undef DEF_FUNCTION_TYPE_5
#undef DEF_FUNCTION_TYPE_6
#undef DEF_FUNCTION_TYPE_7
#undef DEF_FUNCTION_TYPE_8
#undef DEF_FUNCTION_TYPE_9
#undef DEF_FUNCTION_TYPE_10
#undef DEF_FUNCTION_TYPE_11
#undef DEF_FUNCTION_TYPE_VAR_0
#undef DEF_FUNCTION_TYPE_VAR_1
#undef DEF_FUNCTION_TYPE_VAR_2
#undef DEF_FUNCTION_TYPE_VAR_3
#undef DEF_FUNCTION_TYPE_VAR_4
#undef DEF_FUNCTION_TYPE_VAR_5
#undef DEF_FUNCTION_TYPE_VAR_6
#undef DEF_FUNCTION_TYPE_VAR_7
#undef DEF_POINTER_TYPE
  builtin_types[(int) BT_LAST] = NULL_TREE;
}
5631
5632/* ----------------------------------------------------------------------- *
5633 * BUILTIN ATTRIBUTES *
5634 * ----------------------------------------------------------------------- */
5635
/* Enumerators for all builtin attributes; generated from builtin-attrs.def
   via the same X-macro technique as c_builtin_type above.  */
enum built_in_attribute
{
#define DEF_ATTR_NULL_TREE(ENUM) ENUM,
#define DEF_ATTR_INT(ENUM, VALUE) ENUM,
#define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
#define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_STRING
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
  ATTR_LAST
};

/* Trees for each attribute enumerator, filled in by
   install_builtin_attributes.  */
static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
5653
/* Build the trees for every builtin attribute described in
   builtin-attrs.def and record them in built_in_attributes.  */

static void
install_builtin_attributes (void)
{
  /* Fill in the built_in_attributes array.  */
#define DEF_ATTR_NULL_TREE(ENUM) \
  built_in_attributes[(int) ENUM] = NULL_TREE;
#define DEF_ATTR_INT(ENUM, VALUE) \
  built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
#define DEF_ATTR_STRING(ENUM, VALUE) \
  built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
#define DEF_ATTR_IDENT(ENUM, STRING) \
  built_in_attributes[(int) ENUM] = get_identifier (STRING);
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
  built_in_attributes[(int) ENUM] \
    = tree_cons (built_in_attributes[(int) PURPOSE], \
		 built_in_attributes[(int) VALUE], \
		 built_in_attributes[(int) CHAIN]);
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_STRING
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
}
5678
5679/* Handle a "const" attribute; arguments as in
5680 struct attribute_spec.handler. */
5681
5682static tree
5683handle_const_attribute (tree *node, tree ARG_UNUSED (name),
5684 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5685 bool *no_add_attrs)
5686{
5687 if (TREE_CODE (*node) == FUNCTION_DECL)
5688 TREE_READONLY (*node) = 1;
5689 else
5690 *no_add_attrs = true;
5691
5692 return NULL_TREE;
5693}
5694
5695/* Handle a "nothrow" attribute; arguments as in
5696 struct attribute_spec.handler. */
5697
5698static tree
5699handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
5700 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5701 bool *no_add_attrs)
5702{
5703 if (TREE_CODE (*node) == FUNCTION_DECL)
5704 TREE_NOTHROW (*node) = 1;
5705 else
5706 *no_add_attrs = true;
5707
5708 return NULL_TREE;
5709}
5710
5711/* Handle a "pure" attribute; arguments as in
5712 struct attribute_spec.handler. */
5713
5714static tree
5715handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5716 int ARG_UNUSED (flags), bool *no_add_attrs)
5717{
5718 if (TREE_CODE (*node) == FUNCTION_DECL)
5719 DECL_PURE_P (*node) = 1;
ba464315 5720 /* TODO: support types. */
a1ab4c31
AC
5721 else
5722 {
7948ae37
OH
5723 warning (OPT_Wattributes, "%qs attribute ignored",
5724 IDENTIFIER_POINTER (name));
a1ab4c31
AC
5725 *no_add_attrs = true;
5726 }
5727
5728 return NULL_TREE;
5729}
5730
/* Handle a "no vops" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
			 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
			 bool *ARG_UNUSED (no_add_attrs))
{
  /* This attribute is only installed internally on builtins, so anything
     other than a function declaration here is a compiler bug, not a user
     error — hence the assert rather than a diagnostic.  */
  gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
  DECL_IS_NOVOPS (*node) = 1;
  return NULL_TREE;
}
5743
5744/* Helper for nonnull attribute handling; fetch the operand number
5745 from the attribute argument list. */
5746
5747static bool
5748get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
5749{
5750 /* Verify the arg number is a constant. */
807e902e 5751 if (!tree_fits_uhwi_p (arg_num_expr))
a1ab4c31
AC
5752 return false;
5753
5754 *valp = TREE_INT_CST_LOW (arg_num_expr);
5755 return true;
5756}
5757
5758/* Handle the "nonnull" attribute. */
5759static tree
5760handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
5761 tree args, int ARG_UNUSED (flags),
5762 bool *no_add_attrs)
5763{
5764 tree type = *node;
5765 unsigned HOST_WIDE_INT attr_arg_num;
5766
5767 /* If no arguments are specified, all pointer arguments should be
5768 non-null. Verify a full prototype is given so that the arguments
5769 will have the correct types when we actually check them later. */
5770 if (!args)
5771 {
f4da8dce 5772 if (!prototype_p (type))
a1ab4c31
AC
5773 {
5774 error ("nonnull attribute without arguments on a non-prototype");
5775 *no_add_attrs = true;
5776 }
5777 return NULL_TREE;
5778 }
5779
5780 /* Argument list specified. Verify that each argument number references
5781 a pointer argument. */
5782 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
5783 {
a1ab4c31
AC
5784 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
5785
5786 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
5787 {
5788 error ("nonnull argument has invalid operand number (argument %lu)",
5789 (unsigned long) attr_arg_num);
5790 *no_add_attrs = true;
5791 return NULL_TREE;
5792 }
5793
d7d058c5 5794 if (prototype_p (type))
a1ab4c31 5795 {
d7d058c5
NF
5796 function_args_iterator iter;
5797 tree argument;
5798
5799 function_args_iter_init (&iter, type);
5800 for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
a1ab4c31 5801 {
d7d058c5 5802 argument = function_args_iter_cond (&iter);
a1ab4c31
AC
5803 if (!argument || ck_num == arg_num)
5804 break;
a1ab4c31
AC
5805 }
5806
5807 if (!argument
d7d058c5 5808 || TREE_CODE (argument) == VOID_TYPE)
a1ab4c31 5809 {
58c8f770
EB
5810 error ("nonnull argument with out-of-range operand number "
5811 "(argument %lu, operand %lu)",
a1ab4c31
AC
5812 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5813 *no_add_attrs = true;
5814 return NULL_TREE;
5815 }
5816
d7d058c5 5817 if (TREE_CODE (argument) != POINTER_TYPE)
a1ab4c31 5818 {
58c8f770
EB
5819 error ("nonnull argument references non-pointer operand "
5820 "(argument %lu, operand %lu)",
a1ab4c31
AC
5821 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5822 *no_add_attrs = true;
5823 return NULL_TREE;
5824 }
5825 }
5826 }
5827
5828 return NULL_TREE;
5829}
5830
5831/* Handle a "sentinel" attribute. */
5832
5833static tree
5834handle_sentinel_attribute (tree *node, tree name, tree args,
5835 int ARG_UNUSED (flags), bool *no_add_attrs)
5836{
f4da8dce 5837 if (!prototype_p (*node))
a1ab4c31
AC
5838 {
5839 warning (OPT_Wattributes,
7948ae37
OH
5840 "%qs attribute requires prototypes with named arguments",
5841 IDENTIFIER_POINTER (name));
a1ab4c31
AC
5842 *no_add_attrs = true;
5843 }
5844 else
5845 {
dcf0c47e 5846 if (!stdarg_p (*node))
a1ab4c31
AC
5847 {
5848 warning (OPT_Wattributes,
7948ae37
OH
5849 "%qs attribute only applies to variadic functions",
5850 IDENTIFIER_POINTER (name));
a1ab4c31
AC
5851 *no_add_attrs = true;
5852 }
5853 }
5854
5855 if (args)
5856 {
5857 tree position = TREE_VALUE (args);
5858
5859 if (TREE_CODE (position) != INTEGER_CST)
5860 {
5861 warning (0, "requested position is not an integer constant");
5862 *no_add_attrs = true;
5863 }
5864 else
5865 {
5866 if (tree_int_cst_lt (position, integer_zero_node))
5867 {
5868 warning (0, "requested position is less than zero");
5869 *no_add_attrs = true;
5870 }
5871 }
5872 }
5873
5874 return NULL_TREE;
5875}
5876
/* Handle a "noreturn" attribute; arguments as in
   struct attribute_spec.handler.  On a FUNCTION_DECL, noreturn is
   recorded via TREE_THIS_VOLATILE; on a pointer-to-function type, it is
   recorded by rebuilding the pointee type as a volatile variant.  */

static tree
handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
			   int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree type = TREE_TYPE (*node);

  /* See FIXME comment in c_common_attribute_table.  */
  if (TREE_CODE (*node) == FUNCTION_DECL)
    TREE_THIS_VOLATILE (*node) = 1;
  else if (TREE_CODE (type) == POINTER_TYPE
	   && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
    /* Preserve the readonly qualification while forcing the volatile
       bit (last argument of build_type_variant) on the function type.  */
    TREE_TYPE (*node)
      = build_pointer_type
	(build_type_variant (TREE_TYPE (type),
			     TYPE_READONLY (TREE_TYPE (type)), 1));
  else
    {
      /* Anything else cannot carry noreturn: warn and drop it.  */
      warning (OPT_Wattributes, "%qs attribute ignored",
	       IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
5904
0d6e14fd
JH
5905/* Handle a "leaf" attribute; arguments as in
5906 struct attribute_spec.handler. */
5907
5908static tree
f087ea44 5909handle_leaf_attribute (tree *node, tree name, tree ARG_UNUSED (args),
0d6e14fd
JH
5910 int ARG_UNUSED (flags), bool *no_add_attrs)
5911{
5912 if (TREE_CODE (*node) != FUNCTION_DECL)
5913 {
5914 warning (OPT_Wattributes, "%qE attribute ignored", name);
5915 *no_add_attrs = true;
5916 }
5917 if (!TREE_PUBLIC (*node))
5918 {
32a5388a 5919 warning (OPT_Wattributes, "%qE attribute has no effect", name);
0d6e14fd 5920 *no_add_attrs = true;
f087ea44
AC
5921 }
5922
5923 return NULL_TREE;
5924}
5925
5926/* Handle a "always_inline" attribute; arguments as in
5927 struct attribute_spec.handler. */
5928
5929static tree
5930handle_always_inline_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5931 int ARG_UNUSED (flags), bool *no_add_attrs)
5932{
5933 if (TREE_CODE (*node) == FUNCTION_DECL)
5934 {
5935 /* Set the attribute and mark it for disregarding inline limits. */
5936 DECL_DISREGARD_INLINE_LIMITS (*node) = 1;
5937 }
5938 else
5939 {
5940 warning (OPT_Wattributes, "%qE attribute ignored", name);
5941 *no_add_attrs = true;
0d6e14fd
JH
5942 }
5943
5944 return NULL_TREE;
5945}
5946
a1ab4c31
AC
5947/* Handle a "malloc" attribute; arguments as in
5948 struct attribute_spec.handler. */
5949
5950static tree
5951handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5952 int ARG_UNUSED (flags), bool *no_add_attrs)
5953{
5954 if (TREE_CODE (*node) == FUNCTION_DECL
5955 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
5956 DECL_IS_MALLOC (*node) = 1;
5957 else
5958 {
7948ae37
OH
5959 warning (OPT_Wattributes, "%qs attribute ignored",
5960 IDENTIFIER_POINTER (name));
a1ab4c31
AC
5961 *no_add_attrs = true;
5962 }
5963
5964 return NULL_TREE;
5965}
5966
/* Fake handler for attributes we don't properly support.  Accepts the
   attribute without doing anything: no flag is set and *NO_ADD_ATTRS is
   left untouched, so the attribute is attached but otherwise inert.  */

tree
fake_attribute_handler (tree * ARG_UNUSED (node),
			tree ARG_UNUSED (name),
			tree ARG_UNUSED (args),
			int ARG_UNUSED (flags),
			bool * ARG_UNUSED (no_add_attrs))
{
  return NULL_TREE;
}
5978
/* Handle a "type_generic" attribute.  This attribute is applied to
   builtin function types only, so misuse is a compiler bug and is
   checked with assertions rather than diagnostics.  */

static tree
handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
			       tree ARG_UNUSED (args), int ARG_UNUSED (flags),
			       bool * ARG_UNUSED (no_add_attrs))
{
  /* Ensure we have a function type.  */
  gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);

  /* Ensure we have a variadic function.  */
  gcc_assert (!prototype_p (*node) || stdarg_p (*node));

  return NULL_TREE;
}
5994
2724e58f
OH
5995/* Handle a "vector_size" attribute; arguments as in
5996 struct attribute_spec.handler. */
5997
5998static tree
5999handle_vector_size_attribute (tree *node, tree name, tree args,
e63b36bd 6000 int ARG_UNUSED (flags), bool *no_add_attrs)
2724e58f 6001{
e63b36bd
EB
6002 tree type = *node;
6003 tree vector_type;
2724e58f
OH
6004
6005 *no_add_attrs = true;
6006
2724e58f
OH
6007 /* We need to provide for vector pointers, vector arrays, and
6008 functions returning vectors. For example:
6009
6010 __attribute__((vector_size(16))) short *foo;
6011
6012 In this case, the mode is SI, but the type being modified is
6013 HI, so we need to look further. */
2724e58f
OH
6014 while (POINTER_TYPE_P (type)
6015 || TREE_CODE (type) == FUNCTION_TYPE
132a5459 6016 || TREE_CODE (type) == ARRAY_TYPE)
2724e58f
OH
6017 type = TREE_TYPE (type);
6018
e63b36bd
EB
6019 vector_type = build_vector_type_for_size (type, TREE_VALUE (args), name);
6020 if (!vector_type)
6021 return NULL_TREE;
2724e58f
OH
6022
6023 /* Build back pointers if needed. */
e63b36bd 6024 *node = reconstruct_complex_type (*node, vector_type);
2724e58f
OH
6025
6026 return NULL_TREE;
6027}
6028
7948ae37
OH
6029/* Handle a "vector_type" attribute; arguments as in
6030 struct attribute_spec.handler. */
6031
6032static tree
6033handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
e63b36bd 6034 int ARG_UNUSED (flags), bool *no_add_attrs)
7948ae37 6035{
e63b36bd
EB
6036 tree type = *node;
6037 tree vector_type;
7948ae37
OH
6038
6039 *no_add_attrs = true;
6040
e63b36bd 6041 if (TREE_CODE (type) != ARRAY_TYPE)
7948ae37
OH
6042 {
6043 error ("attribute %qs applies to array types only",
6044 IDENTIFIER_POINTER (name));
6045 return NULL_TREE;
6046 }
6047
e63b36bd
EB
6048 vector_type = build_vector_type_for_array (type, name);
6049 if (!vector_type)
7948ae37
OH
6050 return NULL_TREE;
6051
e63b36bd
EB
6052 TYPE_REPRESENTATIVE_ARRAY (vector_type) = type;
6053 *node = vector_type;
7948ae37
OH
6054
6055 return NULL_TREE;
6056}
6057
a1ab4c31
AC
6058/* ----------------------------------------------------------------------- *
6059 * BUILTIN FUNCTIONS *
6060 * ----------------------------------------------------------------------- */
6061
/* Worker for DEF_BUILTIN.  Possibly define a builtin function with one or two
   names.  Does not declare a non-__builtin_ function if flag_no_builtin, or
   if nonansi_p and flag_no_nonansi_builtin.

   FNCODE/NAME/FNCLASS identify the builtin; FNTYPE is the type of the
   __builtin_ entry point and LIBTYPE the type of the plain-named variant
   registered when BOTH_P.  FALLBACK_P expands the builtin as a call to
   the library function when it cannot be inlined, and IMPLICIT_P records
   whether the builtin may be used without an explicit declaration.  */

static void
def_builtin_1 (enum built_in_function fncode,
	       const char *name,
	       enum built_in_class fnclass,
	       tree fntype, tree libtype,
	       bool both_p, bool fallback_p,
	       bool nonansi_p ATTRIBUTE_UNUSED,
	       tree fnattrs, bool implicit_p)
{
  tree decl;
  const char *libname;

  /* Preserve an already installed decl.  It most likely was setup in advance
     (e.g. as part of the internal builtins) for specific reasons.  */
  if (builtin_decl_explicit (fncode) != NULL_TREE)
    return;

  /* If either variant is requested, NAME must carry the "__builtin_"
     prefix, since the library name below is derived by stripping it.  */
  gcc_assert ((!both_p && !fallback_p)
	      || !strncmp (name, "__builtin_",
			   strlen ("__builtin_")));

  libname = name + strlen ("__builtin_");
  decl = add_builtin_function (name, fntype, fncode, fnclass,
			       (fallback_p ? libname : NULL),
			       fnattrs);
  if (both_p)
    /* ??? This is normally further controlled by command-line options
       like -fno-builtin, but we don't have them for Ada.  */
    add_builtin_function (libname, libtype, fncode, fnclass,
			  NULL, fnattrs);

  /* Record the __builtin_ decl as the canonical one for FNCODE.  */
  set_builtin_decl (fncode, decl, implicit_p);
}
6099
/* C-dialect flags tested by the COND field of builtins.def entries.
   Ada has no notion of the C94/C99/C11 dialects, so these stay zero and
   the corresponding dialect-specific builtins are never installed.  */
static int flag_isoc94 = 0;
static int flag_isoc99 = 0;
static int flag_isoc11 = 0;
a1ab4c31
AC
6103
/* Install what the common builtins.def offers.  For each entry whose
   guard condition COND holds, register the builtin through def_builtin_1
   using the type and attribute trees previously built by
   install_builtin_function_types and install_builtin_attributes.  */

static void
install_builtin_functions (void)
{
#define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
		    NONANSI_P, ATTRS, IMPLICIT, COND)			\
  if (NAME && COND)							\
    def_builtin_1 (ENUM, NAME, CLASS,					\
		   builtin_types[(int) TYPE],				\
		   builtin_types[(int) LIBTYPE],			\
		   BOTH_P, FALLBACK_P, NONANSI_P,			\
		   built_in_attributes[(int) ATTRS], IMPLICIT);
#include "builtins.def"
}
6119
/* ----------------------------------------------------------------------- *
 *                       INSTALLATION OF BUILTINS                          *
 * ----------------------------------------------------------------------- */
6123
/* Install the builtin functions we might need.  This is the single entry
   point called by the Ada front-end; the ordering of the calls below is
   significant, as later steps rely on the types and attributes built by
   the earlier ones.  */

void
gnat_install_builtins (void)
{
  install_builtin_elementary_types ();
  install_builtin_function_types ();
  install_builtin_attributes ();

  /* Install builtins used by generic middle-end pieces first.  Some of these
     know about internal specificities and control attributes accordingly, for
     instance __builtin_alloca vs no-throw and -fstack-check.  We will ignore
     the generic definition from builtins.def.  */
  build_common_builtin_nodes ();

  /* Now, install the target specific builtins, such as the AltiVec family on
     ppc, and the common set as exposed by builtins.def.  */
  targetm.init_builtins ();
  install_builtin_functions ();
}
6144
6145#include "gt-ada-utils.h"
6146#include "gtype-ada.h"
This page took 3.32169 seconds and 5 git commands to generate.