/****************************************************************************
 *                                                                          *
 *                         GNAT COMPILER COMPONENTS                         *
 *                                                                          *
 *                                U T I L S                                 *
 *                                                                          *
 *                          C Implementation File                           *
 *                                                                          *
 *          Copyright (C) 1992-2015, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software;  you can redistribute it and/or modify it under   *
 * terms of the  GNU General Public License as published by the Free Soft-  *
 * ware Foundation;  either version 3,  or (at your option) any later ver-  *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY;  without even the implied warranty of MERCHANTABILITY  *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for more details.  You should have received a copy of the GNU General    *
 * Public License along with GCC; see the file COPYING3.  If not see        *
 * <http://www.gnu.org/licenses/>.                                          *
 *                                                                          *
 * GNAT was originally developed by the GNAT team at New York University.   *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 *                                                                          *
 ****************************************************************************/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "attribs.h"
#include "varasm.h"
#include "flags.h"
#include "toplev.h"
#include "diagnostic-core.h"
#include "output.h"
#include "ggc.h"
#include "debug.h"
#include "convert.h"
#include "target.h"
#include "common/common-target.h"
#include "langhooks.h"
#include "hash-map.h"
#include "is-a.h"
#include "plugin-api.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "tree-iterator.h"

#include "ada.h"
#include "types.h"
#include "atree.h"
#include "elists.h"
#include "namet.h"
#include "nlists.h"
#include "stringt.h"
#include "uintp.h"
#include "fe.h"
#include "sinfo.h"
#include "einfo.h"
#include "ada-tree.h"
#include "gigi.h"

/* If nonzero, pretend we are allocating at global level.  */
int force_global;

/* The default alignment of "double" floating-point types, i.e. floating
   point types whose size is equal to 64 bits, or 0 if this alignment is
   not specifically capped.  */
int double_float_alignment;

/* The default alignment of "double" or larger scalar types, i.e. scalar
   types whose size is greater or equal to 64 bits, or 0 if this alignment
   is not specifically capped.  */
int double_scalar_alignment;

/* True if floating-point arithmetic may use wider intermediate results.  */
bool fp_arith_may_widen = true;

/* Tree nodes for the various types and decls we create.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons.  */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];

/* Likewise, but with extra info for each of the possible raise reasons.  */
tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];

/* Forward declarations for handlers of attributes.  */
static tree handle_const_attribute (tree *, tree, tree, int, bool *);
static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
static tree handle_always_inline_attribute (tree *, tree, tree, int, bool *);
static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);

/* Fake handler for attributes we don't properly support, typically because
   they'd require dragging a lot of the common C front-end circuitry.  */
static tree fake_attribute_handler (tree *, tree, tree, int, bool *);

/* Table of machine-independent internal attributes for Ada.  We support
   this minimal set of attributes to accommodate the needs of builtins.  */
const struct attribute_spec gnat_internal_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  { "const",        0, 0,  true,  false, false, handle_const_attribute,
    false },
  { "nothrow",      0, 0,  true,  false, false, handle_nothrow_attribute,
    false },
  { "pure",         0, 0,  true,  false, false, handle_pure_attribute,
    false },
  { "no vops",      0, 0,  true,  false, false, handle_novops_attribute,
    false },
  { "nonnull",      0, -1, false, true,  true,  handle_nonnull_attribute,
    false },
  { "sentinel",     0, 1,  false, true,  true,  handle_sentinel_attribute,
    false },
  { "noreturn",     0, 0,  true,  false, false, handle_noreturn_attribute,
    false },
  { "leaf",         0, 0,  true,  false, false, handle_leaf_attribute,
    false },
  { "always_inline",0, 0,  true,  false, false, handle_always_inline_attribute,
    false },
  { "malloc",       0, 0,  true,  false, false, handle_malloc_attribute,
    false },
  { "type generic", 0, 0,  false, true,  true,  handle_type_generic_attribute,
    false },

  { "vector_size",  1, 1,  false, true,  false, handle_vector_size_attribute,
    false },
  { "vector_type",  0, 0,  false, true,  false, handle_vector_type_attribute,
    false },
  { "may_alias",    0, 0,  false, true,  false, NULL, false },

  /* ??? format and format_arg are heavy and not supported, which actually
     prevents support for stdio builtins, which we however declare as part
     of the common builtins.def contents.  */
  { "format",     3, 3, false, true, true, fake_attribute_handler, false },
  { "format_arg", 1, 1, false, true, true, fake_attribute_handler, false },

  { NULL,         0, 0, false, false, false, NULL, false }
};
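
/* As an example of how the table above is read: the "const" entry declares
   an attribute that takes no arguments (min_len = max_len = 0), must be
   applied to a declaration rather than to a type (decl_req is true), is
   processed by handle_const_attribute and does not affect type identity.  */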

/* Associates a GNAT tree node to a GCC tree node.  It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'.  See documentation
   of `save_gnu_tree' for more info.  */
static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;

#define GET_GNU_TREE(GNAT_ENTITY) \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]

#define SET_GNU_TREE(GNAT_ENTITY,VAL) \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_GNU_TREE(GNAT_ENTITY) \
  (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
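
/* For instance, GET_GNU_TREE (gnat_entity) expands to
   associate_gnat_to_gnu[(gnat_entity) - First_Node_Id], i.e. the table is
   indexed by the GNAT node id relative to First_Node_Id.  */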

/* Associates a GNAT entity to a GCC tree node used as a dummy, if any.  */
static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;

#define GET_DUMMY_NODE(GNAT_ENTITY) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id]

#define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
  (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* This variable keeps a table for types for each precision so that we only
   allocate each of them once.  Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special.  Perhaps we should NOT share these types; we'll see how it
   goes later.  */
static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static GTY(()) tree float_types[NUM_MACHINE_MODES];

/* For each binding contour we allocate a binding_level structure to indicate
   the binding depth.  */

struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
  /* The binding level containing this one (the enclosing binding level).  */
  struct gnat_binding_level *chain;
  /* The BLOCK node for this level.  */
  tree block;
  /* If nonzero, the setjmp buffer that needs to be updated for any
     variable-sized definition within this context.  */
  tree jmpbuf_decl;
};

/* The binding level currently in effect.  */
static GTY(()) struct gnat_binding_level *current_binding_level;

/* A chain of gnat_binding_level structures awaiting reuse.  */
static GTY((deletable)) struct gnat_binding_level *free_binding_level;

/* The context to be used for global declarations.  */
static GTY(()) tree global_context;

/* An array of global declarations.  */
static GTY(()) vec<tree, va_gc> *global_decls;

/* An array of builtin function declarations.  */
static GTY(()) vec<tree, va_gc> *builtin_decls;

/* An array of global non-constant renamings.  */
static GTY(()) vec<tree, va_gc> *global_nonconstant_renamings;

/* A chain of unused BLOCK nodes.  */
static GTY((deletable)) tree free_block_chain;

/* A hash table of padded types.  It is modelled on the generic type
   hash table in tree.c, which must thus be used as a reference.  */

struct GTY((for_user)) pad_type_hash {
  unsigned long hash;
  tree type;
};

struct pad_type_hasher : ggc_cache_hasher<pad_type_hash *>
{
  static inline hashval_t hash (pad_type_hash *t) { return t->hash; }
  static bool equal (pad_type_hash *a, pad_type_hash *b);
  static void handle_cache_entry (pad_type_hash *&);
};

static GTY ((cache))
  hash_table<pad_type_hasher> *pad_type_hash_table;

static tree merge_sizes (tree, tree, tree, bool, bool);
static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *);
static tree float_type_for_precision (int, machine_mode);
static tree convert_to_fat_pointer (tree, tree);
static unsigned int scale_by_factor_of (tree, unsigned int);
static bool potential_alignment_gap (tree, tree, tree);

/* A linked list used as a queue to defer the initialization of the
   DECL_CONTEXT attribute of ..._DECL nodes and of the TYPE_CONTEXT attribute
   of ..._TYPE nodes.  */
struct deferred_decl_context_node
{
  tree decl;                /* The ..._DECL node to work on.  */
  Entity_Id gnat_scope;     /* The corresponding entity's Scope attribute.  */
  int force_global;         /* force_global value when pushing DECL.  */
  vec<tree, va_heap, vl_ptr> types;  /* A list of ..._TYPE nodes to propagate
                                        the context to.  */
  struct deferred_decl_context_node *next;  /* The next queue item.  */
};
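
/* The queue above is used roughly as follows: when the GNAT scope of a
   declaration has not been elaborated yet, gnat_pushdecl queues the
   declaration with add_deferred_decl_context and attaches the types whose
   TYPE_CONTEXT depends on it with add_deferred_type_context; the contexts
   are finally set once the scope's elaboration is available.  */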

static struct deferred_decl_context_node *deferred_decl_context_queue = NULL;

/* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
   feed it with the elaboration of GNAT_SCOPE.  */
static struct deferred_decl_context_node *
add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global);

/* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
   feed it with the DECL_CONTEXT computed as part of N as soon as it is
   computed.  */
static void add_deferred_type_context (struct deferred_decl_context_node *n,
                                       tree type);
\f
/* Initialize data structures of the utils.c module.  */

void
init_gnat_utils (void)
{
  /* Initialize the association of GNAT nodes to GCC trees.  */
  associate_gnat_to_gnu = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);

  /* Initialize the association of GNAT nodes to GCC trees as dummies.  */
  dummy_node_table = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);

  /* Initialize the hash table of padded types.  */
  pad_type_hash_table = hash_table<pad_type_hasher>::create_ggc (512);
}

/* Destroy data structures of the utils.c module.  */

void
destroy_gnat_utils (void)
{
  /* Destroy the association of GNAT nodes to GCC trees.  */
  ggc_free (associate_gnat_to_gnu);
  associate_gnat_to_gnu = NULL;

  /* Destroy the association of GNAT nodes to GCC trees as dummies.  */
  ggc_free (dummy_node_table);
  dummy_node_table = NULL;

  /* Destroy the hash table of padded types.  */
  pad_type_hash_table->empty ();
  pad_type_hash_table = NULL;

  /* Invalidate the global non-constant renamings.  */
  invalidate_global_nonconstant_renamings ();
}
\f
/* GNAT_ENTITY is a GNAT tree node for an entity.  Associate GNU_DECL, a GCC
   tree node, with GNAT_ENTITY.  If GNU_DECL is not a ..._DECL node, abort.
   If NO_CHECK is true, the latter check is suppressed.

   If GNU_DECL is zero, reset a previous association.  */

void
save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
{
  /* Check that GNAT_ENTITY is not already defined and that it is being set
     to something which is a decl.  If that is not the case, this usually
     means GNAT_ENTITY is defined twice, but occasionally is due to some
     Gigi problem.  */
  gcc_assert (!(gnu_decl
                && (PRESENT_GNU_TREE (gnat_entity)
                    || (!no_check && !DECL_P (gnu_decl)))));

  SET_GNU_TREE (gnat_entity, gnu_decl);
}

/* GNAT_ENTITY is a GNAT tree node for an entity.  Return the GCC tree node
   that was associated with it.  If there is no such tree node, abort.

   In some cases, such as delayed elaboration or expressions that need to
   be elaborated only once, GNAT_ENTITY is really not an entity.  */

tree
get_gnu_tree (Entity_Id gnat_entity)
{
  gcc_assert (PRESENT_GNU_TREE (gnat_entity));
  return GET_GNU_TREE (gnat_entity);
}

/* Return nonzero if a GCC tree has been associated with GNAT_ENTITY.  */

bool
present_gnu_tree (Entity_Id gnat_entity)
{
  return PRESENT_GNU_TREE (gnat_entity);
}
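
/* A minimal usage sketch of the three routines above (hypothetical caller,
   for illustration only):

     if (present_gnu_tree (gnat_entity))
       gnu_decl = get_gnu_tree (gnat_entity);
     else
       {
         gnu_decl = translate_entity (gnat_entity);  // hypothetical helper
         save_gnu_tree (gnat_entity, gnu_decl, false);
       }
*/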
\f
/* Make a dummy type corresponding to GNAT_TYPE.  */

tree
make_dummy_type (Entity_Id gnat_type)
{
  Entity_Id gnat_equiv = Gigi_Equivalent_Type (Underlying_Type (gnat_type));
  tree gnu_type;

  /* If there was no equivalent type (can only happen when just annotating
     types) or underlying type, go back to the original type.  */
  if (No (gnat_equiv))
    gnat_equiv = gnat_type;

  /* If there is already a dummy type, use that one.  Otherwise make one.  */
  if (PRESENT_DUMMY_NODE (gnat_equiv))
    return GET_DUMMY_NODE (gnat_equiv);

  /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
     an ENUMERAL_TYPE.  */
  gnu_type = make_node (Is_Record_Type (gnat_equiv)
                        ? tree_code_for_record_type (gnat_equiv)
                        : ENUMERAL_TYPE);
  TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
  TYPE_DUMMY_P (gnu_type) = 1;
  TYPE_STUB_DECL (gnu_type)
    = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
  if (Is_By_Reference_Type (gnat_equiv))
    TYPE_BY_REFERENCE_P (gnu_type) = 1;

  SET_DUMMY_NODE (gnat_equiv, gnu_type);

  return gnu_type;
}

/* Return the dummy type that was made for GNAT_TYPE, if any.  */

tree
get_dummy_type (Entity_Id gnat_type)
{
  return GET_DUMMY_NODE (gnat_type);
}

/* Build dummy fat and thin pointer types whose designated type is specified
   by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter.  */

void
build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
{
  tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
  tree gnu_fat_type, fields, gnu_object_type;

  gnu_template_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
  TYPE_DUMMY_P (gnu_template_type) = 1;
  gnu_ptr_template = build_pointer_type (gnu_template_type);

  gnu_array_type = make_node (ENUMERAL_TYPE);
  TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
  TYPE_DUMMY_P (gnu_array_type) = 1;
  gnu_ptr_array = build_pointer_type (gnu_array_type);

  gnu_fat_type = make_node (RECORD_TYPE);
  /* Build a stub DECL to trigger the special processing for fat pointer types
     in gnat_pushdecl.  */
  TYPE_NAME (gnu_fat_type)
    = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
                             gnu_fat_type);
  fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
                              gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
  DECL_CHAIN (fields)
    = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
                         gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
  finish_fat_pointer_type (gnu_fat_type, fields);
  SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
  /* Suppress debug info until after the type is completed.  */
  TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;

  gnu_object_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
  TYPE_DUMMY_P (gnu_object_type) = 1;

  TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
  TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
}
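
/* Conceptually, the fat pointer built above can be pictured as the
   following C sketch (field and type names are illustrative only):

     struct fat_pointer
     {
       element_type *p_array;        // P_ARRAY: the array data
       bounds_template *p_bounds;    // P_BOUNDS: the bounds template
     };
*/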
\f
/* Return true if we are in the global binding level.  */

bool
global_bindings_p (void)
{
  return force_global || current_function_decl == NULL_TREE;
}

/* Enter a new binding level.  */

void
gnat_pushlevel (void)
{
  struct gnat_binding_level *newlevel = NULL;

  /* Reuse a struct for this binding level, if there is one.  */
  if (free_binding_level)
    {
      newlevel = free_binding_level;
      free_binding_level = free_binding_level->chain;
    }
  else
    newlevel = ggc_alloc<gnat_binding_level> ();

  /* Use a free BLOCK, if any; otherwise, allocate one.  */
  if (free_block_chain)
    {
      newlevel->block = free_block_chain;
      free_block_chain = BLOCK_CHAIN (free_block_chain);
      BLOCK_CHAIN (newlevel->block) = NULL_TREE;
    }
  else
    newlevel->block = make_node (BLOCK);

  /* Point the BLOCK we just made to its parent.  */
  if (current_binding_level)
    BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;

  BLOCK_VARS (newlevel->block) = NULL_TREE;
  BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
  TREE_USED (newlevel->block) = 1;

  /* Add this level to the front of the chain (stack) of active levels.  */
  newlevel->chain = current_binding_level;
  newlevel->jmpbuf_decl = NULL_TREE;
  current_binding_level = newlevel;
}

/* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
   and point FNDECL to this BLOCK.  */

void
set_current_block_context (tree fndecl)
{
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  set_block_for_group (current_binding_level->block);
}

/* Set the jmpbuf_decl for the current binding level to DECL.  */

void
set_block_jmpbuf_decl (tree decl)
{
  current_binding_level->jmpbuf_decl = decl;
}

/* Get the jmpbuf_decl, if any, for the current binding level.  */

tree
get_block_jmpbuf_decl (void)
{
  return current_binding_level->jmpbuf_decl;
}

/* Exit a binding level.  Set any BLOCK into the current code group.  */

void
gnat_poplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
  BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));

  /* If this is a function-level BLOCK, don't do anything.  Otherwise, if
     there are no variables, free the block and merge its subblocks into
     those of its parent block.  Otherwise, add it to the list of its
     parent.  */
  if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
    ;
  else if (BLOCK_VARS (block) == NULL_TREE)
    {
      BLOCK_SUBBLOCKS (level->chain->block)
        = block_chainon (BLOCK_SUBBLOCKS (block),
                         BLOCK_SUBBLOCKS (level->chain->block));
      BLOCK_CHAIN (block) = free_block_chain;
      free_block_chain = block;
    }
  else
    {
      BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
      BLOCK_SUBBLOCKS (level->chain->block) = block;
      TREE_USED (block) = 1;
      set_block_for_group (block);
    }

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}

/* Exit a binding level and discard the associated BLOCK.  */

void
gnat_zaplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  BLOCK_CHAIN (block) = free_block_chain;
  free_block_chain = block;

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}
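
/* In typical use (an informal sketch), binding levels are paired: a caller
   does gnat_pushlevel (), registers local declarations via gnat_pushdecl,
   then either gnat_poplevel () to keep the resulting BLOCK or
   gnat_zaplevel () to throw it away.  */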
\f
/* Set the context of TYPE and its parallel types (if any) to CONTEXT.  */

static void
gnat_set_type_context (tree type, tree context)
{
  tree decl = TYPE_STUB_DECL (type);

  TYPE_CONTEXT (type) = context;

  while (decl && DECL_PARALLEL_TYPE (decl))
    {
      tree parallel_type = DECL_PARALLEL_TYPE (decl);

      /* Give a context to the parallel types and their stub decl, if any.
         Some parallel types seem to be present in multiple parallel type
         chains, so don't mess with their context if they already have one.  */
      if (TYPE_CONTEXT (parallel_type) == NULL_TREE)
        {
          if (TYPE_STUB_DECL (parallel_type) != NULL_TREE)
            DECL_CONTEXT (TYPE_STUB_DECL (parallel_type)) = context;
          TYPE_CONTEXT (parallel_type) = context;
        }

      decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
    }
}

/* Return the innermost scope, starting at GNAT_NODE, that we are interested
   in for debug info purposes, or Empty if there is no such scope.  If not
   NULL, set IS_SUBPROGRAM to whether the returned entity is a subprogram.  */

static Entity_Id
get_debug_scope (Node_Id gnat_node, bool *is_subprogram)
{
  Entity_Id gnat_entity;

  if (is_subprogram)
    *is_subprogram = false;

  if (Nkind (gnat_node) == N_Defining_Identifier)
    gnat_entity = Scope (gnat_node);
  else
    return Empty;

  while (Present (gnat_entity))
    {
      switch (Ekind (gnat_entity))
        {
        case E_Function:
        case E_Procedure:
          if (Present (Protected_Body_Subprogram (gnat_entity)))
            gnat_entity = Protected_Body_Subprogram (gnat_entity);

          /* If the scope is a subprogram, then just rely on
             current_function_decl, so that we don't have to defer
             anything.  This is needed because other places rely on the
             validity of the DECL_CONTEXT attribute of FUNCTION_DECL nodes.  */
          if (is_subprogram)
            *is_subprogram = true;
          return gnat_entity;

        case E_Record_Type:
        case E_Record_Subtype:
          return gnat_entity;

        default:
          /* By default, we are not interested in this particular scope: go to
             the outer one.  */
          break;
        }
      gnat_entity = Scope (gnat_entity);
    }
  return Empty;
}

/* If N is NULL, set TYPE's context to CONTEXT.  Defer this to the processing
   of N otherwise.  */

static void
defer_or_set_type_context (tree type,
                           tree context,
                           struct deferred_decl_context_node *n)
{
  if (n)
    add_deferred_type_context (n, type);
  else
    gnat_set_type_context (type, context);
}

/* Return global_context.  Create it if needed, first.  */

static tree
get_global_context (void)
{
  if (!global_context)
    global_context = build_translation_unit_decl (NULL_TREE);
  return global_context;
}

/* Record DECL as belonging to the current lexical scope and use GNAT_NODE
   for location information and flag propagation.  */

void
gnat_pushdecl (tree decl, Node_Id gnat_node)
{
  tree context = NULL_TREE;
  struct deferred_decl_context_node *deferred_decl_context = NULL;

  /* If explicitly asked to make DECL global or if it's an imported nested
     object, short-circuit the regular Scope-based context computation.  */
  if (!((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || force_global == 1))
    {
      /* Rely on the GNAT scope, or fall back to current_function_decl if
         the GNAT scope reached the global scope, if it reached a subprogram
         or the declaration is a subprogram or a variable (for them we skip
         intermediate context types because the subprogram body elaboration
         machinery and the inliner both expect a subprogram context).

         Falling back to current_function_decl is necessary for implicit
         subprograms created by gigi, such as the elaboration subprograms.  */
      bool context_is_subprogram = false;
      const Entity_Id gnat_scope
        = get_debug_scope (gnat_node, &context_is_subprogram);

      if (Present (gnat_scope)
          && !context_is_subprogram
          && TREE_CODE (decl) != FUNCTION_DECL
          && TREE_CODE (decl) != VAR_DECL)
        /* Always assume the scope has not been elaborated, thus defer the
           context propagation to the time its elaboration will be
           available.  */
        deferred_decl_context
          = add_deferred_decl_context (decl, gnat_scope, force_global);

      /* External declarations (when force_global > 0) may not be in a
         local context.  */
      else if (current_function_decl != NULL_TREE && force_global == 0)
        context = current_function_decl;
    }

  /* If either we are forced to be in global mode or if both the GNAT scope
     and current_function_decl did not help in determining the context, use
     the global scope.  */
  if (!deferred_decl_context && context == NULL_TREE)
    context = get_global_context ();

  /* Functions imported in another function are not really nested.
     For really nested functions mark them initially as needing
     a static chain for uses of that flag before unnesting;
     lower_nested_functions will then recompute it.  */
  if (TREE_CODE (decl) == FUNCTION_DECL
      && !TREE_PUBLIC (decl)
      && context != NULL_TREE
      && (TREE_CODE (context) == FUNCTION_DECL
          || decl_function_context (context) != NULL_TREE))
    DECL_STATIC_CHAIN (decl) = 1;

  if (!deferred_decl_context)
    DECL_CONTEXT (decl) = context;

  TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));

  /* Set the location of DECL and emit a declaration for it.  */
  if (Present (gnat_node) && !renaming_from_generic_instantiation_p (gnat_node))
    Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));

  add_decl_expr (decl, gnat_node);

  /* Put the declaration on the list.  The list of declarations is in reverse
     order.  The list will be reversed later.  Put global declarations in the
     globals list and local ones in the current block.  But skip TYPE_DECLs
     for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
     with the debugger and aren't needed anyway.  */
  if (!(TREE_CODE (decl) == TYPE_DECL
        && TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
    {
      if (DECL_EXTERNAL (decl))
        {
          if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
            vec_safe_push (builtin_decls, decl);
        }
      else if (global_bindings_p ())
        vec_safe_push (global_decls, decl);
      else
        {
          DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
          BLOCK_VARS (current_binding_level->block) = decl;
        }
    }

  /* For the declaration of a type, set its name either if it isn't already
     set or if the previous type name was not derived from a source name.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same node, except when the names are
     both derived from source names.  */
  if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
    {
      tree t = TREE_TYPE (decl);

      if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
          && (TREE_CODE (t) != POINTER_TYPE || DECL_ARTIFICIAL (decl)))
        {
          /* Array types aren't "tagged" types so we force the type to be
             associated with its typedef in the DWARF back-end, in order to
             make sure that the latter is always preserved, by creating an
             on-side copy for DECL_ORIGINAL_TYPE.  We used to do the same
             for pointer types, but to have consistent DWARF output we now
             create a copy for the type itself and use the original type
             for DECL_ORIGINAL_TYPE like the C front-end.  */
          if (!DECL_ARTIFICIAL (decl) && TREE_CODE (t) == ARRAY_TYPE)
            {
              tree tt = build_distinct_type_copy (t);
              /* Array types need to have a name so that they can be related
                 to their GNAT encodings.  */
              TYPE_NAME (tt) = DECL_NAME (decl);
              defer_or_set_type_context (tt,
                                         DECL_CONTEXT (decl),
                                         deferred_decl_context);
              TYPE_STUB_DECL (tt) = TYPE_STUB_DECL (t);
              DECL_ORIGINAL_TYPE (decl) = tt;
            }
        }
      else if (!DECL_ARTIFICIAL (decl)
               && (TREE_CODE (t) == POINTER_TYPE || TYPE_IS_FAT_POINTER_P (t)))
        {
          tree tt;
          /* ??? Copy and original type are not supposed to be variant but we
             really need a variant for the placeholder machinery to work.  */
          if (TYPE_IS_FAT_POINTER_P (t))
            tt = build_variant_type_copy (t);
          else
            {
              /* TYPE_NEXT_PTR_TO is a chain of main variants.  */
              tt = build_distinct_type_copy (TYPE_MAIN_VARIANT (t));
              TYPE_NEXT_PTR_TO (TYPE_MAIN_VARIANT (t)) = tt;
              tt = build_qualified_type (tt, TYPE_QUALS (t));
            }
          TYPE_NAME (tt) = decl;
          defer_or_set_type_context (tt,
                                     DECL_CONTEXT (decl),
                                     deferred_decl_context);
          TREE_USED (tt) = TREE_USED (t);
          TREE_TYPE (decl) = tt;
          if (TYPE_NAME (t) != NULL_TREE
              && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
              && DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
            DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
          else
            DECL_ORIGINAL_TYPE (decl) = t;
          t = NULL_TREE;
        }
      else if (TYPE_NAME (t) != NULL_TREE
               && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
               && DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
        ;
      else
        t = NULL_TREE;

      /* Propagate the name to all the anonymous variants.  This is needed
         for the type qualifiers machinery to work properly (see
         check_qualified_type).  Also propagate the context to them.  Note
         that the context will be propagated to all parallel types too thanks
         to gnat_set_type_context.  */
      if (t)
        for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
          if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
            {
              TYPE_NAME (t) = decl;
              defer_or_set_type_context (t,
                                         DECL_CONTEXT (decl),
                                         deferred_decl_context);
            }
    }
}
\f
/* Create a record type that contains a field of TYPE that is SIZE bytes
   long, with a starting bit position chosen so that the field is aligned to
   ALIGN bits, leaving at least ROOM bytes free before the field.  BASE_ALIGN
   is the alignment the record is guaranteed to get.  GNAT_NODE is used for
   the position of the associated TYPE_DECL.  */

tree
make_aligning_type (tree type, unsigned int align, tree size,
                    unsigned int base_align, int room, Node_Id gnat_node)
{
  /* We will be crafting a record type with one field at a position set to be
     the next multiple of ALIGN past record'address + room bytes.  We use a
     record placeholder to express record'address.  */
  tree record_type = make_node (RECORD_TYPE);
  tree record = build0 (PLACEHOLDER_EXPR, record_type);

  tree record_addr_st
    = convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));

  /* The diagram below summarizes the shape of what we manipulate:

                     <--------- pos ---------->
                 {  +------------+-------------+-----------------+
       record =>{   |############|     ...     |  field (type)   |
                 {  +------------+-------------+-----------------+
                    |<-- room -->|<- voffset ->|<---- size ----->|
                     o            o
                     |            |
                     record_addr  vblock_addr

     Every length is in sizetype bytes there, except "pos" which has to be
     set as a bit position in the GCC tree for the record.  */
  tree room_st = size_int (room);
  tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
  tree voffset_st, pos, field;

  tree name = TYPE_IDENTIFIER (type);

  name = concat_name (name, "ALIGN");
  TYPE_NAME (record_type) = name;

  /* Compute VOFFSET and then POS.  The next byte position multiple of some
     alignment after some address is obtained by "and"ing the alignment minus
     1 with the two's complement of the address.  */
  voffset_st = size_binop (BIT_AND_EXPR,
                           fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
                           size_int ((align / BITS_PER_UNIT) - 1));
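
  /* For instance (illustrative numbers only): with align = 32 bits, i.e.
     4 bytes, and vblock_addr = 0x1003, the two's complement of the address
     ends in ...effd; "and"ing it with 4 - 1 = 3 yields voffset = 1, which
     indeed moves 0x1003 up to the next multiple of 4, namely 0x1004.  */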

  /* POS = (ROOM + VOFFSET) * BITS_PER_UNIT, in bitsizetype.  */
  pos = size_binop (MULT_EXPR,
                    convert (bitsizetype,
                             size_binop (PLUS_EXPR, room_st, voffset_st)),
                    bitsize_unit_node);

  /* Craft the GCC record representation.  We exceptionally do everything
     manually here because 1) our generic circuitry is not quite ready to
     handle the complex position/size expressions we are setting up, 2) we
     have a strong simplifying factor at hand: we know the maximum possible
     value of voffset, and 3) we have to set/reset at least the sizes in
     accordance with this maximum value anyway, as we need them to convey
     what should be "alloc"ated for this type.

     Use -1 as the 'addressable' indication for the field to prevent the
     creation of a bitfield.  We don't need one, it would have damaging
     consequences on the alignment computation, and create_field_decl would
     make one without this special argument, for instance because of the
     complex position expression.  */
  field = create_field_decl (get_identifier ("F"), type, record_type, size,
                             pos, 1, -1);
  TYPE_FIELDS (record_type) = field;

  TYPE_ALIGN (record_type) = base_align;
  TYPE_USER_ALIGN (record_type) = 1;

  TYPE_SIZE (record_type)
    = size_binop (PLUS_EXPR,
                  size_binop (MULT_EXPR, convert (bitsizetype, size),
                              bitsize_unit_node),
                  bitsize_int (align + room * BITS_PER_UNIT));
  TYPE_SIZE_UNIT (record_type)
    = size_binop (PLUS_EXPR, size,
                  size_int (room + align / BITS_PER_UNIT));

  SET_TYPE_MODE (record_type, BLKmode);
  relate_alias_sets (record_type, type, ALIAS_SET_COPY);

  /* Declare it now since it will never be declared otherwise.  This is
     necessary to ensure that its subtrees are properly marked.  */
  create_type_decl (name, record_type, true, false, gnat_node);

  return record_type;
}

/* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
   as the field type of a packed record if IN_RECORD is true, or as the
   component type of a packed array if IN_RECORD is false.  See if we can
   rewrite it either as a type that has a non-BLKmode, which we can pack
   tighter in the packed record case, or as a smaller type.  If so, return
   the new type.  If not, return the original type.  */

tree
make_packable_type (tree type, bool in_record)
{
  unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type));
  unsigned HOST_WIDE_INT new_size;
  tree new_type, old_field, field_list = NULL_TREE;
  unsigned int align;

  /* No point in doing anything if the size is zero.  */
  if (size == 0)
    return type;

  new_type = make_node (TREE_CODE (type));

  /* Copy the name and flags from the old type to that of the new.
     Note that we rely on the pointer equality created here for
     TYPE_NAME to look through conversions in various places.  */
  TYPE_NAME (new_type) = TYPE_NAME (type);
  TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
  TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
  if (TREE_CODE (type) == RECORD_TYPE)
    TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);

  /* If we are in a record and have a small size, set the alignment to
     try for an integral mode.  Otherwise set it to try for a smaller
     type with BLKmode.  */
  if (in_record && size <= MAX_FIXED_MODE_SIZE)
    {
      align = ceil_pow2 (size);
      TYPE_ALIGN (new_type) = align;
      new_size = (size + align - 1) & -align;
    }
  else
    {
      unsigned HOST_WIDE_INT align;

      /* Do not try to shrink the size if the RM size is not constant.  */
      if (TYPE_CONTAINS_TEMPLATE_P (type)
          || !tree_fits_uhwi_p (TYPE_ADA_SIZE (type)))
        return type;

      /* Round the RM size up to a unit boundary to get the minimal size
         for a BLKmode record.  Give up if it's already the size.  */
      new_size = tree_to_uhwi (TYPE_ADA_SIZE (type));
      new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
      if (new_size == size)
        return type;

      align = new_size & -new_size;
      TYPE_ALIGN (new_type) = MIN (TYPE_ALIGN (type), align);
    }

  TYPE_USER_ALIGN (new_type) = 1;

  /* Now copy the fields, keeping the position and size as we don't want
     to change the layout by propagating the packedness downwards.  */
  for (old_field = TYPE_FIELDS (type); old_field;
       old_field = DECL_CHAIN (old_field))
    {
      tree new_field_type = TREE_TYPE (old_field);
      tree new_field, new_size;

      if (RECORD_OR_UNION_TYPE_P (new_field_type)
          && !TYPE_FAT_POINTER_P (new_field_type)
          && tree_fits_uhwi_p (TYPE_SIZE (new_field_type)))
        new_field_type = make_packable_type (new_field_type, true);

      /* However, for the last field in a not already packed record type
         that is of an aggregate type, we need to use the RM size in the
         packable version of the record type, see finish_record_type.  */
      if (!DECL_CHAIN (old_field)
          && !TYPE_PACKED (type)
          && RECORD_OR_UNION_TYPE_P (new_field_type)
          && !TYPE_FAT_POINTER_P (new_field_type)
          && !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
          && TYPE_ADA_SIZE (new_field_type))
        new_size = TYPE_ADA_SIZE (new_field_type);
      else
        new_size = DECL_SIZE (old_field);

      new_field
        = create_field_decl (DECL_NAME (old_field), new_field_type, new_type,
                             new_size, bit_position (old_field),
                             TYPE_PACKED (type),
                             !DECL_NONADDRESSABLE_P (old_field));

      DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (old_field);
      SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, old_field);
      if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
        DECL_QUALIFIER (new_field) = DECL_QUALIFIER (old_field);

      DECL_CHAIN (new_field) = field_list;
      field_list = new_field;
    }

  finish_record_type (new_type, nreverse (field_list), 2, false);
  relate_alias_sets (new_type, type, ALIAS_SET_COPY);
  if (TYPE_STUB_DECL (type))
    SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
                            DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));

  /* If this is a padding record, we never want to make the size smaller
     than what was specified.  For QUAL_UNION_TYPE, also copy the size.  */
  if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      TYPE_SIZE (new_type) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
      new_size = size;
    }
  else
    {
      TYPE_SIZE (new_type) = bitsize_int (new_size);
      TYPE_SIZE_UNIT (new_type)
        = size_int ((new_size + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
    }

  if (!TYPE_CONTAINS_TEMPLATE_P (type))
    SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));

  compute_record_mode (new_type);

  /* Try harder to get a packable type if necessary, for example
     in case the record itself contains a BLKmode field.  */
  if (in_record && TYPE_MODE (new_type) == BLKmode)
    SET_TYPE_MODE (new_type,
                   mode_for_size_tree (TYPE_SIZE (new_type), MODE_INT, 1));

  /* If neither the mode nor the size has shrunk, return the old type.  */
  if (TYPE_MODE (new_type) == BLKmode && new_size >= size)
    return type;

  return new_type;
}

/* Given a type TYPE, return a new type whose size is appropriate for SIZE.
   If TYPE is the best type, return it.  Otherwise, make a new type.  We
   only support new integral and pointer types.  FOR_BIASED is true if
   we are making a biased type.  */

tree
make_type_from_size (tree type, tree size_tree, bool for_biased)
{
  unsigned HOST_WIDE_INT size;
  bool biased_p;
  tree new_type;

  /* If size indicates an error, just return TYPE to avoid propagating
     the error.  Likewise if it's too large to represent.  */
  if (!size_tree || !tree_fits_uhwi_p (size_tree))
    return type;

  size = tree_to_uhwi (size_tree);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      biased_p = (TREE_CODE (type) == INTEGER_TYPE
                  && TYPE_BIASED_REPRESENTATION_P (type));

      /* Integer types with precision 0 are forbidden.  */
      if (size == 0)
        size = 1;

      /* Only do something if the type isn't a packed array type and doesn't
         already have the proper size and the size isn't too large.  */
      if (TYPE_IS_PACKED_ARRAY_TYPE_P (type)
          || (TYPE_PRECISION (type) == size && biased_p == for_biased)
          || size > LONG_LONG_TYPE_SIZE)
        break;

      biased_p |= for_biased;
      if (TYPE_UNSIGNED (type) || biased_p)
        new_type = make_unsigned_type (size);
      else
        new_type = make_signed_type (size);
      TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
      SET_TYPE_RM_MIN_VALUE (new_type, TYPE_MIN_VALUE (type));
      SET_TYPE_RM_MAX_VALUE (new_type, TYPE_MAX_VALUE (type));
      /* Copy the name to show that it's essentially the same type and
         not a subrange type.  */
      TYPE_NAME (new_type) = TYPE_NAME (type);
      TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
      SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
      return new_type;

    case RECORD_TYPE:
      /* Do something if this is a fat pointer, in which case we
         may need to return the thin pointer.  */
      if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
        {
          machine_mode p_mode = mode_for_size (size, MODE_INT, 0);
          if (!targetm.valid_pointer_mode (p_mode))
            p_mode = ptr_mode;
          return
            build_pointer_type_for_mode
              (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
               p_mode, 0);
        }
      break;

    case POINTER_TYPE:
      /* Only do something if this is a thin pointer, in which case we
         may need to return the fat pointer.  */
      if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
        return
          build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
      break;

    default:
      break;
    }

  return type;
}
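
/* For example (an informal illustration): asking for an 8-bit version of a
   32-bit signed integer type goes through the INTEGER_TYPE case above and
   yields make_signed_type (8), with the original bounds recorded as its RM
   bounds, the name copied over and its RM size set to 8.  */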

/* See if the data pointed to by the hash table slot is marked.  */

void
pad_type_hasher::handle_cache_entry (pad_type_hash *&t)
{
  extern void gt_ggc_mx (pad_type_hash *&);
  if (t == HTAB_EMPTY_ENTRY || t == HTAB_DELETED_ENTRY)
    return;
  else if (ggc_marked_p (t->type))
    gt_ggc_mx (t);
  else
    t = static_cast<pad_type_hash *> (HTAB_DELETED_ENTRY);
}

/* Return true iff the padded types are equivalent.  */

bool
pad_type_hasher::equal (pad_type_hash *t1, pad_type_hash *t2)
{
  tree type1, type2;

  if (t1->hash != t2->hash)
    return 0;

  type1 = t1->type;
  type2 = t2->type;

  /* We consider that the padded types are equivalent if they pad the same
     type and have the same size, alignment and RM size.  Taking the mode
     into account is redundant since it is determined by the others.  */
  return
    TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
    && TYPE_SIZE (type1) == TYPE_SIZE (type2)
    && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
    && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2);
}

/* Look up the padded TYPE in the hash table and return its canonical version
   if it exists; otherwise, insert it into the hash table.  */

static tree
lookup_and_insert_pad_type (tree type)
{
  hashval_t hashcode;
  struct pad_type_hash in, *h;

  hashcode
    = iterative_hash_object (TYPE_HASH (TREE_TYPE (TYPE_FIELDS (type))), 0);
  hashcode = iterative_hash_expr (TYPE_SIZE (type), hashcode);
  hashcode = iterative_hash_hashval_t (TYPE_ALIGN (type), hashcode);
  hashcode = iterative_hash_expr (TYPE_ADA_SIZE (type), hashcode);

  in.hash = hashcode;
  in.type = type;
  h = pad_type_hash_table->find_with_hash (&in, hashcode);
  if (h)
    return h->type;

  h = ggc_alloc<pad_type_hash> ();
  h->hash = hashcode;
  h->type = type;
  *pad_type_hash_table->find_slot_with_hash (h, hashcode, INSERT) = h;
  return NULL_TREE;
}
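
/* In other words, two PAD records built around the same inner type with the
   same size, alignment and RM size are interchangeable: the first one
   inserted becomes the canonical node and later lookups return it instead
   of keeping a fresh copy.  */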

/* Ensure that TYPE has SIZE and ALIGN.  Make and return a new padded type
   if needed.  We have already verified that SIZE and ALIGN are large enough.
   GNAT_ENTITY is used to name the resulting record and to issue a warning.
   IS_COMPONENT_TYPE is true if this is being done for the component type of
   an array.  IS_USER_TYPE is true if the original type needs to be completed.
   DEFINITION is true if this type is being defined.  SET_RM_SIZE is true if
   the RM size of the resulting type is to be set to SIZE too.  */

tree
maybe_pad_type (tree type, tree size, unsigned int align,
                Entity_Id gnat_entity, bool is_component_type,
                bool is_user_type, bool definition, bool set_rm_size)
{
  tree orig_size = TYPE_SIZE (type);
  unsigned int orig_align = TYPE_ALIGN (type);
  tree record, field;

  /* If TYPE is a padded type, see if it agrees with any size and alignment
     we were given.  If so, return the original type.  Otherwise, strip
     off the padding, since we will either be returning the inner type
     or repadding it.  If no size or alignment is specified, use that of
     the original padded type.  */
  if (TYPE_IS_PADDING_P (type))
    {
      if ((!size
           || operand_equal_p (round_up (size, orig_align), orig_size, 0))
          && (align == 0 || align == orig_align))
        return type;

      if (!size)
        size = orig_size;
      if (align == 0)
        align = orig_align;

      type = TREE_TYPE (TYPE_FIELDS (type));
      orig_size = TYPE_SIZE (type);
      orig_align = TYPE_ALIGN (type);
    }

  /* If the size is either not being changed or is being made smaller (which
     is not done here and is only valid for bitfields anyway), show the size
     isn't changing.  Likewise, clear the alignment if it isn't being
     changed.  Then return if we aren't doing anything.  */
  if (size
      && (operand_equal_p (size, orig_size, 0)
          || (TREE_CODE (orig_size) == INTEGER_CST
              && tree_int_cst_lt (size, orig_size))))
    size = NULL_TREE;

  if (align == orig_align)
    align = 0;

  if (align == 0 && !size)
    return type;

  /* If requested, complete the original type and give it a name.  */
  if (is_user_type)
    create_type_decl (get_entity_name (gnat_entity), type,
                      !Comes_From_Source (gnat_entity),
                      !(TYPE_NAME (type)
                        && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
                        && DECL_IGNORED_P (TYPE_NAME (type))),
                      gnat_entity);

  /* We used to modify the record in place in some cases, but that could
     generate incorrect debugging information.  So make a new record
     type and name.  */
  record = make_node (RECORD_TYPE);
  TYPE_PADDING_P (record) = 1;

  if (Present (gnat_entity))
    TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD");

  TYPE_ALIGN (record) = align ? align : orig_align;
  TYPE_SIZE (record) = size ? size : orig_size;
  TYPE_SIZE_UNIT (record)
    = convert (sizetype,
               size_binop (CEIL_DIV_EXPR, TYPE_SIZE (record),
                           bitsize_unit_node));

  /* If we are changing the alignment and the input type is a record with
     BLKmode and a small constant size, try to make a form that has an
     integral mode.  This might allow the padding record to also have an
     integral mode, which will be much more efficient.  There is no point
     in doing so if a size is specified unless it is also a small constant
     size and it is incorrect to do so if we cannot guarantee that the mode
     will be naturally aligned since the field must always be addressable.

     ??? This might not always be a win when done for a stand-alone object:
     since the nominal and the effective type of the object will now have
     different modes, a VIEW_CONVERT_EXPR will be required for converting
     between them and it might be hard to overcome afterwards, including
     at the RTL level when the stand-alone object is accessed as a whole.  */
  if (align != 0
      && RECORD_OR_UNION_TYPE_P (type)
      && TYPE_MODE (type) == BLKmode
      && !TYPE_BY_REFERENCE_P (type)
      && TREE_CODE (orig_size) == INTEGER_CST
      && !TREE_OVERFLOW (orig_size)
      && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
      && (!size
          || (TREE_CODE (size) == INTEGER_CST
              && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
    {
      tree packable_type = make_packable_type (type, true);
      if (TYPE_MODE (packable_type) != BLKmode
          && align >= TYPE_ALIGN (packable_type))
        type = packable_type;
    }

  /* Now create the field with the original size.  */
  field = create_field_decl (get_identifier ("F"), type, record, orig_size,
                             bitsize_zero_node, 0, 1);
  DECL_INTERNAL_P (field) = 1;

  /* Do not emit debug info until after the auxiliary record is built.  */
  finish_record_type (record, field, 1, false);

  /* Set the RM size if requested.  */
  if (set_rm_size)
    {
      tree canonical_pad_type;

      SET_TYPE_ADA_SIZE (record, size ? size : orig_size);

      /* If the padded type is complete and has constant size, we canonicalize
         it by means of the hash table.  This is consistent with the language
         semantics and ensures that gigi and the middle-end have a common view
         of these padded types.  */
      if (TREE_CONSTANT (TYPE_SIZE (record))
          && (canonical_pad_type = lookup_and_insert_pad_type (record)))
        {
          record = canonical_pad_type;
          goto built;
        }
    }

  /* Unless debugging information isn't being written for the input type,
     write a record that shows what we are a subtype of and also make a
     variable that indicates our size, if still variable.  */
  if (TREE_CODE (orig_size) != INTEGER_CST
      && TYPE_NAME (record)
      && TYPE_NAME (type)
      && !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
           && DECL_IGNORED_P (TYPE_NAME (type))))
    {
      tree marker = make_node (RECORD_TYPE);
      tree name = TYPE_IDENTIFIER (record);
      tree orig_name = TYPE_IDENTIFIER (type);

      TYPE_NAME (marker) = concat_name (name, "XVS");
      finish_record_type (marker,
                          create_field_decl (orig_name,
                                             build_reference_type (type),
                                             marker, NULL_TREE, NULL_TREE,
                                             0, 0),
                          0, true);

      add_parallel_type (record, marker);

      if (definition && size && TREE_CODE (size) != INTEGER_CST)
        TYPE_SIZE_UNIT (marker)
          = create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
                             TYPE_SIZE_UNIT (record), false, false, false,
                             false, NULL, gnat_entity);
    }

  rest_of_record_type_compilation (record);

built:
  /* If the size was widened explicitly, maybe give a warning.  Take the
     original size as the maximum size of the input if there was an
     unconstrained record involved and round it up to the specified alignment,
     if one was specified.  But don't do it if we are just annotating types
     and the type is tagged, since tagged types aren't fully laid out in this
     mode.  */
  if (!size
      || TREE_CODE (size) == COND_EXPR
      || TREE_CODE (size) == MAX_EXPR
      || No (gnat_entity)
      || (type_annotate_only && Is_Tagged_Type (Etype (gnat_entity))))
    return record;

  if (CONTAINS_PLACEHOLDER_P (orig_size))
    orig_size = max_size (orig_size, true);

  if (align)
    orig_size = round_up (orig_size, align);

  if (!operand_equal_p (size, orig_size, 0)
      && !(TREE_CODE (size) == INTEGER_CST
           && TREE_CODE (orig_size) == INTEGER_CST
           && (TREE_OVERFLOW (size)
               || TREE_OVERFLOW (orig_size)
               || tree_int_cst_lt (size, orig_size))))
    {
      Node_Id gnat_error_node = Empty;

      /* For a packed array, post the message on the original array type.  */
      if (Is_Packed_Array_Impl_Type (gnat_entity))
        gnat_entity = Original_Array_Type (gnat_entity);

      if ((Ekind (gnat_entity) == E_Component
           || Ekind (gnat_entity) == E_Discriminant)
          && Present (Component_Clause (gnat_entity)))
        gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
      else if (Present (Size_Clause (gnat_entity)))
        gnat_error_node = Expression (Size_Clause (gnat_entity));

      /* Generate message only for entities that come from source, since
         if we have an entity created by expansion, the message will be
         generated for some other corresponding source entity.  */
      if (Comes_From_Source (gnat_entity))
        {
          if (Present (gnat_error_node))
            post_error_ne_tree ("{^ }bits of & unused?",
                                gnat_error_node, gnat_entity,
                                size_diffop (size, orig_size));
          else if (is_component_type)
            post_error_ne_tree ("component of& padded{ by ^ bits}?",
                                gnat_entity, gnat_entity,
                                size_diffop (size, orig_size));
        }
    }

  return record;
}
1470\f
1471/* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
1472 If this is a multi-dimensional array type, do this recursively.
1473
1474 OP may be
1475 - ALIAS_SET_COPY: the new set is made a copy of the old one.
1476 - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
1477 - ALIAS_SET_SUBSET: the new set is made a subset of the old one. */
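/* Editorial illustration (not part of the original source): a typical use
   is to make a derived array type share the aliasing properties of the
   type it was built from, roughly

     relate_alias_sets (gnu_new_type, gnu_old_type, ALIAS_SET_COPY);

   so that accesses through either type are deemed to conflict.  The
   SUBSET/SUPERSET variants instead record a one-way relationship through
   record_alias_subset, as done in the body below.  */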
1478
1479void
1480relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
1481{
1482 /* Remove any padding from GNU_OLD_TYPE. It doesn't matter in the case
1483 of a one-dimensional array, since the padding has the same alias set
1484 as the field type, but if it's a multi-dimensional array, we need to
1485 see the inner types. */
1486 while (TREE_CODE (gnu_old_type) == RECORD_TYPE
1487 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
1488 || TYPE_PADDING_P (gnu_old_type)))
1489 gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));
1490
1491 /* Unconstrained array types are deemed incomplete and would thus be given
1492 alias set 0. Retrieve the underlying array type. */
1493 if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
1494 gnu_old_type
1495 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
1496 if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
1497 gnu_new_type
1498 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));
1499
1500 if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
1501 && TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
1502 && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
1503 relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);
1504
1505 switch (op)
1506 {
1507 case ALIAS_SET_COPY:
1508 /* The alias set shouldn't be copied between array types with different
1509 aliasing settings because this can break the aliasing relationship
1510 between the array type and its element type. */
1511#ifndef ENABLE_CHECKING
1512 if (flag_strict_aliasing)
1513#endif
1514 gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
1515 && TREE_CODE (gnu_old_type) == ARRAY_TYPE
1516 && TYPE_NONALIASED_COMPONENT (gnu_new_type)
1517 != TYPE_NONALIASED_COMPONENT (gnu_old_type)));
1518
1519 TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
1520 break;
1521
1522 case ALIAS_SET_SUBSET:
1523 case ALIAS_SET_SUPERSET:
1524 {
1525 alias_set_type old_set = get_alias_set (gnu_old_type);
1526 alias_set_type new_set = get_alias_set (gnu_new_type);
1527
1528 /* Do nothing if the alias sets conflict. This ensures that we
1529 never call record_alias_subset several times for the same pair
1530 or at all for alias set 0. */
1531 if (!alias_sets_conflict_p (old_set, new_set))
1532 {
1533 if (op == ALIAS_SET_SUBSET)
1534 record_alias_subset (old_set, new_set);
1535 else
1536 record_alias_subset (new_set, old_set);
1537 }
1538 }
1539 break;
1540
1541 default:
1542 gcc_unreachable ();
1543 }
1544
1545 record_component_aliases (gnu_new_type);
1546}
1547\f
1aeb40dd
EB
1548/* Record TYPE as a builtin type for Ada. NAME is the name of the type.
1549 ARTIFICIAL_P is true if it's a type that was generated by the compiler. */
a1ab4c31
AC
1550
1551void
1aeb40dd 1552record_builtin_type (const char *name, tree type, bool artificial_p)
a1ab4c31 1553{
c172df28
AH
1554 tree type_decl = build_decl (input_location,
1555 TYPE_DECL, get_identifier (name), type);
1aeb40dd 1556 DECL_ARTIFICIAL (type_decl) = artificial_p;
bc712852 1557 TYPE_ARTIFICIAL (type) = artificial_p;
10069d53 1558 gnat_pushdecl (type_decl, Empty);
a1ab4c31 1559
10069d53
EB
1560 if (debug_hooks->type_decl)
1561 debug_hooks->type_decl (type_decl, false);
a1ab4c31
AC
1562}
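/* Editorial illustration (hypothetical call, not taken from the original
   source): the Ada front-end registers its predefined types with something
   like

     record_builtin_type ("integer", integer_type_node, false);

   which wraps the type in a TYPE_DECL, pushes it at the global level and
   notifies the debug back-end; the exact names and flags used at the real
   call sites in gigi may differ.  */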
1563\f
e3edbd56
EB
1564/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1565 finish constructing the record type as a fat pointer type. */
1566
1567void
1568finish_fat_pointer_type (tree record_type, tree field_list)
1569{
1570 /* Make sure we can put it into a register. */
5da8c011
EB
1571 if (STRICT_ALIGNMENT)
1572 TYPE_ALIGN (record_type) = MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE);
e3edbd56
EB
1573
1574 /* Show what it really is. */
1575 TYPE_FAT_POINTER_P (record_type) = 1;
1576
1577 /* Do not emit debug info for it since the types of its fields may still be
1578 incomplete at this point. */
1579 finish_record_type (record_type, field_list, 0, false);
1580
1581 /* Force type_contains_placeholder_p to return true on it. Although the
1582 PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
1583 type but the representation of the unconstrained array. */
1584 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
1585}
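/* Editorial note (assumption based on the GNAT design, not stated above):
   the FIELD_LIST of a fat pointer conventionally holds two pointers, one
   to the array data and one to its bounds ("template"), which is why the
   alignment is capped to 2 * POINTER_SIZE so the whole record can travel
   in registers.  */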
1586
032d1b71 1587/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
a1ab4c31
AC
1588 finish constructing the record or union type. If REP_LEVEL is zero, this
1589 record has no representation clause and so will be entirely laid out here.
1590 If REP_LEVEL is one, this record has a representation clause and has been
1591 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
1592 this record is derived from a parent record and thus inherits its layout;
032d1b71
EB
1593 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
1594 we need to write debug information about this type. */
a1ab4c31
AC
1595
1596void
032d1b71
EB
1597finish_record_type (tree record_type, tree field_list, int rep_level,
1598 bool debug_info_p)
a1ab4c31
AC
1599{
1600 enum tree_code code = TREE_CODE (record_type);
9dba4b55 1601 tree name = TYPE_IDENTIFIER (record_type);
a1ab4c31
AC
1602 tree ada_size = bitsize_zero_node;
1603 tree size = bitsize_zero_node;
1604 bool had_size = TYPE_SIZE (record_type) != 0;
1605 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
1606 bool had_align = TYPE_ALIGN (record_type) != 0;
1607 tree field;
1608
032d1b71 1609 TYPE_FIELDS (record_type) = field_list;
a1ab4c31 1610
10069d53
EB
1611 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
1612 generate debug info and have a parallel type. */
10069d53 1613 TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);
a1ab4c31
AC
1614
1615 /* Globally initialize the record first. If this is a rep'ed record,
1616 that just means some initializations; otherwise, layout the record. */
1617 if (rep_level > 0)
1618 {
1619 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
a1ab4c31
AC
1620
1621 if (!had_size_unit)
1622 TYPE_SIZE_UNIT (record_type) = size_zero_node;
b1fa9126 1623
a1ab4c31
AC
1624 if (!had_size)
1625 TYPE_SIZE (record_type) = bitsize_zero_node;
1626
1627 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
1628 out just like a UNION_TYPE, since the size will be fixed. */
1629 else if (code == QUAL_UNION_TYPE)
1630 code = UNION_TYPE;
1631 }
1632 else
1633 {
1634 /* Ensure there isn't a size already set. There can be in an error
1635 case where there is a rep clause but all fields have errors and
1636 no longer have a position. */
1637 TYPE_SIZE (record_type) = 0;
bb358f1c
EB
1638
1639 /* Ensure we use the traditional GCC layout for bitfields when we need
1640 to pack the record type or have a representation clause. The other
1641 possible layout (Microsoft C compiler), if available, would prevent
1642 efficient packing in almost all cases. */
1643#ifdef TARGET_MS_BITFIELD_LAYOUT
1644 if (TARGET_MS_BITFIELD_LAYOUT && TYPE_PACKED (record_type))
1645 decl_attributes (&record_type,
1646 tree_cons (get_identifier ("gcc_struct"),
1647 NULL_TREE, NULL_TREE),
1648 ATTR_FLAG_TYPE_IN_PLACE);
1649#endif
1650
a1ab4c31
AC
1651 layout_type (record_type);
1652 }
1653
1654 /* At this point, the position and size of each field is known. It was
1655 either set before entry by a rep clause, or by laying out the type above.
1656
1657 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
1658 to compute the Ada size; the GCC size and alignment (for rep'ed records
1659 that are not padding types); and the mode (for rep'ed records). We also
1660 clear the DECL_BIT_FIELD indication for the cases we know have not been
1661 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
1662
1663 if (code == QUAL_UNION_TYPE)
032d1b71 1664 field_list = nreverse (field_list);
a1ab4c31 1665
910ad8de 1666 for (field = field_list; field; field = DECL_CHAIN (field))
a1ab4c31
AC
1667 {
1668 tree type = TREE_TYPE (field);
1669 tree pos = bit_position (field);
1670 tree this_size = DECL_SIZE (field);
1671 tree this_ada_size;
1672
e1e5852c 1673 if (RECORD_OR_UNION_TYPE_P (type)
315cff15 1674 && !TYPE_FAT_POINTER_P (type)
a1ab4c31
AC
1675 && !TYPE_CONTAINS_TEMPLATE_P (type)
1676 && TYPE_ADA_SIZE (type))
1677 this_ada_size = TYPE_ADA_SIZE (type);
1678 else
1679 this_ada_size = this_size;
1680
1681 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
1682 if (DECL_BIT_FIELD (field)
1683 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
1684 {
1685 unsigned int align = TYPE_ALIGN (type);
1686
1687 /* In the general case, type alignment is required. */
1688 if (value_factor_p (pos, align))
1689 {
1690 /* The enclosing record type must be sufficiently aligned.
1691 Otherwise, if no alignment was specified for it and it
1692 has been laid out already, bump its alignment to the
1693 desired one if this is compatible with its size. */
1694 if (TYPE_ALIGN (record_type) >= align)
1695 {
1696 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
1697 DECL_BIT_FIELD (field) = 0;
1698 }
1699 else if (!had_align
1700 && rep_level == 0
1701 && value_factor_p (TYPE_SIZE (record_type), align))
1702 {
1703 TYPE_ALIGN (record_type) = align;
1704 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
1705 DECL_BIT_FIELD (field) = 0;
1706 }
1707 }
1708
1709 /* In the non-strict alignment case, only byte alignment is required. */
1710 if (!STRICT_ALIGNMENT
1711 && DECL_BIT_FIELD (field)
1712 && value_factor_p (pos, BITS_PER_UNIT))
1713 DECL_BIT_FIELD (field) = 0;
1714 }
1715
c1abd261
EB
1716 /* If we still have DECL_BIT_FIELD set at this point, we know that the
1717 field is technically not addressable. Except that it can actually
1718 be addressed if it is BLKmode and happens to be properly aligned. */
1719 if (DECL_BIT_FIELD (field)
1720 && !(DECL_MODE (field) == BLKmode
1721 && value_factor_p (pos, BITS_PER_UNIT)))
1722 DECL_NONADDRESSABLE_P (field) = 1;
a1ab4c31
AC
1723
1724 /* A type must be as aligned as its most aligned field that is not
1725 a bit-field. But this is already enforced by layout_type. */
1726 if (rep_level > 0 && !DECL_BIT_FIELD (field))
1727 TYPE_ALIGN (record_type)
1728 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
1729
1730 switch (code)
1731 {
1732 case UNION_TYPE:
1733 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
1734 size = size_binop (MAX_EXPR, size, this_size);
1735 break;
1736
1737 case QUAL_UNION_TYPE:
1738 ada_size
1739 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1740 this_ada_size, ada_size);
1741 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1742 this_size, size);
1743 break;
1744
1745 case RECORD_TYPE:
1746 /* Since we know here that all fields are sorted in order of
1747 increasing bit position, the size of the record is one
1748 higher than the ending bit of the last field processed
1749 unless we have a rep clause, since in that case we might
1750 have a field outside a QUAL_UNION_TYPE that has a higher ending
1751 position. So use a MAX in that case. Also, if this field is a
1752 QUAL_UNION_TYPE, we need to take into account the previous size in
1753 the case of empty variants. */
1754 ada_size
1755 = merge_sizes (ada_size, pos, this_ada_size,
1756 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1757 size
1758 = merge_sizes (size, pos, this_size,
1759 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1760 break;
1761
1762 default:
1763 gcc_unreachable ();
1764 }
1765 }
1766
1767 if (code == QUAL_UNION_TYPE)
032d1b71 1768 nreverse (field_list);
a1ab4c31
AC
1769
1770 if (rep_level < 2)
1771 {
1772 /* If this is a padding record, we never want to make the size smaller
1773 than what was specified in it, if any. */
315cff15 1774 if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
a1ab4c31
AC
1775 size = TYPE_SIZE (record_type);
1776
1777 /* Now set any of the values we've just computed that apply. */
315cff15 1778 if (!TYPE_FAT_POINTER_P (record_type)
a1ab4c31
AC
1779 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
1780 SET_TYPE_ADA_SIZE (record_type, ada_size);
1781
1782 if (rep_level > 0)
1783 {
1784 tree size_unit = had_size_unit
1785 ? TYPE_SIZE_UNIT (record_type)
1786 : convert (sizetype,
1787 size_binop (CEIL_DIV_EXPR, size,
1788 bitsize_unit_node));
1789 unsigned int align = TYPE_ALIGN (record_type);
1790
1791 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
1792 TYPE_SIZE_UNIT (record_type)
1793 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
1794
1795 compute_record_mode (record_type);
1796 }
1797 }
1798
032d1b71 1799 if (debug_info_p)
a1ab4c31
AC
1800 rest_of_record_type_compilation (record_type);
1801}
1802
24d4b3d5
AC
1803/* Append PARALLEL_TYPE on the chain of parallel types of TYPE. If
1804 PARALLEL_TYPE has no context and its computation is not deferred yet, also
1805 propagate TYPE's context to PARALLEL_TYPE, or defer the propagation until the
1806 moment TYPE gets a context. */
a5695aa2
EB
1807
1808void
1809add_parallel_type (tree type, tree parallel_type)
1810{
1811 tree decl = TYPE_STUB_DECL (type);
1812
1813 while (DECL_PARALLEL_TYPE (decl))
1814 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
1815
1816 SET_DECL_PARALLEL_TYPE (decl, parallel_type);
24d4b3d5
AC
1817
1818 /* If PARALLEL_TYPE already has a context, we are done. */
1819 if (TYPE_CONTEXT (parallel_type) != NULL_TREE)
1820 return;
1821
1822 /* Otherwise, try to get one from TYPE's context. */
1823 if (TYPE_CONTEXT (type) != NULL_TREE)
1824 /* TYPE already has a context, so simply propagate it to PARALLEL_TYPE. */
1825 gnat_set_type_context (parallel_type, TYPE_CONTEXT (type));
1826
1827 /* ... otherwise TYPE has no context yet. We know it will get one thanks to
1828 gnat_pushdecl, and its context will then be propagated to PARALLEL_TYPE,
1829 so we have nothing to do in this case. */
a5695aa2
EB
1830}
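/* Editorial note: parallel types implement the special debug encodings
   documented in exp_dbug.ads; for instance the XVS marker record built
   earlier for padded subtypes is attached this way.  They are chained via
   DECL_PARALLEL_TYPE on the TYPE_STUB_DECL of TYPE, as done above.  */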
1831
1832/* Return true if TYPE has a parallel type. */
1833
1834static bool
1835has_parallel_type (tree type)
1836{
1837 tree decl = TYPE_STUB_DECL (type);
1838
1839 return DECL_PARALLEL_TYPE (decl) != NULL_TREE;
1840}
1841
032d1b71
EB
1842/* Wrap up compilation of RECORD_TYPE, i.e. output all the debug information
1843 associated with it. It need not be invoked directly in most cases since
1844 finish_record_type takes care of doing so, but this can be necessary if
1845 a parallel type is to be attached to the record type. */
a1ab4c31
AC
1846
1847void
1848rest_of_record_type_compilation (tree record_type)
1849{
a1ab4c31 1850 bool var_size = false;
fb88e1dd 1851 tree field;
a1ab4c31 1852
fb88e1dd
EB
1853 /* If this is a padded type, the bulk of the debug info has already been
1854 generated for the field's type. */
1855 if (TYPE_IS_PADDING_P (record_type))
1856 return;
1857
a5695aa2
EB
1858 /* If the type already has a parallel type (XVS type), then we're done. */
1859 if (has_parallel_type (record_type))
1860 return;
1861
fb88e1dd 1862 for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
a1ab4c31
AC
1863 {
1864 /* We need to make an XVE/XVU record if any field has variable size,
1865 whether or not the record does. For example, if we have a union,
1866 it may be that all fields, rounded up to the alignment, have the
1867 same size, in which case we'll use that size. But the debug
1868 output routines (except Dwarf2) won't be able to output the fields,
1869 so we need to make the special record. */
1870 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
1871 /* If a field has a non-constant qualifier, the record will have
1872 variable size too. */
fb88e1dd 1873 || (TREE_CODE (record_type) == QUAL_UNION_TYPE
a1ab4c31
AC
1874 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
1875 {
1876 var_size = true;
1877 break;
1878 }
1879 }
1880
fb88e1dd
EB
1881 /* If this record type is of variable size, make a parallel record type that
1882 will tell the debugger how the former is laid out (see exp_dbug.ads). */
1883 if (var_size)
a1ab4c31
AC
1884 {
1885 tree new_record_type
1886 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
1887 ? UNION_TYPE : TREE_CODE (record_type));
9dba4b55 1888 tree orig_name = TYPE_IDENTIFIER (record_type), new_name;
a1ab4c31 1889 tree last_pos = bitsize_zero_node;
0fb2335d 1890 tree old_field, prev_old_field = NULL_TREE;
a1ab4c31 1891
0fb2335d
EB
1892 new_name
1893 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
1894 ? "XVU" : "XVE");
1895 TYPE_NAME (new_record_type) = new_name;
a1ab4c31
AC
1896 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
1897 TYPE_STUB_DECL (new_record_type)
0fb2335d 1898 = create_type_stub_decl (new_name, new_record_type);
a1ab4c31
AC
1899 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
1900 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
1901 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
1902 TYPE_SIZE_UNIT (new_record_type)
1903 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
1904
5c475ba9
EB
1905 /* Now scan all the fields, replacing each field with a new field
1906 corresponding to the new encoding. */
a1ab4c31 1907 for (old_field = TYPE_FIELDS (record_type); old_field;
910ad8de 1908 old_field = DECL_CHAIN (old_field))
a1ab4c31
AC
1909 {
1910 tree field_type = TREE_TYPE (old_field);
1911 tree field_name = DECL_NAME (old_field);
a1ab4c31 1912 tree curpos = bit_position (old_field);
5c475ba9 1913 tree pos, new_field;
a1ab4c31
AC
1914 bool var = false;
1915 unsigned int align = 0;
a1ab4c31 1916
5c475ba9
EB
1917 /* We're going to do some pattern matching below so remove as many
1918 conversions as possible. */
1919 curpos = remove_conversions (curpos, true);
a1ab4c31 1920
5c475ba9 1921 /* See how the position was modified from the last position.
a1ab4c31 1922
5c475ba9
EB
1923 There are two basic cases we support: a value was added
1924 to the last position or the last position was rounded to
1925 a boundary and then something was added. Check for the
1926 first case first. If not, see if there is any evidence
1927 of rounding. If so, round the last position and retry.
a1ab4c31 1928
5c475ba9 1929 If this is a union, the position can be taken as zero. */
a1ab4c31 1930 if (TREE_CODE (new_record_type) == UNION_TYPE)
5c475ba9 1931 pos = bitsize_zero_node;
a1ab4c31
AC
1932 else
1933 pos = compute_related_constant (curpos, last_pos);
1934
5c475ba9
EB
1935 if (!pos
1936 && TREE_CODE (curpos) == MULT_EXPR
cc269bb6 1937 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1)))
a1ab4c31
AC
1938 {
1939 tree offset = TREE_OPERAND (curpos, 0);
ae7e9ddd 1940 align = tree_to_uhwi (TREE_OPERAND (curpos, 1));
5c475ba9
EB
1941 align = scale_by_factor_of (offset, align);
1942 last_pos = round_up (last_pos, align);
1943 pos = compute_related_constant (curpos, last_pos);
a1ab4c31 1944 }
5c475ba9
EB
1945 else if (!pos
1946 && TREE_CODE (curpos) == PLUS_EXPR
cc269bb6 1947 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1))
a1ab4c31 1948 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
5a36c51b
RS
1949 && tree_fits_uhwi_p
1950 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1)))
a1ab4c31 1951 {
5c475ba9
EB
1952 tree offset = TREE_OPERAND (TREE_OPERAND (curpos, 0), 0);
1953 unsigned HOST_WIDE_INT addend
ae7e9ddd 1954 = tree_to_uhwi (TREE_OPERAND (curpos, 1));
a1ab4c31 1955 align
ae7e9ddd 1956 = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1));
5c475ba9
EB
1957 align = scale_by_factor_of (offset, align);
1958 align = MIN (align, addend & -addend);
1959 last_pos = round_up (last_pos, align);
1960 pos = compute_related_constant (curpos, last_pos);
a1ab4c31 1961 }
5c475ba9 1962 else if (potential_alignment_gap (prev_old_field, old_field, pos))
a1ab4c31
AC
1963 {
1964 align = TYPE_ALIGN (field_type);
5c475ba9
EB
1965 last_pos = round_up (last_pos, align);
1966 pos = compute_related_constant (curpos, last_pos);
a1ab4c31
AC
1967 }
1968
1969 /* If we can't compute a position, set it to zero.
1970
5c475ba9
EB
1971 ??? We really should abort here, but it's too much work
1972 to get this correct for all cases. */
a1ab4c31
AC
1973 if (!pos)
1974 pos = bitsize_zero_node;
1975
1976 /* See if this type is variable-sized and make a pointer type
1977 and indicate the indirection if so. Beware that the debug
1978 back-end may adjust the position computed above according
1979 to the alignment of the field type, i.e. the pointer type
1980 in this case, if we don't preventively counter that. */
1981 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
1982 {
1983 field_type = build_pointer_type (field_type);
1984 if (align != 0 && TYPE_ALIGN (field_type) > align)
1985 {
1986 field_type = copy_node (field_type);
1987 TYPE_ALIGN (field_type) = align;
1988 }
1989 var = true;
1990 }
1991
1992 /* Make a new field name, if necessary. */
1993 if (var || align != 0)
1994 {
1995 char suffix[16];
1996
1997 if (align != 0)
1998 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
1999 align / BITS_PER_UNIT);
2000 else
2001 strcpy (suffix, "XVL");
2002
0fb2335d 2003 field_name = concat_name (field_name, suffix);
a1ab4c31
AC
2004 }
2005
da01bfee
EB
2006 new_field
2007 = create_field_decl (field_name, field_type, new_record_type,
2008 DECL_SIZE (old_field), pos, 0, 0);
910ad8de 2009 DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
a1ab4c31
AC
2010 TYPE_FIELDS (new_record_type) = new_field;
2011
2012 /* If old_field is a QUAL_UNION_TYPE, take its size as being
2013 zero. The only time it's not the last field of the record
2014 is when there are other components at fixed positions after
2015 it (meaning there was a rep clause for every field) and we
2016 want to be able to encode them. */
2017 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
2018 (TREE_CODE (TREE_TYPE (old_field))
2019 == QUAL_UNION_TYPE)
2020 ? bitsize_zero_node
2021 : DECL_SIZE (old_field));
2022 prev_old_field = old_field;
2023 }
2024
fb88e1dd 2025 TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));
a1ab4c31 2026
a5695aa2 2027 add_parallel_type (record_type, new_record_type);
a1ab4c31 2028 }
a1ab4c31
AC
2029}
2030
a1ab4c31 2031/* Utility function of above to merge LAST_SIZE, the previous size of a record
1e17ef87
EB
2032 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
2033 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
2034 replace a value of zero with the old size. If HAS_REP is true, we take the
2035 MAX of the end position of this field with LAST_SIZE. In all other cases,
2036 we use FIRST_BIT plus SIZE. Return an expression for the size. */
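/* Editorial worked example (plain fields, no variant part): with
   LAST_SIZE = 32, FIRST_BIT = 32 and SIZE = 16, the result is
   FIRST_BIT + SIZE = 48 bits; if HAS_REP is true the result becomes
   MAX (LAST_SIZE, FIRST_BIT + SIZE), so a field placed by a rep clause
   before the current end of the record cannot shrink it.  */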
a1ab4c31
AC
2037
2038static tree
2039merge_sizes (tree last_size, tree first_bit, tree size, bool special,
2040 bool has_rep)
2041{
2042 tree type = TREE_TYPE (last_size);
c6bd4220 2043 tree new_size;
a1ab4c31
AC
2044
2045 if (!special || TREE_CODE (size) != COND_EXPR)
2046 {
c6bd4220 2047 new_size = size_binop (PLUS_EXPR, first_bit, size);
a1ab4c31 2048 if (has_rep)
c6bd4220 2049 new_size = size_binop (MAX_EXPR, last_size, new_size);
a1ab4c31
AC
2050 }
2051
2052 else
c6bd4220
EB
2053 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
2054 integer_zerop (TREE_OPERAND (size, 1))
2055 ? last_size : merge_sizes (last_size, first_bit,
2056 TREE_OPERAND (size, 1),
2057 1, has_rep),
2058 integer_zerop (TREE_OPERAND (size, 2))
2059 ? last_size : merge_sizes (last_size, first_bit,
2060 TREE_OPERAND (size, 2),
2061 1, has_rep));
a1ab4c31
AC
2062
2063 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
2064 when fed through substitute_in_expr) into thinking that a constant
2065 size is not constant. */
c6bd4220
EB
2066 while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
2067 new_size = TREE_OPERAND (new_size, 0);
a1ab4c31 2068
c6bd4220 2069 return new_size;
a1ab4c31
AC
2070}
2071
2072/* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
2073 related by the addition of a constant. Return that constant if so. */
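/* Editorial example: if OP0 is N + 24 and OP1 is N, both in bitsizetype,
   the function returns the constant 24; if the variable parts cannot be
   matched, NULL_TREE is returned to signal failure.  */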
2074
2075static tree
2076compute_related_constant (tree op0, tree op1)
2077{
2078 tree op0_var, op1_var;
2079 tree op0_con = split_plus (op0, &op0_var);
2080 tree op1_con = split_plus (op1, &op1_var);
2081 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
2082
2083 if (operand_equal_p (op0_var, op1_var, 0))
2084 return result;
2085 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
2086 return result;
2087 else
2088 return 0;
2089}
2090
2091/* Utility function of above to split a tree OP which may be a sum, into a
2092 constant part, which is returned, and a variable part, which is stored
2093 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
2094 bitsizetype. */
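/* Editorial example: for IN = A + 8, *PVAR is set to A and the constant 8
   is returned; for a plain constant, *PVAR is bitsize_zero_node; anything
   that is neither a constant nor a sum/difference is treated as purely
   variable, i.e. *PVAR is IN itself and bitsize_zero_node is returned.  */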
2095
2096static tree
2097split_plus (tree in, tree *pvar)
2098{
722356ce
EB
2099 /* Strip conversions in order to ease the tree traversal and maximize the
2100 potential for constant or plus/minus discovery. We need to be careful
a1ab4c31
AC
2101 to always return and set *pvar to bitsizetype trees, but it's worth
2102 the effort. */
722356ce 2103 in = remove_conversions (in, false);
a1ab4c31
AC
2104
2105 *pvar = convert (bitsizetype, in);
2106
2107 if (TREE_CODE (in) == INTEGER_CST)
2108 {
2109 *pvar = bitsize_zero_node;
2110 return convert (bitsizetype, in);
2111 }
2112 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
2113 {
2114 tree lhs_var, rhs_var;
2115 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
2116 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
2117
2118 if (lhs_var == TREE_OPERAND (in, 0)
2119 && rhs_var == TREE_OPERAND (in, 1))
2120 return bitsize_zero_node;
2121
2122 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
2123 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
2124 }
2125 else
2126 return bitsize_zero_node;
2127}
2128\f
d47d0a8d
EB
2129/* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
2130 subprogram. If it is VOID_TYPE, then we are dealing with a procedure,
2131 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
2132 PARM_DECL nodes that are the subprogram parameters. CICO_LIST is the
2133 copy-in/copy-out list to be stored into the TYPE_CICO_LIST field.
2134 RETURN_UNCONSTRAINED_P is true if the function returns an unconstrained
2135 object. RETURN_BY_DIRECT_REF_P is true if the function returns by direct
2136 reference. RETURN_BY_INVISI_REF_P is true if the function returns by
2137 invisible reference. */
a1ab4c31
AC
2138
2139tree
2140create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
d47d0a8d
EB
2141 bool return_unconstrained_p, bool return_by_direct_ref_p,
2142 bool return_by_invisi_ref_p)
a1ab4c31 2143{
e5b00edf
NF
2144 /* A list of the data type nodes of the subprogram formal parameters.
2145 This list is generated by traversing the input list of PARM_DECL
2146 nodes. */
9771b263 2147 vec<tree, va_gc> *param_type_list = NULL;
d47d0a8d 2148 tree t, type;
a1ab4c31 2149
910ad8de 2150 for (t = param_decl_list; t; t = DECL_CHAIN (t))
9771b263 2151 vec_safe_push (param_type_list, TREE_TYPE (t));
a1ab4c31 2152
e5b00edf 2153 type = build_function_type_vec (return_type, param_type_list);
a1ab4c31 2154
d47d0a8d
EB
2155 /* TYPE may have been shared since GCC hashes types. If it has a different
2156 CICO_LIST, make a copy. Likewise for the various flags. */
523e82a7
EB
2157 if (!fntype_same_flags_p (type, cico_list, return_unconstrained_p,
2158 return_by_direct_ref_p, return_by_invisi_ref_p))
d47d0a8d
EB
2159 {
2160 type = copy_type (type);
2161 TYPE_CI_CO_LIST (type) = cico_list;
2162 TYPE_RETURN_UNCONSTRAINED_P (type) = return_unconstrained_p;
2163 TYPE_RETURN_BY_DIRECT_REF_P (type) = return_by_direct_ref_p;
2164 TREE_ADDRESSABLE (type) = return_by_invisi_ref_p;
2165 }
a1ab4c31 2166
a1ab4c31
AC
2167 return type;
2168}
2169\f
2170/* Return a copy of TYPE but safe to modify in any way. */
2171
2172tree
2173copy_type (tree type)
2174{
c6bd4220 2175 tree new_type = copy_node (type);
a1ab4c31 2176
90dcfecb
EB
2177 /* Unshare the language-specific data. */
2178 if (TYPE_LANG_SPECIFIC (type))
2179 {
2180 TYPE_LANG_SPECIFIC (new_type) = NULL;
2181 SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
2182 }
2183
2184 /* And the contents of the language-specific slot if needed. */
2185 if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
2186 && TYPE_RM_VALUES (type))
2187 {
2188 TYPE_RM_VALUES (new_type) = NULL_TREE;
2189 SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
2190 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
2191 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
2192 }
2193
a1ab4c31
AC
2194 /* copy_node clears this field instead of copying it, because it is
2195 aliased with TREE_CHAIN. */
c6bd4220 2196 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
a1ab4c31 2197
c6bd4220
EB
2198 TYPE_POINTER_TO (new_type) = 0;
2199 TYPE_REFERENCE_TO (new_type) = 0;
2200 TYPE_MAIN_VARIANT (new_type) = new_type;
2201 TYPE_NEXT_VARIANT (new_type) = 0;
a1ab4c31 2202
c6bd4220 2203 return new_type;
a1ab4c31
AC
2204}
2205\f
c1abd261
EB
2206/* Return a subtype of sizetype with range MIN to MAX and whose
2207 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2208 of the associated TYPE_DECL. */
a1ab4c31
AC
2209
2210tree
2211create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
2212{
2213 /* First build a type for the desired range. */
523e82a7 2214 tree type = build_nonshared_range_type (sizetype, min, max);
a1ab4c31 2215
523e82a7 2216 /* Then set the index type. */
a1ab4c31 2217 SET_TYPE_INDEX_TYPE (type, index);
74746d49 2218 create_type_decl (NULL_TREE, type, true, false, gnat_node);
c1abd261 2219
a1ab4c31
AC
2220 return type;
2221}
84fb43a1
EB
2222
2223/* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2224 sizetype is used. */
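/* Editorial sketch (hypothetical call, not from the original source): the
   GCC view of an Ada subtype such as "subtype S is Integer range 1 .. 10"
   essentially amounts to

     tree s = create_range_type (integer_type_node,
                                 build_int_cst (integer_type_node, 1),
                                 build_int_cst (integer_type_node, 10));

   the base range stays that of Integer while 1 and 10 are recorded as the
   RM bounds via SET_TYPE_RM_MIN_VALUE/SET_TYPE_RM_MAX_VALUE.  */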
2225
2226tree
2227create_range_type (tree type, tree min, tree max)
2228{
2229 tree range_type;
2230
2231 if (type == NULL_TREE)
2232 type = sizetype;
2233
2234 /* First build a type with the base range. */
523e82a7
EB
2235 range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
2236 TYPE_MAX_VALUE (type));
84fb43a1
EB
2237
2238 /* Then set the actual range. */
1eb58520
AC
2239 SET_TYPE_RM_MIN_VALUE (range_type, min);
2240 SET_TYPE_RM_MAX_VALUE (range_type, max);
84fb43a1
EB
2241
2242 return range_type;
2243}
a1ab4c31 2244\f
10069d53
EB
2245/* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
2246 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
2247 its data type. */
2248
2249tree
2250create_type_stub_decl (tree type_name, tree type)
2251{
2252 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
2253 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
2254 emitted in DWARF. */
44e9e3ec 2255 tree type_decl = build_decl (input_location, TYPE_DECL, type_name, type);
10069d53 2256 DECL_ARTIFICIAL (type_decl) = 1;
bc712852 2257 TYPE_ARTIFICIAL (type) = 1;
10069d53
EB
2258 return type_decl;
2259}
2260
2261/* Return a TYPE_DECL node. TYPE_NAME gives the name of the type and TYPE
2262 is a ..._TYPE node giving its data type. ARTIFICIAL_P is true if this
2263 is a declaration that was generated by the compiler. DEBUG_INFO_P is
2264 true if we need to write debug information about this type. GNAT_NODE
2265 is used for the position of the decl. */
a1ab4c31
AC
2266
2267tree
74746d49
EB
2268create_type_decl (tree type_name, tree type, bool artificial_p,
2269 bool debug_info_p, Node_Id gnat_node)
a1ab4c31 2270{
a1ab4c31 2271 enum tree_code code = TREE_CODE (type);
10069d53
EB
2272 bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
2273 tree type_decl;
a1ab4c31 2274
10069d53
EB
2275 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
2276 gcc_assert (!TYPE_IS_DUMMY_P (type));
a1ab4c31 2277
10069d53
EB
2278 /* If the type hasn't been named yet, we're naming it; preserve an existing
2279 TYPE_STUB_DECL that has been attached to it for some purpose. */
2280 if (!named && TYPE_STUB_DECL (type))
2281 {
2282 type_decl = TYPE_STUB_DECL (type);
2283 DECL_NAME (type_decl) = type_name;
2284 }
2285 else
74746d49 2286 type_decl = build_decl (input_location, TYPE_DECL, type_name, type);
a1ab4c31 2287
10069d53 2288 DECL_ARTIFICIAL (type_decl) = artificial_p;
bc712852 2289 TYPE_ARTIFICIAL (type) = artificial_p;
58c8f770
EB
2290
2291 /* Add this decl to the current binding level. */
10069d53 2292 gnat_pushdecl (type_decl, gnat_node);
58c8f770 2293
aef308d0
PMR
2294 /* If we're naming the type, equate the TYPE_STUB_DECL to the name. This
2295 causes the name to be also viewed as a "tag" by the debug back-end, with
2296 the advantage that no DW_TAG_typedef is emitted for artificial "tagged"
2297 types in DWARF.
2298
2299 Note that if "type" is used as a DECL_ORIGINAL_TYPE, it may be referenced
2300 from multiple contexts, and "type_decl" references a copy of it: in such a
2301 case, do not mess TYPE_STUB_DECL: we do not want to re-use the TYPE_DECL
2302 with the mechanism above. */
2303 if (!named && type != DECL_ORIGINAL_TYPE (type_decl))
10069d53
EB
2304 TYPE_STUB_DECL (type) = type_decl;
2305
50741117
EB
2306 /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
2307 back-end doesn't support, and for others if we don't need to. */
a1ab4c31
AC
2308 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
2309 DECL_IGNORED_P (type_decl) = 1;
a1ab4c31
AC
2310
2311 return type_decl;
2312}
10069d53 2313\f
a1ab4c31
AC
2314/* Return a VAR_DECL or CONST_DECL node.
2315
2316 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
2317 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
2318 the GCC tree for an optional initial expression; NULL_TREE if none.
2319
2320 CONST_FLAG is true if this variable is constant, in which case we might
2321 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
2322
2323 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
2324 definition to be made visible outside of the current compilation unit, for
2325 instance variable definitions in a package specification.
2326
1e17ef87 2327 EXTERN_FLAG is true when processing an external variable declaration (as
a1ab4c31
AC
2328 opposed to a definition: no storage is to be allocated for the variable).
2329
2330 STATIC_FLAG is only relevant when not at top level. In that case
2331 it indicates whether to always allocate storage to the variable.
2332
2333 GNAT_NODE is used for the position of the decl. */
2334
2335tree
2336create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
2337 bool const_flag, bool public_flag, bool extern_flag,
2338 bool static_flag, bool const_decl_allowed_p,
2339 struct attrib *attr_list, Node_Id gnat_node)
2340{
5fe48b3d
EB
2341 /* Whether the object has static storage duration, either explicitly or by
2342 virtue of being declared at the global level. */
2343 const bool static_storage = static_flag || global_bindings_p ();
2344
2345 /* Whether the initializer is constant: for an external object or an object
2346 with static storage duration, we check that the initializer is a valid
2347 constant expression for initializing a static variable; otherwise, we
2348 only check that it is constant. */
2349 const bool init_const
2350 = (var_init
a1ab4c31 2351 && gnat_types_compatible_p (type, TREE_TYPE (var_init))
5fe48b3d
EB
2352 && (extern_flag || static_storage
2353 ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
2354 != NULL_TREE
a1ab4c31
AC
2355 : TREE_CONSTANT (var_init)));
2356
2357 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
5fe48b3d 2358 case the initializer may be used in lieu of the DECL node (as done in
a1ab4c31 2359 Identifier_to_gnu). This is useful to prevent the need of elaboration
5fe48b3d
EB
2360 code when an identifier for which such a DECL is made is in turn used
2361 as an initializer. We used to rely on CONST_DECL vs VAR_DECL for this,
2362 but extra constraints apply to this choice (see below) and they are not
2363 relevant to the distinction we wish to make. */
2364 const bool constant_p = const_flag && init_const;
a1ab4c31
AC
2365
2366 /* The actual DECL node. CONST_DECL was initially intended for enumerals
2367 and may be used for scalars in general but not for aggregates. */
2368 tree var_decl
c172df28
AH
2369 = build_decl (input_location,
2370 (constant_p && const_decl_allowed_p
a1ab4c31
AC
2371 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
2372 var_name, type);
2373
2374 /* If this is external, throw away any initializations (they will be done
2375 elsewhere) unless this is a constant for which we would like to remain
2376 able to get the initializer. If we are defining a global here, leave a
2377 constant initialization and save any variable elaborations for the
2378 elaboration routine. If we are just annotating types, throw away the
2379 initialization if it isn't a constant. */
2380 if ((extern_flag && !constant_p)
2381 || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
2382 var_init = NULL_TREE;
2383
5fe48b3d
EB
2384 /* At the global level, a non-constant initializer generates elaboration
2385 statements. Check that such statements are allowed, that is to say,
2386 not violating a No_Elaboration_Code restriction. */
2387 if (var_init && !init_const && global_bindings_p ())
a1ab4c31 2388 Check_Elaboration_Code_Allowed (gnat_node);
3b9e8343 2389
8b7b0c36
JH
2390 DECL_INITIAL (var_decl) = var_init;
2391 TREE_READONLY (var_decl) = const_flag;
2392 DECL_EXTERNAL (var_decl) = extern_flag;
8b7b0c36 2393 TREE_CONSTANT (var_decl) = constant_p;
5fe48b3d
EB
2394
2395 /* We need to allocate static storage for an object with static storage
2396 duration if it isn't external. */
2397 TREE_STATIC (var_decl) = !extern_flag && static_storage;
2398
2399 /* The object is public if it is external or if it is declared public
2400 and has static storage duration. */
2401 TREE_PUBLIC (var_decl) = extern_flag || (public_flag && static_storage);
a1ab4c31
AC
2402
2403 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
2404 try to fiddle with DECL_COMMON. However, on platforms that don't
2405 support global BSS sections, uninitialized global variables would
2406 go in DATA instead, thus increasing the size of the executable. */
2407 if (!flag_no_common
2408 && TREE_CODE (var_decl) == VAR_DECL
3b9e8343 2409 && TREE_PUBLIC (var_decl)
a1ab4c31
AC
2410 && !have_global_bss_p ())
2411 DECL_COMMON (var_decl) = 1;
a1ab4c31 2412
5225a138
EB
2413 /* For an external constant whose initializer is not absolute, do not emit
2414 debug info. In DWARF this would mean a global relocation in a read-only
c01fe451 2415 section which runs afoul of the PE-COFF run-time relocation mechanism. */
5225a138
EB
2416 if (extern_flag
2417 && constant_p
50179d58 2418 && var_init
5225a138 2419 && initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
5fe48b3d 2420 != null_pointer_node)
5225a138
EB
2421 DECL_IGNORED_P (var_decl) = 1;
2422
5fe48b3d
EB
2423 if (TYPE_VOLATILE (type))
2424 TREE_SIDE_EFFECTS (var_decl) = TREE_THIS_VOLATILE (var_decl) = 1;
2425
a1ab4c31
AC
2426 if (TREE_SIDE_EFFECTS (var_decl))
2427 TREE_ADDRESSABLE (var_decl) = 1;
2428
74746d49
EB
2429 /* ??? Some attributes cannot be applied to CONST_DECLs. */
2430 if (TREE_CODE (var_decl) == VAR_DECL)
2431 process_attributes (&var_decl, &attr_list, true, gnat_node);
2432
2433 /* Add this decl to the current binding level. */
2434 gnat_pushdecl (var_decl, gnat_node);
2435
58c8f770 2436 if (TREE_CODE (var_decl) == VAR_DECL)
a1ab4c31 2437 {
58c8f770
EB
2438 if (asm_name)
2439 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
74746d49 2440
a1ab4c31
AC
2441 if (global_bindings_p ())
2442 rest_of_decl_compilation (var_decl, true, 0);
2443 }
a1ab4c31
AC
2444
2445 return var_decl;
2446}
2447\f
2448/* Return true if TYPE, an aggregate type, contains (or is) an array. */
2449
2450static bool
2451aggregate_type_contains_array_p (tree type)
2452{
2453 switch (TREE_CODE (type))
2454 {
2455 case RECORD_TYPE:
2456 case UNION_TYPE:
2457 case QUAL_UNION_TYPE:
2458 {
2459 tree field;
910ad8de 2460 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
a1ab4c31
AC
2461 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
2462 && aggregate_type_contains_array_p (TREE_TYPE (field)))
2463 return true;
2464 return false;
2465 }
2466
2467 case ARRAY_TYPE:
2468 return true;
2469
2470 default:
2471 gcc_unreachable ();
2472 }
2473}
2474
62f9f3ce 2475/* Return a FIELD_DECL node. FIELD_NAME is the field's name, FIELD_TYPE is
da01bfee
EB
2476 its type and RECORD_TYPE is the type of the enclosing record. If SIZE is
2477 nonzero, it is the specified size of the field. If POS is nonzero, it is
2478 the bit position. PACKED is 1 if the enclosing record is packed, -1 if it
2479 has Component_Alignment of Storage_Unit. If ADDRESSABLE is nonzero, it
62f9f3ce
EB
2480 means we are allowed to take the address of the field; if it is negative,
2481 we should not make a bitfield, which is used by make_aligning_type. */
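/* Editorial example: for a component of a record subject to pragma Pack,
   PACKED is 1 and, absent an explicit size clause, SIZE is taken below
   from rm_size of the field type, which may turn the field into a
   bitfield narrower than its natural GCC size.  */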
a1ab4c31
AC
2482
2483tree
2484create_field_decl (tree field_name, tree field_type, tree record_type,
da01bfee 2485 tree size, tree pos, int packed, int addressable)
a1ab4c31 2486{
c172df28
AH
2487 tree field_decl = build_decl (input_location,
2488 FIELD_DECL, field_name, field_type);
a1ab4c31
AC
2489
2490 DECL_CONTEXT (field_decl) = record_type;
2491 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
2492
2493 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
2494 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
2495 Likewise for an aggregate without specified position that contains an
2496 array, because in this case slices of variable length of this array
2497 must be handled by GCC and variable-sized objects need to be aligned
2498 to at least a byte boundary. */
2499 if (packed && (TYPE_MODE (field_type) == BLKmode
2500 || (!pos
2501 && AGGREGATE_TYPE_P (field_type)
2502 && aggregate_type_contains_array_p (field_type))))
2503 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
2504
2505 /* If a size is specified, use it. Otherwise, if the record type is packed
2506 compute a size to use, which may differ from the object's natural size.
2507 We always set a size in this case to trigger the checks for bitfield
2508 creation below, which is typically required when no position has been
2509 specified. */
2510 if (size)
2511 size = convert (bitsizetype, size);
2512 else if (packed == 1)
2513 {
2514 size = rm_size (field_type);
62f9f3ce
EB
2515 if (TYPE_MODE (field_type) == BLKmode)
2516 size = round_up (size, BITS_PER_UNIT);
a1ab4c31
AC
2517 }
2518
2519 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
2520 specified for two reasons: first if the size differs from the natural
2521 size. Second, if the alignment is insufficient. There are a number of
2522 ways the latter can be true.
2523
2524 We never make a bitfield if the type of the field has a nonconstant size,
2525 because no such entity requiring bitfield operations should reach here.
2526
2527 We do *preventively* make a bitfield when there might be the need for it
2528 but we don't have all the necessary information to decide, as is the case
2529 of a field with no specified position in a packed record.
2530
2531 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
2532 in layout_decl or finish_record_type to clear the bit_field indication if
2533 it is in fact not needed. */
2534 if (addressable >= 0
2535 && size
2536 && TREE_CODE (size) == INTEGER_CST
2537 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
2538 && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
2539 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
2540 || packed
2541 || (TYPE_ALIGN (record_type) != 0
2542 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
2543 {
2544 DECL_BIT_FIELD (field_decl) = 1;
2545 DECL_SIZE (field_decl) = size;
2546 if (!packed && !pos)
feec4372
EB
2547 {
2548 if (TYPE_ALIGN (record_type) != 0
2549 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))
2550 DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
2551 else
2552 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
2553 }
a1ab4c31
AC
2554 }
2555
2556 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
2557
2558 /* Bump the alignment if need be, either for bitfield/packing purposes or
2559 to satisfy the type requirements if no such consideration applies. When
2560 we get the alignment from the type, indicate if this is from an explicit
2561 user request, which prevents stor-layout from lowering it later on. */
2562 {
d9223014 2563 unsigned int bit_align
a1ab4c31
AC
2564 = (DECL_BIT_FIELD (field_decl) ? 1
2565 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);
2566
2567 if (bit_align > DECL_ALIGN (field_decl))
2568 DECL_ALIGN (field_decl) = bit_align;
2569 else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
2570 {
2571 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
2572 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
2573 }
2574 }
2575
2576 if (pos)
2577 {
2578 /* We need to pass in the alignment the DECL is known to have.
2579 This is the lowest-order bit set in POS, but no more than
2580 the alignment of the record, if one is specified. Note
2581 that an alignment of 0 is taken as infinite. */
2582 unsigned int known_align;
2583
cc269bb6 2584 if (tree_fits_uhwi_p (pos))
ae7e9ddd 2585 known_align = tree_to_uhwi (pos) & - tree_to_uhwi (pos);
a1ab4c31
AC
2586 else
2587 known_align = BITS_PER_UNIT;
2588
2589 if (TYPE_ALIGN (record_type)
2590 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
2591 known_align = TYPE_ALIGN (record_type);
2592
2593 layout_decl (field_decl, known_align);
2594 SET_DECL_OFFSET_ALIGN (field_decl,
cc269bb6 2595 tree_fits_uhwi_p (pos) ? BIGGEST_ALIGNMENT
a1ab4c31
AC
2596 : BITS_PER_UNIT);
2597 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
2598 &DECL_FIELD_BIT_OFFSET (field_decl),
2599 DECL_OFFSET_ALIGN (field_decl), pos);
a1ab4c31
AC
2600 }
2601
2602 /* In addition to what our caller says, claim the field is addressable if we
2603 know that its type is not suitable.
2604
2605 The field may also be "technically" nonaddressable, meaning that even if
2606 we attempt to take the field's address we will actually get the address
2607 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
2608 value we have at this point is not accurate enough, so we don't account
2609 for this here and let finish_record_type decide. */
4c5a0615 2610 if (!addressable && !type_for_nonaliased_component_p (field_type))
a1ab4c31
AC
2611 addressable = 1;
2612
2613 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
2614
2615 return field_decl;
2616}
2617\f
a8e05f92
EB
2618/* Return a PARM_DECL node. PARAM_NAME is the name of the parameter and
2619 PARAM_TYPE is its type. READONLY is true if the parameter is readonly
2620 (either an In parameter or an address of a pass-by-ref parameter). */
a1ab4c31
AC
2621
2622tree
2623create_param_decl (tree param_name, tree param_type, bool readonly)
2624{
c172df28
AH
2625 tree param_decl = build_decl (input_location,
2626 PARM_DECL, param_name, param_type);
a1ab4c31 2627
a8e05f92
EB
2628 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
2629 can lead to various ABI violations. */
2630 if (targetm.calls.promote_prototypes (NULL_TREE)
2631 && INTEGRAL_TYPE_P (param_type)
a1ab4c31
AC
2632 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
2633 {
2634 /* We have to be careful about biased types here. Make a subtype
2635 of integer_type_node with the proper biasing. */
2636 if (TREE_CODE (param_type) == INTEGER_TYPE
2637 && TYPE_BIASED_REPRESENTATION_P (param_type))
2638 {
84fb43a1
EB
2639 tree subtype
2640 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
c1abd261
EB
2641 TREE_TYPE (subtype) = integer_type_node;
2642 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
84fb43a1
EB
2643 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (param_type));
2644 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (param_type));
c1abd261 2645 param_type = subtype;
a1ab4c31
AC
2646 }
2647 else
2648 param_type = integer_type_node;
2649 }
2650
2651 DECL_ARG_TYPE (param_decl) = param_type;
2652 TREE_READONLY (param_decl) = readonly;
2653 return param_decl;
2654}
2655\f
74746d49
EB
2656/* Process the attributes in ATTR_LIST for NODE, which is either a DECL or
2657 a TYPE. If IN_PLACE is true, the tree pointed to by NODE should not be
2658 changed. GNAT_NODE is used for the position of error messages. */
a1ab4c31 2659
74746d49
EB
2660void
2661process_attributes (tree *node, struct attrib **attr_list, bool in_place,
2662 Node_Id gnat_node)
a1ab4c31 2663{
74746d49
EB
2664 struct attrib *attr;
2665
2666 for (attr = *attr_list; attr; attr = attr->next)
2667 switch (attr->type)
a1ab4c31
AC
2668 {
2669 case ATTR_MACHINE_ATTRIBUTE:
74746d49
EB
2670 Sloc_to_locus (Sloc (gnat_node), &input_location);
2671 decl_attributes (node, tree_cons (attr->name, attr->args, NULL_TREE),
2672 in_place ? ATTR_FLAG_TYPE_IN_PLACE : 0);
a1ab4c31
AC
2673 break;
2674
2675 case ATTR_LINK_ALIAS:
74746d49 2676 if (!DECL_EXTERNAL (*node))
a1ab4c31 2677 {
74746d49
EB
2678 TREE_STATIC (*node) = 1;
2679 assemble_alias (*node, attr->name);
a1ab4c31
AC
2680 }
2681 break;
2682
2683 case ATTR_WEAK_EXTERNAL:
2684 if (SUPPORTS_WEAK)
74746d49 2685 declare_weak (*node);
a1ab4c31
AC
2686 else
2687 post_error ("?weak declarations not supported on this target",
74746d49 2688 attr->error_point);
a1ab4c31
AC
2689 break;
2690
2691 case ATTR_LINK_SECTION:
677f3fa8 2692 if (targetm_common.have_named_sections)
a1ab4c31 2693 {
0ab75824 2694 set_decl_section_name (*node, IDENTIFIER_POINTER (attr->name));
74746d49 2695 DECL_COMMON (*node) = 0;
a1ab4c31
AC
2696 }
2697 else
2698 post_error ("?section attributes are not supported for this target",
74746d49 2699 attr->error_point);
a1ab4c31
AC
2700 break;
2701
2702 case ATTR_LINK_CONSTRUCTOR:
74746d49
EB
2703 DECL_STATIC_CONSTRUCTOR (*node) = 1;
2704 TREE_USED (*node) = 1;
a1ab4c31
AC
2705 break;
2706
2707 case ATTR_LINK_DESTRUCTOR:
74746d49
EB
2708 DECL_STATIC_DESTRUCTOR (*node) = 1;
2709 TREE_USED (*node) = 1;
a1ab4c31 2710 break;
40a14772
TG
2711
2712 case ATTR_THREAD_LOCAL_STORAGE:
56363ffd 2713 set_decl_tls_model (*node, decl_default_tls_model (*node));
74746d49 2714 DECL_COMMON (*node) = 0;
40a14772 2715 break;
a1ab4c31 2716 }
74746d49
EB
2717
2718 *attr_list = NULL;
a1ab4c31
AC
2719}
2720\f
ea292448 2721/* Record DECL as a global non-constant renaming. */
a1ab4c31
AC
2722
2723void
ea292448 2724record_global_nonconstant_renaming (tree decl)
a1ab4c31 2725{
15bf7d19 2726 gcc_assert (!DECL_LOOP_PARM_P (decl) && DECL_RENAMED_OBJECT (decl));
ea292448 2727 vec_safe_push (global_nonconstant_renamings, decl);
a1ab4c31
AC
2728}
2729
ea292448
EB
2730/* Invalidate the global non-constant renamings, lest their renamed object
2731 contains SAVE_EXPRs tied to an elaboration routine. */
a1ab4c31
AC
2732
2733void
ea292448 2734invalidate_global_nonconstant_renamings (void)
a1ab4c31
AC
2735{
2736 unsigned int i;
2737 tree iter;
2738
ea292448 2739 if (global_nonconstant_renamings == NULL)
9771b263
DN
2740 return;
2741
ea292448
EB
2742 FOR_EACH_VEC_ELT (*global_nonconstant_renamings, i, iter)
2743 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
a1ab4c31 2744
ea292448 2745 vec_free (global_nonconstant_renamings);
a1ab4c31
AC
2746}
2747
2748/* Return true if VALUE is known to be a multiple of FACTOR, which must be
2749 a power of 2. */
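/* Editorial examples: a constant 24 with FACTOR 8 yields true (24 % 8 == 0);
   X * 16 with FACTOR 8 also yields true because one operand of the
   MULT_EXPR is itself a multiple of 8; a constant 12 with FACTOR 8 yields
   false.  */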
2750
2751bool
2752value_factor_p (tree value, HOST_WIDE_INT factor)
2753{
cc269bb6 2754 if (tree_fits_uhwi_p (value))
ae7e9ddd 2755 return tree_to_uhwi (value) % factor == 0;
a1ab4c31
AC
2756
2757 if (TREE_CODE (value) == MULT_EXPR)
2758 return (value_factor_p (TREE_OPERAND (value, 0), factor)
2759 || value_factor_p (TREE_OPERAND (value, 1), factor));
2760
2761 return false;
2762}
2763
e8fa3dcd
PMR
2764/* Return whether GNAT_NODE is a defining identifier for a renaming that comes
2765 from the parameter association for the instantiation of a generic. We do
2766 not want to emit source locations for them: the code generated for their
2767 initialization is likely to disturb debugging. */
2768
2769bool
2770renaming_from_generic_instantiation_p (Node_Id gnat_node)
2771{
2772 if (Nkind (gnat_node) != N_Defining_Identifier
2773 || !IN (Ekind (gnat_node), Object_Kind)
2774 || Comes_From_Source (gnat_node)
2775 || !Present (Renamed_Object (gnat_node)))
2776 return false;
2777
2778 /* Get the object declaration of the renamed object, if any and if the
2779 renamed object is a mere identifier. */
2780 gnat_node = Renamed_Object (gnat_node);
2781 if (Nkind (gnat_node) != N_Identifier)
2782 return false;
2783
2784 gnat_node = Entity (gnat_node);
2785 if (!Present (Parent (gnat_node)))
2786 return false;
2787
2788 gnat_node = Parent (gnat_node);
2789 return
2790 (Present (gnat_node)
2791 && Nkind (gnat_node) == N_Object_Declaration
2792 && Present (Corresponding_Generic_Association (gnat_node)));
2793}
2794
9a30c7c4
AC
2795/* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
2796 feed it with the elaboration of GNAT_SCOPE. */
2797
2798static struct deferred_decl_context_node *
2799add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global)
2800{
2801 struct deferred_decl_context_node *new_node;
2802
2803 new_node
2804 = (struct deferred_decl_context_node * ) xmalloc (sizeof (*new_node));
2805 new_node->decl = decl;
2806 new_node->gnat_scope = gnat_scope;
2807 new_node->force_global = force_global;
2808 new_node->types.create (1);
2809 new_node->next = deferred_decl_context_queue;
2810 deferred_decl_context_queue = new_node;
2811 return new_node;
2812}
2813
2814/* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
2815 feed it with the DECL_CONTEXT computed as part of N as soon as it is
2816 computed. */
2817
2818static void
2819add_deferred_type_context (struct deferred_decl_context_node *n, tree type)
2820{
2821 n->types.safe_push (type);
2822}
2823
2824/* Get the GENERIC node corresponding to GNAT_SCOPE, if available. Return
2825 NULL_TREE if it is not available. */
2826
2827static tree
2828compute_deferred_decl_context (Entity_Id gnat_scope)
2829{
2830 tree context;
2831
2832 if (present_gnu_tree (gnat_scope))
2833 context = get_gnu_tree (gnat_scope);
2834 else
2835 return NULL_TREE;
2836
2837 if (TREE_CODE (context) == TYPE_DECL)
2838 {
2839 const tree context_type = TREE_TYPE (context);
2840
2841 /* Skip dummy types: only the final ones can appear in the context
2842 chain. */
2843 if (TYPE_DUMMY_P (context_type))
2844 return NULL_TREE;
2845
2846 /* ..._TYPE nodes are more useful than TYPE_DECL nodes in the context
2847 chain. */
2848 else
2849 context = context_type;
2850 }
2851
2852 return context;
2853}
2854
2855/* Try to process all deferred nodes in the queue. Keep in the queue the ones
2856 that cannot be processed yet, remove the other ones. If FORCE is true,
2857 force the processing for all nodes, use the global context when nodes don't
2858 have a GNU translation. */
2859
2860void
2861process_deferred_decl_context (bool force)
2862{
2863 struct deferred_decl_context_node **it = &deferred_decl_context_queue;
2864 struct deferred_decl_context_node *node;
2865
2866 while (*it != NULL)
2867 {
2868 bool processed = false;
2869 tree context = NULL_TREE;
2870 Entity_Id gnat_scope;
2871
2872 node = *it;
2873
2874 /* If FORCE, get the innermost elaborated scope. Otherwise, just try to
2875 get the first scope. */
2876 gnat_scope = node->gnat_scope;
2877 while (Present (gnat_scope))
2878 {
2879 context = compute_deferred_decl_context (gnat_scope);
2880 if (!force || context != NULL_TREE)
2881 break;
2882 gnat_scope = get_debug_scope (gnat_scope, NULL);
2883 }
2884
2885 /* Imported declarations must not be in a local context (i.e. not inside
2886 a function). */
2887 if (context != NULL_TREE && node->force_global > 0)
2888 {
2889 tree ctx = context;
2890
2891 while (ctx != NULL_TREE)
2892 {
2893 gcc_assert (TREE_CODE (ctx) != FUNCTION_DECL);
2894 ctx = (DECL_P (ctx))
2895 ? DECL_CONTEXT (ctx)
2896 : TYPE_CONTEXT (ctx);
2897 }
2898 }
2899
2900 /* If FORCE, we want to get rid of all nodes in the queue: in case there
2901 was no elaborated scope, use the global context. */
2902 if (force && context == NULL_TREE)
2903 context = get_global_context ();
2904
2905 if (context != NULL_TREE)
2906 {
2907 tree t;
2908 int i;
2909
2910 DECL_CONTEXT (node->decl) = context;
2911
2912 /* Propagate it to the TYPE_CONTEXT attributes of the requested
2913 ..._TYPE nodes. */
2914 FOR_EACH_VEC_ELT (node->types, i, t)
2915 {
24d4b3d5 2916 gnat_set_type_context (t, context);
9a30c7c4
AC
2917 }
2918 processed = true;
2919 }
2920
 2921	      /* If this node has been successfully processed, remove it from the
2922 queue. Then move to the next node. */
2923 if (processed)
2924 {
2925 *it = node->next;
2926 node->types.release ();
2927 free (node);
2928 }
2929 else
2930 it = &node->next;
2931 }
2932}
2933
2934
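/* Editorial note: the loop above walks the queue through a pointer to a
   pointer ("it") so that unlinking a processed node needs no special case
   for the head of the list.  The standalone sketch below is not part of
   this file and uses only hypothetical names; it merely illustrates that
   idiom with plain C types.  */

#include <stdbool.h>
#include <stdlib.h>

struct work_node
{
  bool ready;                   /* Whether this node can be processed now.  */
  struct work_node *next;
};

struct work_node *work_queue;

void
drain_work_queue (void)
{
  struct work_node **it = &work_queue;

  while (*it != NULL)
    {
      struct work_node *node = *it;

      if (node->ready)
        {
          /* Rewrite the pointer that designates NODE and free it; *IT now
             refers to the following node, so IT must not be advanced.  */
          *it = node->next;
          free (node);
        }
      else
        /* Keep the node and advance IT to its NEXT field.  */
        it = &node->next;
    }
}

int
main (void)
{
  /* Build a three-node queue where only the middle node is not ready.  */
  for (int i = 0; i < 3; i++)
    {
      struct work_node *n = (struct work_node *) malloc (sizeof (*n));
      n->ready = (i != 1);
      n->next = work_queue;
      work_queue = n;
    }

  drain_work_queue ();

  /* Exactly one node (the "not ready" one) must remain.  */
  return (work_queue && !work_queue->next) ? 0 : 1;
}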
5c475ba9
EB
2935/* Return VALUE scaled by the biggest power-of-2 factor of EXPR. */
2936
2937static unsigned int
2938scale_by_factor_of (tree expr, unsigned int value)
2939{
2940 expr = remove_conversions (expr, true);
2941
2942 /* An expression which is a bitwise AND with a mask has a power-of-2 factor
2943 corresponding to the number of trailing zeros of the mask. */
2944 if (TREE_CODE (expr) == BIT_AND_EXPR
2945 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST)
2946 {
2947 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
2948 unsigned int i = 0;
2949
2950 while ((mask & 1) == 0 && i < HOST_BITS_PER_WIDE_INT)
2951 {
2952 mask >>= 1;
2953 value *= 2;
2954 i++;
2955 }
2956 }
2957
2958 return value;
2959}
2960
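/* Editorial note: a standalone illustration, not part of this file, of the
   rule implemented above.  For an expression of the form X & MASK, each
   trailing zero bit of MASK contributes a factor of 2 known to divide the
   result, so VALUE can be scaled by that power of 2.  The function name
   below is hypothetical.  */

#include <assert.h>

unsigned int
scale_by_mask_factor (unsigned long mask, unsigned int value)
{
  unsigned int i = 0;

  /* Multiply VALUE by 2 for every trailing zero bit of MASK.  */
  while ((mask & 1) == 0 && i < 8 * sizeof (mask))
    {
      mask >>= 1;
      value *= 2;
      i++;
    }

  return value;
}

int
main (void)
{
  /* X & 0xFFF0 is always a multiple of 16 (4 trailing zero bits), so an
     alignment of 8 bits can be raised to 8 * 16 = 128 bits.  */
  assert (scale_by_mask_factor (0xFFF0, 8) == 128);
  return 0;
}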
7d7fcb08 2961/* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
a1ab4c31
AC
2962 unless we can prove these 2 fields are laid out in such a way that no gap
 2963    exists between the end of PREV_FIELD and the beginning of CURR_FIELD.  OFFSET
2964 is the distance in bits between the end of PREV_FIELD and the starting
2965 position of CURR_FIELD. It is ignored if null. */
2966
2967static bool
2968potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
2969{
 2970  /* If this is the first field of the record, there cannot be any gap.  */
2971 if (!prev_field)
2972 return false;
2973
78df6221 2974 /* If the previous field is a union type, then return false: The only
a1ab4c31
AC
2975 time when such a field is not the last field of the record is when
2976 there are other components at fixed positions after it (meaning there
2977 was a rep clause for every field), in which case we don't want the
2978 alignment constraint to override them. */
2979 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
2980 return false;
2981
2982 /* If the distance between the end of prev_field and the beginning of
2983 curr_field is constant, then there is a gap if the value of this
2984 constant is not null. */
cc269bb6 2985 if (offset && tree_fits_uhwi_p (offset))
a1ab4c31
AC
2986 return !integer_zerop (offset);
2987
2988 /* If the size and position of the previous field are constant,
2989 then check the sum of this size and position. There will be a gap
 2990     iff it is not a multiple of the current field alignment.  */
cc269bb6
RS
2991 if (tree_fits_uhwi_p (DECL_SIZE (prev_field))
2992 && tree_fits_uhwi_p (bit_position (prev_field)))
ae7e9ddd
RS
2993 return ((tree_to_uhwi (bit_position (prev_field))
2994 + tree_to_uhwi (DECL_SIZE (prev_field)))
a1ab4c31
AC
2995 % DECL_ALIGN (curr_field) != 0);
2996
2997 /* If both the position and size of the previous field are multiples
2998 of the current field alignment, there cannot be any gap. */
2999 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
3000 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
3001 return false;
3002
 3003  /* Fallback: return that there may be a potential gap.  */
3004 return true;
3005}
3006
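/* Editorial note: a standalone sketch, not part of this file, of the
   constant case checked above; the helper name is hypothetical.  When the
   bit position and bit size of the previous field are both known, a gap
   can be ruled out only if their sum is a multiple of the next field's
   alignment.  */

#include <assert.h>
#include <stdbool.h>

bool
gap_before_next_field (unsigned long prev_bit_pos,
                       unsigned long prev_bit_size,
                       unsigned long next_field_align)
{
  /* The next field starts at the next multiple of its alignment, so a gap
     exists iff the previous field does not end exactly on that boundary.  */
  return (prev_bit_pos + prev_bit_size) % next_field_align != 0;
}

int
main (void)
{
  /* A 24-bit field at bit 0 followed by a 32-bit aligned field leaves an
     8-bit gap; a 32-bit field at bit 0 leaves none.  */
  assert (gap_before_next_field (0, 24, 32));
  assert (!gap_before_next_field (0, 32, 32));
  return 0;
}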
88a94e2b
EB
3007/* Return a LABEL_DECL with LABEL_NAME. GNAT_NODE is used for the position
3008 of the decl. */
a1ab4c31
AC
3009
3010tree
88a94e2b 3011create_label_decl (tree label_name, Node_Id gnat_node)
a1ab4c31 3012{
88a94e2b
EB
3013 tree label_decl
3014 = build_decl (input_location, LABEL_DECL, label_name, void_type_node);
a1ab4c31 3015
88a94e2b
EB
3016 DECL_MODE (label_decl) = VOIDmode;
3017
3018 /* Add this decl to the current binding level. */
3019 gnat_pushdecl (label_decl, gnat_node);
a1ab4c31
AC
3020
3021 return label_decl;
3022}
3023\f
7d7fcb08 3024/* Return a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
a1ab4c31
AC
3025 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
3026 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
7d76717d 3027 PARM_DECL nodes chained through the DECL_CHAIN field).
a1ab4c31 3028
0e24192c 3029 INLINE_STATUS, PUBLIC_FLAG, EXTERN_FLAG, ARTIFICIAL_FLAG and ATTR_LIST are
7d7fcb08
EB
3030 used to set the appropriate fields in the FUNCTION_DECL. GNAT_NODE is
3031 used for the position of the decl. */
a1ab4c31
AC
3032
3033tree
7d7fcb08 3034create_subprog_decl (tree subprog_name, tree asm_name, tree subprog_type,
0e24192c
EB
3035 tree param_decl_list, enum inline_status_t inline_status,
3036 bool public_flag, bool extern_flag, bool artificial_flag,
7d7fcb08 3037 struct attrib *attr_list, Node_Id gnat_node)
a1ab4c31 3038{
d47d0a8d
EB
3039 tree subprog_decl = build_decl (input_location, FUNCTION_DECL, subprog_name,
3040 subprog_type);
3041 tree result_decl = build_decl (input_location, RESULT_DECL, NULL_TREE,
3042 TREE_TYPE (subprog_type));
7d7fcb08 3043 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
a1ab4c31 3044
7d7fcb08
EB
3045 DECL_ARTIFICIAL (subprog_decl) = artificial_flag;
3046 DECL_EXTERNAL (subprog_decl) = extern_flag;
0e24192c
EB
3047
3048 switch (inline_status)
3049 {
3050 case is_suppressed:
3051 DECL_UNINLINABLE (subprog_decl) = 1;
3052 break;
3053
3054 case is_disabled:
3055 break;
3056
f087ea44
AC
3057 case is_required:
3058 if (Back_End_Inlining)
1eb58520
AC
3059 decl_attributes (&subprog_decl,
3060 tree_cons (get_identifier ("always_inline"),
3061 NULL_TREE, NULL_TREE),
3062 ATTR_FLAG_TYPE_IN_PLACE);
3063
f087ea44
AC
3064 /* ... fall through ... */
3065
0e24192c
EB
3066 case is_enabled:
3067 DECL_DECLARED_INLINE_P (subprog_decl) = 1;
3068 DECL_NO_INLINE_WARNING_P (subprog_decl) = artificial_flag;
3069 break;
3070
3071 default:
3072 gcc_unreachable ();
3073 }
7d7fcb08
EB
3074
3075 TREE_PUBLIC (subprog_decl) = public_flag;
3076 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
a1ab4c31
AC
3077 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
3078 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
a1ab4c31 3079
d47d0a8d
EB
3080 DECL_ARTIFICIAL (result_decl) = 1;
3081 DECL_IGNORED_P (result_decl) = 1;
3082 DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (subprog_type);
3083 DECL_RESULT (subprog_decl) = result_decl;
a1ab4c31 3084
a1ab4c31
AC
3085 if (asm_name)
3086 {
3087 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
3088
3089 /* The expand_main_function circuitry expects "main_identifier_node" to
3090 designate the DECL_NAME of the 'main' entry point, in turn expected
3091 to be declared as the "main" function literally by default. Ada
3092 program entry points are typically declared with a different name
3093 within the binder generated file, exported as 'main' to satisfy the
cfbb663c 3094 system expectations. Force main_identifier_node in this case. */
a1ab4c31 3095 if (asm_name == main_identifier_node)
cfbb663c 3096 DECL_NAME (subprog_decl) = main_identifier_node;
a1ab4c31
AC
3097 }
3098
74746d49
EB
3099 process_attributes (&subprog_decl, &attr_list, true, gnat_node);
3100
a1ab4c31
AC
3101 /* Add this decl to the current binding level. */
3102 gnat_pushdecl (subprog_decl, gnat_node);
3103
3104 /* Output the assembler code and/or RTL for the declaration. */
3105 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
3106
3107 return subprog_decl;
3108}
3109\f
3110/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
3111 body. This routine needs to be invoked before processing the declarations
3112 appearing in the subprogram. */
3113
3114void
3115begin_subprog_body (tree subprog_decl)
3116{
3117 tree param_decl;
3118
a1ab4c31
AC
3119 announce_function (subprog_decl);
3120
0ae44446
JR
3121 /* This function is being defined. */
3122 TREE_STATIC (subprog_decl) = 1;
3123
e2d13a4a
EB
3124 /* The failure of this assertion will likely come from a wrong context for
3125 the subprogram body, e.g. another procedure for a procedure declared at
3126 library level. */
3127 gcc_assert (current_function_decl == decl_function_context (subprog_decl));
3128
58c8f770
EB
3129 current_function_decl = subprog_decl;
3130
a1ab4c31
AC
3131 /* Enter a new binding level and show that all the parameters belong to
3132 this function. */
3133 gnat_pushlevel ();
a09d56d8 3134
a1ab4c31 3135 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
910ad8de 3136 param_decl = DECL_CHAIN (param_decl))
a1ab4c31
AC
3137 DECL_CONTEXT (param_decl) = subprog_decl;
3138
3139 make_decl_rtl (subprog_decl);
a1ab4c31
AC
3140}
3141
71196d4e 3142/* Finish translating the current subprogram and set its BODY. */
a1ab4c31
AC
3143
3144void
a406865a 3145end_subprog_body (tree body)
a1ab4c31
AC
3146{
3147 tree fndecl = current_function_decl;
3148
bd9c7fb9 3149 /* Attach the BLOCK for this level to the function and pop the level. */
a1ab4c31
AC
3150 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
3151 DECL_INITIAL (fndecl) = current_binding_level->block;
3152 gnat_poplevel ();
3153
a1ab4c31
AC
3154 /* Mark the RESULT_DECL as being in this subprogram. */
3155 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
3156
a963da4d
EB
3157 /* The body should be a BIND_EXPR whose BLOCK is the top-level one. */
3158 if (TREE_CODE (body) == BIND_EXPR)
3159 {
3160 BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
3161 DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
3162 }
3163
a1ab4c31
AC
3164 DECL_SAVED_TREE (fndecl) = body;
3165
228ee426 3166 current_function_decl = decl_function_context (fndecl);
71196d4e
EB
3167}
3168
3169/* Wrap up compilation of SUBPROG_DECL, a subprogram body. */
a1ab4c31 3170
71196d4e
EB
3171void
3172rest_of_subprog_body_compilation (tree subprog_decl)
3173{
a1ab4c31
AC
3174 /* We cannot track the location of errors past this point. */
3175 error_gnat_node = Empty;
3176
3177 /* If we're only annotating types, don't actually compile this function. */
3178 if (type_annotate_only)
3179 return;
3180
a406865a 3181 /* Dump functions before gimplification. */
71196d4e 3182 dump_function (TDI_original, subprog_decl);
a406865a 3183
228ee426 3184 if (!decl_function_context (subprog_decl))
3dafb85c 3185 cgraph_node::finalize_function (subprog_decl, false);
a1ab4c31
AC
3186 else
3187 /* Register this function with cgraph just far enough to get it
3188 added to our parent's nested function list. */
037e5573 3189 (void) cgraph_node::get_create (subprog_decl);
a1ab4c31
AC
3190}
3191
a1ab4c31
AC
3192tree
3193gnat_builtin_function (tree decl)
3194{
3195 gnat_pushdecl (decl, Empty);
3196 return decl;
3197}
3198
3199/* Return an integer type with the number of bits of precision given by
3200 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
3201 it is a signed type. */
3202
3203tree
3204gnat_type_for_size (unsigned precision, int unsignedp)
3205{
3206 tree t;
3207 char type_name[20];
3208
3209 if (precision <= 2 * MAX_BITS_PER_WORD
3210 && signed_and_unsigned_types[precision][unsignedp])
3211 return signed_and_unsigned_types[precision][unsignedp];
3212
3213 if (unsignedp)
3214 t = make_unsigned_type (precision);
3215 else
3216 t = make_signed_type (precision);
3217
3218 if (precision <= 2 * MAX_BITS_PER_WORD)
3219 signed_and_unsigned_types[precision][unsignedp] = t;
3220
3221 if (!TYPE_NAME (t))
3222 {
c3d79c60 3223 sprintf (type_name, "%sSIGNED_%u", unsignedp ? "UN" : "", precision);
a1ab4c31
AC
3224 TYPE_NAME (t) = get_identifier (type_name);
3225 }
3226
3227 return t;
3228}
3229
3230/* Likewise for floating-point types. */
3231
3232static tree
ef4bddc2 3233float_type_for_precision (int precision, machine_mode mode)
a1ab4c31
AC
3234{
3235 tree t;
3236 char type_name[20];
3237
3238 if (float_types[(int) mode])
3239 return float_types[(int) mode];
3240
3241 float_types[(int) mode] = t = make_node (REAL_TYPE);
3242 TYPE_PRECISION (t) = precision;
3243 layout_type (t);
3244
3245 gcc_assert (TYPE_MODE (t) == mode);
3246 if (!TYPE_NAME (t))
3247 {
3248 sprintf (type_name, "FLOAT_%d", precision);
3249 TYPE_NAME (t) = get_identifier (type_name);
3250 }
3251
3252 return t;
3253}
3254
3255/* Return a data type that has machine mode MODE. UNSIGNEDP selects
3256 an unsigned type; otherwise a signed type is returned. */
3257
3258tree
ef4bddc2 3259gnat_type_for_mode (machine_mode mode, int unsignedp)
a1ab4c31
AC
3260{
3261 if (mode == BLKmode)
3262 return NULL_TREE;
2799d18c
EB
3263
3264 if (mode == VOIDmode)
a1ab4c31 3265 return void_type_node;
2799d18c
EB
3266
3267 if (COMPLEX_MODE_P (mode))
a1ab4c31 3268 return NULL_TREE;
2799d18c
EB
3269
3270 if (SCALAR_FLOAT_MODE_P (mode))
a1ab4c31 3271 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2799d18c
EB
3272
3273 if (SCALAR_INT_MODE_P (mode))
a1ab4c31 3274 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2799d18c
EB
3275
3276 if (VECTOR_MODE_P (mode))
3277 {
ef4bddc2 3278 machine_mode inner_mode = GET_MODE_INNER (mode);
2799d18c
EB
3279 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
3280 if (inner_type)
3281 return build_vector_type_for_mode (inner_type, mode);
3282 }
3283
3284 return NULL_TREE;
a1ab4c31
AC
3285}
3286
3287/* Return the unsigned version of a TYPE_NODE, a scalar type. */
3288
3289tree
3290gnat_unsigned_type (tree type_node)
3291{
3292 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
3293
3294 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
3295 {
3296 type = copy_node (type);
3297 TREE_TYPE (type) = type_node;
3298 }
3299 else if (TREE_TYPE (type_node)
3300 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
3301 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
3302 {
3303 type = copy_node (type);
3304 TREE_TYPE (type) = TREE_TYPE (type_node);
3305 }
3306
3307 return type;
3308}
3309
3310/* Return the signed version of a TYPE_NODE, a scalar type. */
3311
3312tree
3313gnat_signed_type (tree type_node)
3314{
3315 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
3316
3317 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
3318 {
3319 type = copy_node (type);
3320 TREE_TYPE (type) = type_node;
3321 }
3322 else if (TREE_TYPE (type_node)
3323 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
3324 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
3325 {
3326 type = copy_node (type);
3327 TREE_TYPE (type) = TREE_TYPE (type_node);
3328 }
3329
3330 return type;
3331}
3332
3333/* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
3334 transparently converted to each other. */
3335
3336int
3337gnat_types_compatible_p (tree t1, tree t2)
3338{
3339 enum tree_code code;
3340
3341 /* This is the default criterion. */
3342 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
3343 return 1;
3344
3345 /* We only check structural equivalence here. */
3346 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
3347 return 0;
3348
7948ae37
OH
3349 /* Vector types are also compatible if they have the same number of subparts
3350 and the same form of (scalar) element type. */
3351 if (code == VECTOR_TYPE
3352 && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
3353 && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
3354 && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
3355 return 1;
3356
cfa0bd19 3357 /* Array types are also compatible if they are constrained and have the same
96bba5e6 3358 domain(s) and the same component type. */
a1ab4c31 3359 if (code == ARRAY_TYPE
0adef32b
JJ
3360 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
3361 || (TYPE_DOMAIN (t1)
b4680ca1 3362 && TYPE_DOMAIN (t2)
0adef32b
JJ
3363 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
3364 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
3365 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
cfa0bd19 3366 TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
96bba5e6
EB
3367 && (TREE_TYPE (t1) == TREE_TYPE (t2)
3368 || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
3369 && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))))
a1ab4c31
AC
3370 return 1;
3371
a1ab4c31
AC
3372 return 0;
3373}
523e82a7 3374
71196d4e
EB
3375/* Return true if EXPR is a useless type conversion. */
3376
3377bool
3378gnat_useless_type_conversion (tree expr)
3379{
3380 if (CONVERT_EXPR_P (expr)
3381 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
3382 || TREE_CODE (expr) == NON_LVALUE_EXPR)
3383 return gnat_types_compatible_p (TREE_TYPE (expr),
3384 TREE_TYPE (TREE_OPERAND (expr, 0)));
3385
3386 return false;
3387}
3388
523e82a7
EB
3389/* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
3390
3391bool
3392fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
3393 bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
3394{
3395 return TYPE_CI_CO_LIST (t) == cico_list
3396 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
3397 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
3398 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
3399}
a1ab4c31
AC
3400\f
3401/* EXP is an expression for the size of an object. If this size contains
3402 discriminant references, replace them with the maximum (if MAX_P) or
3403 minimum (if !MAX_P) possible value of the discriminant. */
3404
3405tree
3406max_size (tree exp, bool max_p)
3407{
3408 enum tree_code code = TREE_CODE (exp);
3409 tree type = TREE_TYPE (exp);
3410
3411 switch (TREE_CODE_CLASS (code))
3412 {
3413 case tcc_declaration:
3414 case tcc_constant:
3415 return exp;
3416
3417 case tcc_vl_exp:
3418 if (code == CALL_EXPR)
3419 {
f82a627c
EB
3420 tree t, *argarray;
3421 int n, i;
3422
3423 t = maybe_inline_call_in_expr (exp);
3424 if (t)
3425 return max_size (t, max_p);
a1ab4c31 3426
f82a627c
EB
3427 n = call_expr_nargs (exp);
3428 gcc_assert (n > 0);
2bb1fc26 3429 argarray = XALLOCAVEC (tree, n);
a1ab4c31
AC
3430 for (i = 0; i < n; i++)
3431 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
3432 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
3433 }
3434 break;
3435
3436 case tcc_reference:
3437 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
3438 modify. Otherwise, we treat it like a variable. */
1eb58520
AC
3439 if (CONTAINS_PLACEHOLDER_P (exp))
3440 {
3441 tree val_type = TREE_TYPE (TREE_OPERAND (exp, 1));
3442 tree val = (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type));
3443 return max_size (convert (get_base_type (val_type), val), true);
3444 }
a1ab4c31 3445
1eb58520 3446 return exp;
a1ab4c31
AC
3447
3448 case tcc_comparison:
3449 return max_p ? size_one_node : size_zero_node;
3450
3451 case tcc_unary:
ce3da0d0
EB
3452 if (code == NON_LVALUE_EXPR)
3453 return max_size (TREE_OPERAND (exp, 0), max_p);
6625d7bc 3454
ce3da0d0
EB
3455 return fold_build1 (code, type,
3456 max_size (TREE_OPERAND (exp, 0),
3457 code == NEGATE_EXPR ? !max_p : max_p));
3458
a1ab4c31 3459 case tcc_binary:
ce3da0d0
EB
3460 {
3461 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
3462 tree rhs = max_size (TREE_OPERAND (exp, 1),
3463 code == MINUS_EXPR ? !max_p : max_p);
3464
3465 /* Special-case wanting the maximum value of a MIN_EXPR.
3466 In that case, if one side overflows, return the other. */
3467 if (max_p && code == MIN_EXPR)
3468 {
3469 if (TREE_CODE (rhs) == INTEGER_CST && TREE_OVERFLOW (rhs))
3470 return lhs;
3471
3472 if (TREE_CODE (lhs) == INTEGER_CST && TREE_OVERFLOW (lhs))
3473 return rhs;
3474 }
3475
3476 /* Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
3477 overflowing and the RHS a variable. */
3478 if ((code == MINUS_EXPR || code == PLUS_EXPR)
3479 && TREE_CODE (lhs) == INTEGER_CST
3480 && TREE_OVERFLOW (lhs)
3481 && !TREE_CONSTANT (rhs))
3482 return lhs;
3483
3484 return size_binop (code, lhs, rhs);
3485 }
3486
a1ab4c31
AC
3487 case tcc_expression:
3488 switch (TREE_CODE_LENGTH (code))
3489 {
3490 case 1:
722356ce
EB
3491 if (code == SAVE_EXPR)
3492 return exp;
ce3da0d0
EB
3493
3494 return fold_build1 (code, type,
3495 max_size (TREE_OPERAND (exp, 0), max_p));
a1ab4c31
AC
3496
3497 case 2:
3498 if (code == COMPOUND_EXPR)
3499 return max_size (TREE_OPERAND (exp, 1), max_p);
3500
ce3da0d0
EB
3501 return fold_build2 (code, type,
3502 max_size (TREE_OPERAND (exp, 0), max_p),
3503 max_size (TREE_OPERAND (exp, 1), max_p));
a1ab4c31
AC
3504
3505 case 3:
722356ce 3506 if (code == COND_EXPR)
a1ab4c31
AC
3507 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
3508 max_size (TREE_OPERAND (exp, 1), max_p),
3509 max_size (TREE_OPERAND (exp, 2), max_p));
ce3da0d0
EB
3510
3511 default:
3512 break;
a1ab4c31
AC
3513 }
3514
3515 /* Other tree classes cannot happen. */
3516 default:
3517 break;
3518 }
3519
3520 gcc_unreachable ();
3521}
3522\f
3523/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
3524 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
3525 Return a constructor for the template. */
3526
3527tree
3528build_template (tree template_type, tree array_type, tree expr)
3529{
9771b263 3530 vec<constructor_elt, va_gc> *template_elts = NULL;
a1ab4c31
AC
3531 tree bound_list = NULL_TREE;
3532 tree field;
3533
3534 while (TREE_CODE (array_type) == RECORD_TYPE
315cff15 3535 && (TYPE_PADDING_P (array_type)
a1ab4c31
AC
3536 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
3537 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
3538
3539 if (TREE_CODE (array_type) == ARRAY_TYPE
3540 || (TREE_CODE (array_type) == INTEGER_TYPE
3541 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
3542 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
3543
3544 /* First make the list for a CONSTRUCTOR for the template. Go down the
3545 field list of the template instead of the type chain because this
3546 array might be an Ada array of arrays and we can't tell where the
3547 nested arrays stop being the underlying object. */
3548
3549 for (field = TYPE_FIELDS (template_type); field;
3550 (bound_list
3551 ? (bound_list = TREE_CHAIN (bound_list))
3552 : (array_type = TREE_TYPE (array_type))),
910ad8de 3553 field = DECL_CHAIN (DECL_CHAIN (field)))
a1ab4c31
AC
3554 {
3555 tree bounds, min, max;
3556
3557 /* If we have a bound list, get the bounds from there. Likewise
3558 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
3559 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
3560 This will give us a maximum range. */
3561 if (bound_list)
3562 bounds = TREE_VALUE (bound_list);
3563 else if (TREE_CODE (array_type) == ARRAY_TYPE)
3564 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
3565 else if (expr && TREE_CODE (expr) == PARM_DECL
3566 && DECL_BY_COMPONENT_PTR_P (expr))
3567 bounds = TREE_TYPE (field);
3568 else
3569 gcc_unreachable ();
3570
3571 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
910ad8de 3572 max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));
a1ab4c31
AC
3573
 3574       /* If either MIN or MAX involves a PLACEHOLDER_EXPR, we must
3575 substitute it from OBJECT. */
3576 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
3577 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
3578
0e228dd9 3579 CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
910ad8de 3580 CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
a1ab4c31
AC
3581 }
3582
0e228dd9 3583 return gnat_build_constructor (template_type, template_elts);
a1ab4c31
AC
3584}
3585\f
e63b36bd
EB
3586/* Return true if TYPE is suitable for the element type of a vector. */
3587
3588static bool
3589type_for_vector_element_p (tree type)
3590{
ef4bddc2 3591 machine_mode mode;
e63b36bd
EB
3592
3593 if (!INTEGRAL_TYPE_P (type)
3594 && !SCALAR_FLOAT_TYPE_P (type)
3595 && !FIXED_POINT_TYPE_P (type))
3596 return false;
3597
3598 mode = TYPE_MODE (type);
3599 if (GET_MODE_CLASS (mode) != MODE_INT
3600 && !SCALAR_FLOAT_MODE_P (mode)
3601 && !ALL_SCALAR_FIXED_POINT_MODE_P (mode))
3602 return false;
3603
3604 return true;
3605}
3606
3607/* Return a vector type given the SIZE and the INNER_TYPE, or NULL_TREE if
3608 this is not possible. If ATTRIBUTE is non-zero, we are processing the
3609 attribute declaration and want to issue error messages on failure. */
3610
3611static tree
3612build_vector_type_for_size (tree inner_type, tree size, tree attribute)
3613{
3614 unsigned HOST_WIDE_INT size_int, inner_size_int;
3615 int nunits;
3616
3617 /* Silently punt on variable sizes. We can't make vector types for them,
3618 need to ignore them on front-end generated subtypes of unconstrained
3619 base types, and this attribute is for binding implementors, not end
3620 users, so we should never get there from legitimate explicit uses. */
3621 if (!tree_fits_uhwi_p (size))
3622 return NULL_TREE;
3623 size_int = tree_to_uhwi (size);
3624
3625 if (!type_for_vector_element_p (inner_type))
3626 {
3627 if (attribute)
3628 error ("invalid element type for attribute %qs",
3629 IDENTIFIER_POINTER (attribute));
3630 return NULL_TREE;
3631 }
3632 inner_size_int = tree_to_uhwi (TYPE_SIZE_UNIT (inner_type));
3633
3634 if (size_int % inner_size_int)
3635 {
3636 if (attribute)
3637 error ("vector size not an integral multiple of component size");
3638 return NULL_TREE;
3639 }
3640
3641 if (size_int == 0)
3642 {
3643 if (attribute)
3644 error ("zero vector size");
3645 return NULL_TREE;
3646 }
3647
3648 nunits = size_int / inner_size_int;
3649 if (nunits & (nunits - 1))
3650 {
3651 if (attribute)
3652 error ("number of components of vector not a power of two");
3653 return NULL_TREE;
3654 }
3655
3656 return build_vector_type (inner_type, nunits);
3657}
3658
3659/* Return a vector type whose representative array type is ARRAY_TYPE, or
3660 NULL_TREE if this is not possible. If ATTRIBUTE is non-zero, we are
3661 processing the attribute and want to issue error messages on failure. */
3662
3663static tree
3664build_vector_type_for_array (tree array_type, tree attribute)
3665{
3666 tree vector_type = build_vector_type_for_size (TREE_TYPE (array_type),
3667 TYPE_SIZE_UNIT (array_type),
3668 attribute);
3669 if (!vector_type)
3670 return NULL_TREE;
3671
3672 TYPE_REPRESENTATIVE_ARRAY (vector_type) = array_type;
3673 return vector_type;
3674}
3675\f
928dfa4b
EB
3676/* Build a type to be used to represent an aliased object whose nominal type
3677 is an unconstrained array. This consists of a RECORD_TYPE containing a
3678 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
3679 If ARRAY_TYPE is that of an unconstrained array, this is used to represent
3680 an arbitrary unconstrained object. Use NAME as the name of the record.
3681 DEBUG_INFO_P is true if we need to write debug information for the type. */
a1ab4c31
AC
3682
3683tree
928dfa4b
EB
3684build_unc_object_type (tree template_type, tree object_type, tree name,
3685 bool debug_info_p)
a1ab4c31 3686{
24d4b3d5 3687 tree decl;
a1ab4c31 3688 tree type = make_node (RECORD_TYPE);
da01bfee
EB
3689 tree template_field
3690 = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
3691 NULL_TREE, NULL_TREE, 0, 1);
3692 tree array_field
3693 = create_field_decl (get_identifier ("ARRAY"), object_type, type,
3694 NULL_TREE, NULL_TREE, 0, 1);
a1ab4c31
AC
3695
3696 TYPE_NAME (type) = name;
3697 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
910ad8de 3698 DECL_CHAIN (template_field) = array_field;
928dfa4b
EB
3699 finish_record_type (type, template_field, 0, true);
3700
3701 /* Declare it now since it will never be declared otherwise. This is
3702 necessary to ensure that its subtrees are properly marked. */
24d4b3d5
AC
3703 decl = create_type_decl (name, type, true, debug_info_p, Empty);
3704
 3705  /* template_type will not be used anywhere else, so to keep the debug
 3706     info clean and avoid scoping issues, make decl its
3707 context. */
3708 gnat_set_type_context (template_type, decl);
a1ab4c31
AC
3709
3710 return type;
3711}
3712
3713/* Same, taking a thin or fat pointer type instead of a template type. */
3714
3715tree
3716build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
928dfa4b 3717 tree name, bool debug_info_p)
a1ab4c31
AC
3718{
3719 tree template_type;
3720
315cff15 3721 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
a1ab4c31
AC
3722
3723 template_type
315cff15 3724 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
910ad8de 3725 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
a1ab4c31 3726 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
928dfa4b
EB
3727
3728 return
3729 build_unc_object_type (template_type, object_type, name, debug_info_p);
a1ab4c31 3730}
a1ab4c31 3731\f
229077b0
EB
3732/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
3733 In the normal case this is just two adjustments, but we have more to
3734 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
a1ab4c31
AC
3735
3736void
3737update_pointer_to (tree old_type, tree new_type)
3738{
3739 tree ptr = TYPE_POINTER_TO (old_type);
3740 tree ref = TYPE_REFERENCE_TO (old_type);
aeecf17c 3741 tree t;
a1ab4c31
AC
3742
3743 /* If this is the main variant, process all the other variants first. */
3744 if (TYPE_MAIN_VARIANT (old_type) == old_type)
aeecf17c
EB
3745 for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
3746 update_pointer_to (t, new_type);
a1ab4c31 3747
229077b0 3748 /* If no pointers and no references, we are done. */
a1ab4c31
AC
3749 if (!ptr && !ref)
3750 return;
3751
3752 /* Merge the old type qualifiers in the new type.
3753
3754 Each old variant has qualifiers for specific reasons, and the new
229077b0 3755 designated type as well. Each set of qualifiers represents useful
a1ab4c31
AC
3756 information grabbed at some point, and merging the two simply unifies
3757 these inputs into the final type description.
3758
3759 Consider for instance a volatile type frozen after an access to constant
229077b0
EB
3760 type designating it; after the designated type's freeze, we get here with
3761 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
3762 when the access type was processed. We will make a volatile and readonly
a1ab4c31
AC
3763 designated type, because that's what it really is.
3764
229077b0
EB
3765 We might also get here for a non-dummy OLD_TYPE variant with different
3766 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
a1ab4c31 3767 to private record type elaboration (see the comments around the call to
229077b0
EB
3768 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
3769 the qualifiers in those cases too, to avoid accidentally discarding the
3770 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
3771 new_type
3772 = build_qualified_type (new_type,
3773 TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
3774
3775 /* If old type and new type are identical, there is nothing to do. */
a1ab4c31
AC
3776 if (old_type == new_type)
3777 return;
3778
3779 /* Otherwise, first handle the simple case. */
3780 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
3781 {
aeecf17c
EB
3782 tree new_ptr, new_ref;
3783
3784 /* If pointer or reference already points to new type, nothing to do.
3785 This can happen as update_pointer_to can be invoked multiple times
3786 on the same couple of types because of the type variants. */
3787 if ((ptr && TREE_TYPE (ptr) == new_type)
3788 || (ref && TREE_TYPE (ref) == new_type))
3789 return;
3790
3791 /* Chain PTR and its variants at the end. */
3792 new_ptr = TYPE_POINTER_TO (new_type);
3793 if (new_ptr)
3794 {
3795 while (TYPE_NEXT_PTR_TO (new_ptr))
3796 new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
3797 TYPE_NEXT_PTR_TO (new_ptr) = ptr;
3798 }
3799 else
3800 TYPE_POINTER_TO (new_type) = ptr;
a1ab4c31 3801
aeecf17c 3802 /* Now adjust them. */
a1ab4c31 3803 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
aeecf17c 3804 for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
50179d58
EB
3805 {
3806 TREE_TYPE (t) = new_type;
3807 if (TYPE_NULL_BOUNDS (t))
3808 TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
3809 }
de9528f0 3810
aeecf17c
EB
3811 /* Chain REF and its variants at the end. */
3812 new_ref = TYPE_REFERENCE_TO (new_type);
3813 if (new_ref)
3814 {
3815 while (TYPE_NEXT_REF_TO (new_ref))
3816 new_ref = TYPE_NEXT_REF_TO (new_ref);
3817 TYPE_NEXT_REF_TO (new_ref) = ref;
3818 }
3819 else
3820 TYPE_REFERENCE_TO (new_type) = ref;
3821
3822 /* Now adjust them. */
a1ab4c31 3823 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
aeecf17c
EB
3824 for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
3825 TREE_TYPE (t) = new_type;
de9528f0
EB
3826
3827 TYPE_POINTER_TO (old_type) = NULL_TREE;
3bd6ca3f 3828 TYPE_REFERENCE_TO (old_type) = NULL_TREE;
a1ab4c31
AC
3829 }
3830
aeecf17c
EB
3831 /* Now deal with the unconstrained array case. In this case the pointer
3832 is actually a record where both fields are pointers to dummy nodes.
e3edbd56
EB
3833 Turn them into pointers to the correct types using update_pointer_to.
3834 Likewise for the pointer to the object record (thin pointer). */
a1ab4c31
AC
3835 else
3836 {
e3edbd56 3837 tree new_ptr = TYPE_POINTER_TO (new_type);
aeecf17c
EB
3838
3839 gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));
3840
e3edbd56 3841 /* If PTR already points to NEW_TYPE, nothing to do. This can happen
aeecf17c
EB
3842 since update_pointer_to can be invoked multiple times on the same
3843 couple of types because of the type variants. */
3844 if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
3845 return;
3846
a1ab4c31 3847 update_pointer_to
e3edbd56
EB
3848 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
3849 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));
a1ab4c31 3850
a1ab4c31 3851 update_pointer_to
e3edbd56
EB
3852 (TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
3853 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));
aeecf17c 3854
e3edbd56
EB
3855 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
3856 TYPE_OBJECT_RECORD_TYPE (new_type));
a1ab4c31 3857
e3edbd56 3858 TYPE_POINTER_TO (old_type) = NULL_TREE;
a1ab4c31
AC
3859 }
3860}
3861\f
8df2e902
EB
3862/* Convert EXPR, a pointer to a constrained array, into a pointer to an
3863 unconstrained one. This involves making or finding a template. */
a1ab4c31
AC
3864
3865static tree
3866convert_to_fat_pointer (tree type, tree expr)
3867{
910ad8de 3868 tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
8df2e902 3869 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
a1ab4c31 3870 tree etype = TREE_TYPE (expr);
88293f03 3871 tree template_addr;
9771b263
DN
3872 vec<constructor_elt, va_gc> *v;
3873 vec_alloc (v, 2);
a1ab4c31 3874
50179d58
EB
3875 /* If EXPR is null, make a fat pointer that contains a null pointer to the
3876 array (compare_fat_pointers ensures that this is the full discriminant)
3877 and a valid pointer to the bounds. This latter property is necessary
3878 since the compiler can hoist the load of the bounds done through it. */
a1ab4c31 3879 if (integer_zerop (expr))
0e228dd9 3880 {
50179d58
EB
3881 tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
3882 tree null_bounds, t;
3883
3884 if (TYPE_NULL_BOUNDS (ptr_template_type))
3885 null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
3886 else
3887 {
3888 /* The template type can still be dummy at this point so we build an
3889 empty constructor. The middle-end will fill it in with zeros. */
90b4c164 3890 t = build_constructor (template_type, NULL);
50179d58
EB
3891 TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
3892 null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
3893 SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
3894 }
3895
0e228dd9 3896 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
50179d58
EB
3897 fold_convert (p_array_type, null_pointer_node));
3898 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
3899 t = build_constructor (type, v);
3900 /* Do not set TREE_CONSTANT so as to force T to static memory. */
3901 TREE_CONSTANT (t) = 0;
3902 TREE_STATIC (t) = 1;
3903
3904 return t;
0e228dd9 3905 }
a1ab4c31 3906
0d7de0e1
EB
3907 /* If EXPR is a thin pointer, make template and data from the record. */
3908 if (TYPE_IS_THIN_POINTER_P (etype))
a1ab4c31 3909 {
0d7de0e1 3910 tree field = TYPE_FIELDS (TREE_TYPE (etype));
a1ab4c31 3911
7d7a1fe8 3912 expr = gnat_protect_expr (expr);
88293f03
EB
3913
3914 /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
3915 the thin pointer value has been shifted so we shift it back to get
3916 the template address. */
3917 if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)))
2b45154d 3918 {
88293f03
EB
3919 template_addr
3920 = build_binary_op (POINTER_PLUS_EXPR, etype, expr,
3921 fold_build1 (NEGATE_EXPR, sizetype,
3922 byte_position
3923 (DECL_CHAIN (field))));
3924 template_addr
3925 = fold_convert (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))),
3926 template_addr);
2b45154d 3927 }
a1ab4c31 3928
88293f03
EB
3929 /* Otherwise we explicitly take the address of the fields. */
3930 else
3931 {
3932 expr = build_unary_op (INDIRECT_REF, NULL_TREE, expr);
3933 template_addr
3934 = build_unary_op (ADDR_EXPR, NULL_TREE,
3935 build_component_ref (expr, NULL_TREE, field,
3936 false));
3937 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
3938 build_component_ref (expr, NULL_TREE,
3939 DECL_CHAIN (field),
3940 false));
3941 }
a1ab4c31 3942 }
8df2e902
EB
3943
3944 /* Otherwise, build the constructor for the template. */
a1ab4c31 3945 else
88293f03
EB
3946 template_addr
3947 = build_unary_op (ADDR_EXPR, NULL_TREE,
3948 build_template (template_type, TREE_TYPE (etype),
3949 expr));
a1ab4c31 3950
8df2e902 3951 /* The final result is a constructor for the fat pointer.
a1ab4c31 3952
8df2e902
EB
3953 If EXPR is an argument of a foreign convention subprogram, the type it
3954 points to is directly the component type. In this case, the expression
a1ab4c31 3955 type may not match the corresponding FIELD_DECL type at this point, so we
8df2e902 3956 call "convert" here to fix that up if necessary. This type consistency is
a1ab4c31 3957 required, for instance because it ensures that possible later folding of
8df2e902 3958 COMPONENT_REFs against this constructor always yields something of the
a1ab4c31
AC
3959 same type as the initial reference.
3960
8df2e902
EB
3961 Note that the call to "build_template" above is still fine because it
3962 will only refer to the provided TEMPLATE_TYPE in this case. */
88293f03
EB
3963 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), convert (p_array_type, expr));
3964 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), template_addr);
0e228dd9 3965 return gnat_build_constructor (type, v);
a1ab4c31
AC
3966}
3967\f
a1ab4c31
AC
3968/* Create an expression whose value is that of EXPR,
3969 converted to type TYPE. The TREE_TYPE of the value
3970 is always TYPE. This function implements all reasonable
3971 conversions; callers should filter out those that are
3972 not permitted by the language being compiled. */
3973
3974tree
3975convert (tree type, tree expr)
3976{
a1ab4c31
AC
3977 tree etype = TREE_TYPE (expr);
3978 enum tree_code ecode = TREE_CODE (etype);
c34f3839 3979 enum tree_code code = TREE_CODE (type);
a1ab4c31 3980
c34f3839
EB
3981 /* If the expression is already of the right type, we are done. */
3982 if (etype == type)
a1ab4c31
AC
3983 return expr;
3984
3985 /* If both input and output have padding and are of variable size, do this
3986 as an unchecked conversion. Likewise if one is a mere variant of the
3987 other, so we avoid a pointless unpad/repad sequence. */
3988 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
315cff15 3989 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
a1ab4c31
AC
3990 && (!TREE_CONSTANT (TYPE_SIZE (type))
3991 || !TREE_CONSTANT (TYPE_SIZE (etype))
842d4ee2 3992 || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
a1ab4c31
AC
3993 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
3994 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
3995 ;
3996
f88facfe
EB
3997 /* If the output type has padding, convert to the inner type and make a
3998 constructor to build the record, unless a variable size is involved. */
315cff15 3999 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
a1ab4c31 4000 {
9771b263 4001 vec<constructor_elt, va_gc> *v;
0e228dd9 4002
a1ab4c31
AC
4003 /* If we previously converted from another type and our type is
4004 of variable size, remove the conversion to avoid the need for
f88facfe 4005 variable-sized temporaries. Likewise for a conversion between
a1ab4c31
AC
4006 original and packable version. */
4007 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4008 && (!TREE_CONSTANT (TYPE_SIZE (type))
4009 || (ecode == RECORD_TYPE
4010 && TYPE_NAME (etype)
4011 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
4012 expr = TREE_OPERAND (expr, 0);
4013
4014 /* If we are just removing the padding from expr, convert the original
4015 object if we have variable size in order to avoid the need for some
f88facfe 4016 variable-sized temporaries. Likewise if the padding is a variant
a1ab4c31
AC
4017 of the other, so we avoid a pointless unpad/repad sequence. */
4018 if (TREE_CODE (expr) == COMPONENT_REF
a1ab4c31
AC
4019 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
4020 && (!TREE_CONSTANT (TYPE_SIZE (type))
842d4ee2
EB
4021 || TYPE_MAIN_VARIANT (type)
4022 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))
a1ab4c31
AC
4023 || (ecode == RECORD_TYPE
4024 && TYPE_NAME (etype)
4025 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
4026 return convert (type, TREE_OPERAND (expr, 0));
4027
431cfac1
EB
4028 /* If the inner type is of self-referential size and the expression type
4029 is a record, do this as an unchecked conversion. But first pad the
4030 expression if possible to have the same size on both sides. */
c34f3839 4031 if (ecode == RECORD_TYPE
f88facfe 4032 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
431cfac1 4033 {
980a0501 4034 if (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST)
431cfac1 4035 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
980a0501
EB
4036 false, false, false, true),
4037 expr);
431cfac1
EB
4038 return unchecked_convert (type, expr, false);
4039 }
a1ab4c31 4040
f88facfe
EB
4041 /* If we are converting between array types with variable size, do the
4042 final conversion as an unchecked conversion, again to avoid the need
4043 for some variable-sized temporaries. If valid, this conversion is
4044 very likely purely technical and without real effects. */
c34f3839 4045 if (ecode == ARRAY_TYPE
f88facfe
EB
4046 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
4047 && !TREE_CONSTANT (TYPE_SIZE (etype))
4048 && !TREE_CONSTANT (TYPE_SIZE (type)))
4049 return unchecked_convert (type,
4050 convert (TREE_TYPE (TYPE_FIELDS (type)),
4051 expr),
4052 false);
4053
9771b263 4054 vec_alloc (v, 1);
0e228dd9
NF
4055 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4056 convert (TREE_TYPE (TYPE_FIELDS (type)), expr));
4057 return gnat_build_constructor (type, v);
a1ab4c31
AC
4058 }
4059
4060 /* If the input type has padding, remove it and convert to the output type.
4061 The conditions ordering is arranged to ensure that the output type is not
4062 a padding type here, as it is not clear whether the conversion would
4063 always be correct if this was to happen. */
315cff15 4064 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
a1ab4c31
AC
4065 {
4066 tree unpadded;
4067
4068 /* If we have just converted to this padded type, just get the
4069 inner expression. */
4070 if (TREE_CODE (expr) == CONSTRUCTOR
9771b263
DN
4071 && !vec_safe_is_empty (CONSTRUCTOR_ELTS (expr))
4072 && (*CONSTRUCTOR_ELTS (expr))[0].index == TYPE_FIELDS (etype))
4073 unpadded = (*CONSTRUCTOR_ELTS (expr))[0].value;
a1ab4c31
AC
4074
4075 /* Otherwise, build an explicit component reference. */
4076 else
4077 unpadded
4078 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
4079
4080 return convert (type, unpadded);
4081 }
4082
4083 /* If the input is a biased type, adjust first. */
4084 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4085 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
1eb58520 4086 fold_convert (TREE_TYPE (etype), expr),
a1ab4c31 4087 fold_convert (TREE_TYPE (etype),
1eb58520 4088 TYPE_MIN_VALUE (etype))));
a1ab4c31
AC
4089
4090 /* If the input is a justified modular type, we need to extract the actual
4091 object before converting it to any other type with the exceptions of an
4092 unconstrained array or of a mere type variant. It is useful to avoid the
4093 extraction and conversion in the type variant case because it could end
4094 up replacing a VAR_DECL expr by a constructor and we might be about the
4095 take the address of the result. */
4096 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
4097 && code != UNCONSTRAINED_ARRAY_TYPE
4098 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
4099 return convert (type, build_component_ref (expr, NULL_TREE,
4100 TYPE_FIELDS (etype), false));
4101
4102 /* If converting to a type that contains a template, convert to the data
4103 type and then build the template. */
4104 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
4105 {
910ad8de 4106 tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
9771b263
DN
4107 vec<constructor_elt, va_gc> *v;
4108 vec_alloc (v, 2);
a1ab4c31
AC
4109
4110 /* If the source already has a template, get a reference to the
4111 associated array only, as we are going to rebuild a template
4112 for the target type anyway. */
4113 expr = maybe_unconstrained_array (expr);
4114
0e228dd9
NF
4115 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4116 build_template (TREE_TYPE (TYPE_FIELDS (type)),
4117 obj_type, NULL_TREE));
910ad8de 4118 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
0e228dd9
NF
4119 convert (obj_type, expr));
4120 return gnat_build_constructor (type, v);
a1ab4c31
AC
4121 }
4122
a1c7d797 4123 /* There are some cases of expressions that we process specially. */
a1ab4c31
AC
4124 switch (TREE_CODE (expr))
4125 {
4126 case ERROR_MARK:
4127 return expr;
4128
4129 case NULL_EXPR:
4130 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4131 conversion in gnat_expand_expr. NULL_EXPR does not represent
 4132	 an actual value, so no conversion is needed.  */
4133 expr = copy_node (expr);
4134 TREE_TYPE (expr) = type;
4135 return expr;
4136
4137 case STRING_CST:
4138 /* If we are converting a STRING_CST to another constrained array type,
4139 just make a new one in the proper type. */
4140 if (code == ecode && AGGREGATE_TYPE_P (etype)
4141 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
4142 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
4143 {
4144 expr = copy_node (expr);
4145 TREE_TYPE (expr) = type;
4146 return expr;
4147 }
4148 break;
4149
7948ae37 4150 case VECTOR_CST:
44e9e3ec 4151 /* If we are converting a VECTOR_CST to a mere type variant, just make
7948ae37
OH
4152 a new one in the proper type. */
4153 if (code == ecode && gnat_types_compatible_p (type, etype))
4154 {
4155 expr = copy_node (expr);
4156 TREE_TYPE (expr) = type;
4157 return expr;
4158 }
4159
a1ab4c31 4160 case CONSTRUCTOR:
44e9e3ec
EB
4161 /* If we are converting a CONSTRUCTOR to a mere type variant, or to
4162 another padding type around the same type, just make a new one in
4163 the proper type. */
4164 if (code == ecode
4165 && (gnat_types_compatible_p (type, etype)
4166 || (code == RECORD_TYPE
4167 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4168 && TREE_TYPE (TYPE_FIELDS (type))
4169 == TREE_TYPE (TYPE_FIELDS (etype)))))
a1ab4c31
AC
4170 {
4171 expr = copy_node (expr);
4172 TREE_TYPE (expr) = type;
9771b263 4173 CONSTRUCTOR_ELTS (expr) = vec_safe_copy (CONSTRUCTOR_ELTS (expr));
a1ab4c31
AC
4174 return expr;
4175 }
4176
cb3d597d
EB
4177 /* Likewise for a conversion between original and packable version, or
4178 conversion between types of the same size and with the same list of
4179 fields, but we have to work harder to preserve type consistency. */
a1ab4c31
AC
4180 if (code == ecode
4181 && code == RECORD_TYPE
cb3d597d
EB
4182 && (TYPE_NAME (type) == TYPE_NAME (etype)
4183 || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
4184
a1ab4c31 4185 {
9771b263
DN
4186 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4187 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4188 vec<constructor_elt, va_gc> *v;
4189 vec_alloc (v, len);
a1ab4c31
AC
4190 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4191 unsigned HOST_WIDE_INT idx;
4192 tree index, value;
4193
db868e1e
OH
4194 /* Whether we need to clear TREE_CONSTANT et al. on the output
4195 constructor when we convert in place. */
4196 bool clear_constant = false;
4197
a1ab4c31
AC
4198 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
4199 {
44e9e3ec
EB
4200 /* Skip the missing fields in the CONSTRUCTOR. */
4201 while (efield && field && !SAME_FIELD_P (efield, index))
4202 {
4203 efield = DECL_CHAIN (efield);
4204 field = DECL_CHAIN (field);
4205 }
cb3d597d 4206 /* The field must be the same. */
44e9e3ec 4207 if (!(efield && field && SAME_FIELD_P (efield, field)))
a1ab4c31 4208 break;
44e9e3ec
EB
4209 constructor_elt elt
4210 = {field, convert (TREE_TYPE (field), value)};
9771b263 4211 v->quick_push (elt);
db868e1e
OH
4212
4213 /* If packing has made this field a bitfield and the input
4214 value couldn't be emitted statically any more, we need to
4215 clear TREE_CONSTANT on our output. */
ced57283
EB
4216 if (!clear_constant
4217 && TREE_CONSTANT (expr)
db868e1e
OH
4218 && !CONSTRUCTOR_BITFIELD_P (efield)
4219 && CONSTRUCTOR_BITFIELD_P (field)
4220 && !initializer_constant_valid_for_bitfield_p (value))
4221 clear_constant = true;
4222
910ad8de
NF
4223 efield = DECL_CHAIN (efield);
4224 field = DECL_CHAIN (field);
a1ab4c31
AC
4225 }
4226
db868e1e
OH
4227 /* If we have been able to match and convert all the input fields
 4228	     to their output type, convert in place now.  We'll fall back to a
4229 view conversion downstream otherwise. */
a1ab4c31
AC
4230 if (idx == len)
4231 {
4232 expr = copy_node (expr);
4233 TREE_TYPE (expr) = type;
4234 CONSTRUCTOR_ELTS (expr) = v;
db868e1e 4235 if (clear_constant)
ced57283 4236 TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
a1ab4c31
AC
4237 return expr;
4238 }
4239 }
7948ae37
OH
4240
4241 /* Likewise for a conversion between array type and vector type with a
4242 compatible representative array. */
4243 else if (code == VECTOR_TYPE
4244 && ecode == ARRAY_TYPE
4245 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4246 etype))
4247 {
9771b263
DN
4248 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4249 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4250 vec<constructor_elt, va_gc> *v;
7948ae37
OH
4251 unsigned HOST_WIDE_INT ix;
4252 tree value;
4253
4254 /* Build a VECTOR_CST from a *constant* array constructor. */
4255 if (TREE_CONSTANT (expr))
4256 {
4257 bool constant_p = true;
4258
4259 /* Iterate through elements and check if all constructor
4260 elements are *_CSTs. */
4261 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4262 if (!CONSTANT_CLASS_P (value))
4263 {
4264 constant_p = false;
4265 break;
4266 }
4267
4268 if (constant_p)
4269 return build_vector_from_ctor (type,
4270 CONSTRUCTOR_ELTS (expr));
4271 }
4272
4273 /* Otherwise, build a regular vector constructor. */
9771b263 4274 vec_alloc (v, len);
7948ae37
OH
4275 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4276 {
f32682ca 4277 constructor_elt elt = {NULL_TREE, value};
9771b263 4278 v->quick_push (elt);
7948ae37
OH
4279 }
4280 expr = copy_node (expr);
4281 TREE_TYPE (expr) = type;
4282 CONSTRUCTOR_ELTS (expr) = v;
4283 return expr;
4284 }
a1ab4c31
AC
4285 break;
4286
4287 case UNCONSTRAINED_ARRAY_REF:
a1c7d797
EB
4288 /* First retrieve the underlying array. */
4289 expr = maybe_unconstrained_array (expr);
4290 etype = TREE_TYPE (expr);
4291 ecode = TREE_CODE (etype);
4292 break;
a1ab4c31
AC
4293
4294 case VIEW_CONVERT_EXPR:
4295 {
4296 /* GCC 4.x is very sensitive to type consistency overall, and view
4297 conversions thus are very frequent. Even though just "convert"ing
4298 the inner operand to the output type is fine in most cases, it
4299 might expose unexpected input/output type mismatches in special
4300 circumstances so we avoid such recursive calls when we can. */
4301 tree op0 = TREE_OPERAND (expr, 0);
4302
4303 /* If we are converting back to the original type, we can just
4304 lift the input conversion. This is a common occurrence with
4305 switches back-and-forth amongst type variants. */
4306 if (type == TREE_TYPE (op0))
4307 return op0;
4308
7948ae37
OH
4309 /* Otherwise, if we're converting between two aggregate or vector
4310 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4311 target type in place or to just convert the inner expression. */
4312 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4313 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
a1ab4c31
AC
4314 {
4315 /* If we are converting between mere variants, we can just
4316 substitute the VIEW_CONVERT_EXPR in place. */
4317 if (gnat_types_compatible_p (type, etype))
4318 return build1 (VIEW_CONVERT_EXPR, type, op0);
4319
4320 /* Otherwise, we may just bypass the input view conversion unless
4321 one of the types is a fat pointer, which is handled by
4322 specialized code below which relies on exact type matching. */
315cff15
EB
4323 else if (!TYPE_IS_FAT_POINTER_P (type)
4324 && !TYPE_IS_FAT_POINTER_P (etype))
a1ab4c31
AC
4325 return convert (type, op0);
4326 }
ad1d36ba
EB
4327
4328 break;
a1ab4c31 4329 }
a1ab4c31 4330
a1ab4c31
AC
4331 default:
4332 break;
4333 }
4334
4335 /* Check for converting to a pointer to an unconstrained array. */
315cff15 4336 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
a1ab4c31
AC
4337 return convert_to_fat_pointer (type, expr);
4338
7948ae37
OH
4339 /* If we are converting between two aggregate or vector types that are mere
4340 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4341 to a vector type from its representative array type. */
4342 else if ((code == ecode
4343 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4344 && gnat_types_compatible_p (type, etype))
4345 || (code == VECTOR_TYPE
4346 && ecode == ARRAY_TYPE
4347 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4348 etype)))
a1ab4c31
AC
4349 return build1 (VIEW_CONVERT_EXPR, type, expr);
4350
76af763d
EB
4351 /* If we are converting between tagged types, try to upcast properly. */
4352 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4353 && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type))
4354 {
4355 tree child_etype = etype;
4356 do {
4357 tree field = TYPE_FIELDS (child_etype);
4358 if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
4359 return build_component_ref (expr, NULL_TREE, field, false);
4360 child_etype = TREE_TYPE (field);
4361 } while (TREE_CODE (child_etype) == RECORD_TYPE);
4362 }
4363
bb1f7929
EB
4364 /* If we are converting from a smaller form of record type back to it, just
4365 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4366 size on both sides. */
4367 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4368 && smaller_form_type_p (etype, type))
4369 {
4370 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4371 false, false, false, true),
4372 expr);
4373 return build1 (VIEW_CONVERT_EXPR, type, expr);
4374 }
4375
a1ab4c31 4376 /* In all other cases of related types, make a NOP_EXPR. */
86060344 4377 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
a1ab4c31
AC
4378 return fold_convert (type, expr);
4379
4380 switch (code)
4381 {
4382 case VOID_TYPE:
4383 return fold_build1 (CONVERT_EXPR, type, expr);
4384
a1ab4c31
AC
4385 case INTEGER_TYPE:
4386 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4387 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4388 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4389 return unchecked_convert (type, expr, false);
4390 else if (TYPE_BIASED_REPRESENTATION_P (type))
4391 return fold_convert (type,
4392 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4393 convert (TREE_TYPE (type), expr),
1eb58520
AC
4394 convert (TREE_TYPE (type),
4395 TYPE_MIN_VALUE (type))));
a1ab4c31
AC
4396
4397 /* ... fall through ... */
4398
4399 case ENUMERAL_TYPE:
01ddebf2 4400 case BOOLEAN_TYPE:
a1ab4c31
AC
4401 /* If we are converting an additive expression to an integer type
4402 with lower precision, be wary of the optimization that can be
4403 applied by convert_to_integer. There are 2 problematic cases:
4404 - if the first operand was originally of a biased type,
4405 because we could be recursively called to convert it
4406 to an intermediate type and thus rematerialize the
4407 additive operator endlessly,
4408 - if the expression contains a placeholder, because an
4409 intermediate conversion that changes the sign could
4410 be inserted and thus introduce an artificial overflow
4411 at compile time when the placeholder is substituted. */
4412 if (code == INTEGER_TYPE
4413 && ecode == INTEGER_TYPE
4414 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4415 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4416 {
4417 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4418
4419 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4420 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4421 || CONTAINS_PLACEHOLDER_P (expr))
4422 return build1 (NOP_EXPR, type, expr);
4423 }
4424
4425 return fold (convert_to_integer (type, expr));
4426
4427 case POINTER_TYPE:
4428 case REFERENCE_TYPE:
0d7de0e1 4429 /* If converting between two thin pointers, adjust if needed to account
2b45154d
EB
4430 for differing offsets from the base pointer, depending on whether
4431 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type. */
315cff15 4432 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
a1ab4c31 4433 {
2b45154d
EB
4434 tree etype_pos
4435 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)) != NULL_TREE
4436 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype))))
4437 : size_zero_node;
4438 tree type_pos
4439 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != NULL_TREE
4440 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type))))
4441 : size_zero_node;
4442 tree byte_diff = size_diffop (type_pos, etype_pos);
0d7de0e1 4443
a1ab4c31 4444 expr = build1 (NOP_EXPR, type, expr);
a1ab4c31
AC
4445 if (integer_zerop (byte_diff))
4446 return expr;
4447
4448 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
0d7de0e1 4449 fold_convert (sizetype, byte_diff));
a1ab4c31
AC
4450 }
4451
0d7de0e1
EB
4452 /* If converting fat pointer to normal or thin pointer, get the pointer
4453 to the array and then convert it. */
4454 if (TYPE_IS_FAT_POINTER_P (etype))
86060344
EB
4455 expr
4456 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
a1ab4c31
AC
4457
4458 return fold (convert_to_pointer (type, expr));
4459
4460 case REAL_TYPE:
4461 return fold (convert_to_real (type, expr));
4462
4463 case RECORD_TYPE:
4464 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
0e228dd9 4465 {
9771b263
DN
4466 vec<constructor_elt, va_gc> *v;
4467 vec_alloc (v, 1);
0e228dd9
NF
4468
4469 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4470 convert (TREE_TYPE (TYPE_FIELDS (type)),
4471 expr));
4472 return gnat_build_constructor (type, v);
4473 }
a1ab4c31
AC
4474
4475 /* ... fall through ... */
4476
4477 case ARRAY_TYPE:
4478 /* In these cases, assume the front-end has validated the conversion.
4479 If the conversion is valid, it will be a bit-wise conversion, so
4480 it can be viewed as an unchecked conversion. */
4481 return unchecked_convert (type, expr, false);
4482
4483 case UNION_TYPE:
4484      /* This is either a conversion between a tagged type and some
4485	 subtype, which we have to mark as a UNION_TYPE because of
4486	 overlapping fields, or a conversion of an Unchecked_Union. */
4487 return unchecked_convert (type, expr, false);
4488
4489 case UNCONSTRAINED_ARRAY_TYPE:
7948ae37
OH
4490 /* If the input is a VECTOR_TYPE, convert to the representative
4491 array type first. */
4492 if (ecode == VECTOR_TYPE)
4493 {
4494 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
4495 etype = TREE_TYPE (expr);
4496 ecode = TREE_CODE (etype);
4497 }
4498
a1ab4c31
AC
4499 /* If EXPR is a constrained array, take its address, convert it to a
4500 fat pointer, and then dereference it. Likewise if EXPR is a
4501 record containing both a template and a constrained array.
4502 Note that a record representing a justified modular type
4503 always represents a packed constrained array. */
4504 if (ecode == ARRAY_TYPE
4505 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4506 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4507 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4508 return
4509 build_unary_op
4510 (INDIRECT_REF, NULL_TREE,
4511 convert_to_fat_pointer (TREE_TYPE (type),
4512 build_unary_op (ADDR_EXPR,
4513 NULL_TREE, expr)));
4514
4515 /* Do something very similar for converting one unconstrained
4516 array to another. */
4517 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4518 return
4519 build_unary_op (INDIRECT_REF, NULL_TREE,
4520 convert (TREE_TYPE (type),
4521 build_unary_op (ADDR_EXPR,
4522 NULL_TREE, expr)));
4523 else
4524 gcc_unreachable ();
4525
4526 case COMPLEX_TYPE:
4527 return fold (convert_to_complex (type, expr));
4528
4529 default:
4530 gcc_unreachable ();
4531 }
4532}
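/* A hedged illustration (not part of the original source): converting to a
   biased integer type, as in the INTEGER_TYPE case above, stores the offset
   from the lower bound.  A standalone C model of that encoding step, with
   made-up bounds and a made-up helper name:

     #include <assert.h>

     // Model of an Ada subtype ranging over 100 .. 103 held in 2 bits:
     // the representation is the value minus the bias TYPE_MIN_VALUE = 100.
     static unsigned encode_biased (int v)
     {
       return (unsigned) (v - 100);
     }

     int main (void)
     {
       assert (encode_biased (100) == 0);
       assert (encode_biased (103) == 3);
       return 0;
     }
*/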
15bf7d19
EB
4533
4534/* Create an expression whose value is that of EXPR converted to the common
4535 index type, which is sizetype. EXPR is supposed to be in the base type
4536 of the GNAT index type. Calling it is equivalent to doing
4537
4538 convert (sizetype, expr)
4539
4540 but we try to distribute the type conversion with the knowledge that EXPR
4541 cannot overflow in its type. This is a best-effort approach and we fall
4542 back to the above expression as soon as difficulties are encountered.
4543
4544 This is necessary to overcome issues that arise when the GNAT base index
4545 type and the GCC common index type (sizetype) don't have the same size,
4546 which is quite frequent on 64-bit architectures. In this case, and if
4547 the GNAT base index type is signed but the iteration type of the loop has
4548 been forced to unsigned, the loop scalar evolution engine cannot compute
4549 a simple evolution for the general induction variables associated with the
4550 array indices, because it will preserve the wrap-around semantics in the
4551 unsigned type of their "inner" part. As a result, many loop optimizations
4552 are blocked.
4553
4554 The solution is to use a special (basic) induction variable that is at
4555 least as large as sizetype, and to express the aforementioned general
4556 induction variables in terms of this induction variable, eliminating
4557 the problematic intermediate truncation to the GNAT base index type.
4558 This is possible as long as the original expression doesn't overflow
4559   and the middle-end hasn't introduced artificial overflows in the
4560   course of the various simplifications it can make to the expression. */
4561
4562tree
4563convert_to_index_type (tree expr)
4564{
4565 enum tree_code code = TREE_CODE (expr);
4566 tree type = TREE_TYPE (expr);
4567
4568 /* If the type is unsigned, overflow is allowed so we cannot be sure that
4569 EXPR doesn't overflow. Keep it simple if optimization is disabled. */
4570 if (TYPE_UNSIGNED (type) || !optimize)
4571 return convert (sizetype, expr);
4572
4573 switch (code)
4574 {
4575 case VAR_DECL:
4576 /* The main effect of the function: replace a loop parameter with its
4577 associated special induction variable. */
4578 if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
4579 expr = DECL_INDUCTION_VAR (expr);
4580 break;
4581
4582 CASE_CONVERT:
4583 {
4584 tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
4585 /* Bail out as soon as we suspect some sort of type frobbing. */
4586 if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
4587 || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
4588 break;
4589 }
4590
4591 /* ... fall through ... */
4592
4593 case NON_LVALUE_EXPR:
4594 return fold_build1 (code, sizetype,
4595 convert_to_index_type (TREE_OPERAND (expr, 0)));
4596
4597 case PLUS_EXPR:
4598 case MINUS_EXPR:
4599 case MULT_EXPR:
4600 return fold_build2 (code, sizetype,
4601 convert_to_index_type (TREE_OPERAND (expr, 0)),
4602 convert_to_index_type (TREE_OPERAND (expr, 1)));
4603
4604 case COMPOUND_EXPR:
4605 return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
4606 convert_to_index_type (TREE_OPERAND (expr, 1)));
4607
4608 case COND_EXPR:
4609 return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
4610 convert_to_index_type (TREE_OPERAND (expr, 1)),
4611 convert_to_index_type (TREE_OPERAND (expr, 2)));
4612
4613 default:
4614 break;
4615 }
4616
4617 return convert (sizetype, expr);
4618}
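/* A hedged illustration (not part of the original source): the recursion
   above distributes the widening to sizetype over arithmetic that is known
   not to overflow, i.e. it turns (sizetype) (i + 1) into (sizetype) i + 1.
   A standalone C analogue, with a made-up helper name:

     #include <stddef.h>
     #include <assert.h>

     static size_t widened_sum (int i)
     {
       // Valid only because the caller guarantees that i + 1 does not
       // overflow in its original signed type.
       return (size_t) i + 1;
     }

     int main (void)
     {
       assert (widened_sum (41) == 42);
       return 0;
     }
*/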
a1ab4c31
AC
4619\f
4620/* Remove all conversions that are done in EXP. This includes converting
4621 from a padded type or to a justified modular type. If TRUE_ADDRESS
4622 is true, always return the address of the containing object even if
4623 the address is not bit-aligned. */
4624
4625tree
4626remove_conversions (tree exp, bool true_address)
4627{
4628 switch (TREE_CODE (exp))
4629 {
4630 case CONSTRUCTOR:
4631 if (true_address
4632 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4633 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4634 return
9771b263 4635 remove_conversions ((*CONSTRUCTOR_ELTS (exp))[0].value, true);
a1ab4c31
AC
4636 break;
4637
4638 case COMPONENT_REF:
315cff15 4639 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
a1ab4c31
AC
4640 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4641 break;
4642
a1ab4c31 4643 CASE_CONVERT:
722356ce
EB
4644 case VIEW_CONVERT_EXPR:
4645 case NON_LVALUE_EXPR:
a1ab4c31
AC
4646 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4647
4648 default:
4649 break;
4650 }
4651
4652 return exp;
4653}
4654\f
4655/* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
86060344 4656 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
a1ab4c31
AC
4657 likewise return an expression pointing to the underlying array. */
4658
4659tree
4660maybe_unconstrained_array (tree exp)
4661{
4662 enum tree_code code = TREE_CODE (exp);
1aa291f7 4663 tree type = TREE_TYPE (exp);
a1ab4c31 4664
1aa291f7 4665 switch (TREE_CODE (type))
a1ab4c31
AC
4666 {
4667 case UNCONSTRAINED_ARRAY_TYPE:
4668 if (code == UNCONSTRAINED_ARRAY_REF)
4669 {
7e169899 4670 const bool read_only = TREE_READONLY (exp);
a1c7d797
EB
4671 const bool no_trap = TREE_THIS_NOTRAP (exp);
4672
7e169899 4673 exp = TREE_OPERAND (exp, 0);
1aa291f7
EB
4674 type = TREE_TYPE (exp);
4675
7e169899
EB
4676 if (TREE_CODE (exp) == COND_EXPR)
4677 {
4678 tree op1
4679 = build_unary_op (INDIRECT_REF, NULL_TREE,
4680 build_component_ref (TREE_OPERAND (exp, 1),
4681 NULL_TREE,
1aa291f7 4682 TYPE_FIELDS (type),
7e169899
EB
4683 false));
4684 tree op2
4685 = build_unary_op (INDIRECT_REF, NULL_TREE,
4686 build_component_ref (TREE_OPERAND (exp, 2),
4687 NULL_TREE,
1aa291f7 4688 TYPE_FIELDS (type),
7e169899
EB
4689 false));
4690
4691 exp = build3 (COND_EXPR,
1aa291f7 4692 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
7e169899
EB
4693 TREE_OPERAND (exp, 0), op1, op2);
4694 }
4695 else
a1c7d797
EB
4696 {
4697 exp = build_unary_op (INDIRECT_REF, NULL_TREE,
4698 build_component_ref (exp, NULL_TREE,
1aa291f7 4699 TYPE_FIELDS (type),
a1c7d797
EB
4700 false));
4701 TREE_READONLY (exp) = read_only;
4702 TREE_THIS_NOTRAP (exp) = no_trap;
4703 }
a1ab4c31
AC
4704 }
4705
4706 else if (code == NULL_EXPR)
1aa291f7
EB
4707 exp = build1 (NULL_EXPR,
4708 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
4709 TREE_OPERAND (exp, 0));
4710 break;
a1ab4c31
AC
4711
4712 case RECORD_TYPE:
1aa291f7
EB
4713 /* If this is a padded type and it contains a template, convert to the
4714 unpadded type first. */
4715 if (TYPE_PADDING_P (type)
4716 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
4717 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
a1ab4c31 4718 {
1aa291f7
EB
4719 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
4720 type = TREE_TYPE (exp);
4721 }
4722
4723 if (TYPE_CONTAINS_TEMPLATE_P (type))
4724 {
4725 exp = build_component_ref (exp, NULL_TREE,
4726 DECL_CHAIN (TYPE_FIELDS (type)),
4727 false);
4728 type = TREE_TYPE (exp);
4729
4730 /* If the array type is padded, convert to the unpadded type. */
4731 if (TYPE_IS_PADDING_P (type))
4732 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
a1ab4c31 4733 }
a1ab4c31
AC
4734 break;
4735
4736 default:
4737 break;
4738 }
4739
4740 return exp;
4741}
4742\f
afcea859 4743/* Return true if EXPR is an expression that can be folded as an operand
84fb43a1 4744 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
afcea859
EB
4745
4746static bool
4747can_fold_for_view_convert_p (tree expr)
4748{
4749 tree t1, t2;
4750
4751 /* The folder will fold NOP_EXPRs between integral types with the same
4752 precision (in the middle-end's sense). We cannot allow it if the
4753 types don't have the same precision in the Ada sense as well. */
4754 if (TREE_CODE (expr) != NOP_EXPR)
4755 return true;
4756
4757 t1 = TREE_TYPE (expr);
4758 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
4759
4760 /* Defer to the folder for non-integral conversions. */
4761 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
4762 return true;
4763
4764 /* Only fold conversions that preserve both precisions. */
4765 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
4766 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
4767 return true;
4768
4769 return false;
4770}
4771
a1ab4c31 4772/* Return an expression that does an unchecked conversion of EXPR to TYPE.
afcea859
EB
4773 If NOTRUNC_P is true, truncation operations should be suppressed.
4774
4775 Special care is required with (source or target) integral types whose
4776 precision is not equal to their size, to make sure we fetch or assign
4777 the value bits whose location might depend on the endianness, e.g.
4778
4779 Rmsize : constant := 8;
4780 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
4781
4782 type Bit_Array is array (1 .. Rmsize) of Boolean;
4783 pragma Pack (Bit_Array);
4784
4785 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
4786
4787 Value : Int := 2#1000_0001#;
4788 Vbits : Bit_Array := To_Bit_Array (Value);
4789
4790 we expect the 8 bits at Vbits'Address to always contain Value, while
4791 their original location depends on the endianness, at Value'Address
84fb43a1 4792 on a little-endian architecture but not on a big-endian one. */
a1ab4c31
AC
4793
4794tree
4795unchecked_convert (tree type, tree expr, bool notrunc_p)
4796{
4797 tree etype = TREE_TYPE (expr);
c34f3839
EB
4798 enum tree_code ecode = TREE_CODE (etype);
4799 enum tree_code code = TREE_CODE (type);
e63b36bd 4800 tree tem;
980a0501 4801 int c;
a1ab4c31 4802
c34f3839 4803 /* If the expression is already of the right type, we are done. */
a1ab4c31
AC
4804 if (etype == type)
4805 return expr;
4806
4807  /* If both types are integral, just do a normal conversion.
4808 Likewise for a conversion to an unconstrained array. */
1eb58520 4809 if (((INTEGRAL_TYPE_P (type)
0d7de0e1 4810 || (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
c34f3839 4811 || (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
1eb58520 4812 && (INTEGRAL_TYPE_P (etype)
315cff15 4813 || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
c34f3839
EB
4814 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
4815 || code == UNCONSTRAINED_ARRAY_TYPE)
a1ab4c31 4816 {
c34f3839 4817 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
a1ab4c31
AC
4818 {
4819 tree ntype = copy_type (etype);
a1ab4c31
AC
4820 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
4821 TYPE_MAIN_VARIANT (ntype) = ntype;
4822 expr = build1 (NOP_EXPR, ntype, expr);
4823 }
4824
c34f3839 4825 if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
a1ab4c31 4826 {
afcea859 4827 tree rtype = copy_type (type);
a1ab4c31
AC
4828 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
4829 TYPE_MAIN_VARIANT (rtype) = rtype;
afcea859
EB
4830 expr = convert (rtype, expr);
4831 expr = build1 (NOP_EXPR, type, expr);
a1ab4c31 4832 }
afcea859
EB
4833 else
4834 expr = convert (type, expr);
a1ab4c31
AC
4835 }
4836
afcea859 4837 /* If we are converting to an integral type whose precision is not equal
416de7d5
EB
4838     to its size, first unchecked convert to a record type that contains a
4839 field of the given precision. Then extract the field. */
980a0501
EB
4840 else if (INTEGRAL_TYPE_P (type)
4841 && TYPE_RM_SIZE (type)
a1ab4c31
AC
4842 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4843 GET_MODE_BITSIZE (TYPE_MODE (type))))
4844 {
4845 tree rec_type = make_node (RECORD_TYPE);
416de7d5
EB
4846 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
4847 tree field_type, field;
4848
4849 if (TYPE_UNSIGNED (type))
4850 field_type = make_unsigned_type (prec);
4851 else
4852 field_type = make_signed_type (prec);
4853 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));
4854
4855 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
44e9e3ec 4856 NULL_TREE, bitsize_zero_node, 1, 0);
a1ab4c31 4857
44e9e3ec 4858 finish_record_type (rec_type, field, 1, false);
a1ab4c31
AC
4859
4860 expr = unchecked_convert (rec_type, expr, notrunc_p);
3cd64bab 4861 expr = build_component_ref (expr, NULL_TREE, field, false);
416de7d5 4862 expr = fold_build1 (NOP_EXPR, type, expr);
a1ab4c31
AC
4863 }
4864
416de7d5
EB
4865 /* Similarly if we are converting from an integral type whose precision is
4866 not equal to its size, first copy into a field of the given precision
4867 and unchecked convert the record type. */
980a0501
EB
4868 else if (INTEGRAL_TYPE_P (etype)
4869 && TYPE_RM_SIZE (etype)
4870 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
4871 GET_MODE_BITSIZE (TYPE_MODE (etype))))
a1ab4c31
AC
4872 {
4873 tree rec_type = make_node (RECORD_TYPE);
416de7d5 4874 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
9771b263
DN
4875 vec<constructor_elt, va_gc> *v;
4876 vec_alloc (v, 1);
416de7d5
EB
4877 tree field_type, field;
4878
4879 if (TYPE_UNSIGNED (etype))
4880 field_type = make_unsigned_type (prec);
4881 else
4882 field_type = make_signed_type (prec);
4883 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));
4884
4885 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
44e9e3ec 4886 NULL_TREE, bitsize_zero_node, 1, 0);
a1ab4c31 4887
44e9e3ec 4888 finish_record_type (rec_type, field, 1, false);
a1ab4c31 4889
416de7d5 4890 expr = fold_build1 (NOP_EXPR, field_type, expr);
0e228dd9
NF
4891 CONSTRUCTOR_APPEND_ELT (v, field, expr);
4892 expr = gnat_build_constructor (rec_type, v);
a1ab4c31
AC
4893 expr = unchecked_convert (type, expr, notrunc_p);
4894 }
4895
980a0501
EB
4896 /* If we are converting from a scalar type to a type with a different size,
4897 we need to pad to have the same size on both sides.
4898
4899 ??? We cannot do it unconditionally because unchecked conversions are
4900 used liberally by the front-end to implement polymorphism, e.g. in:
4901
4902 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
4903 return p___size__4 (p__object!(S191s.all));
4904
4905 so we skip all expressions that are references. */
4906 else if (!REFERENCE_CLASS_P (expr)
4907 && !AGGREGATE_TYPE_P (etype)
4908 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
4909 && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
4910 {
4911 if (c < 0)
4912 {
4913 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4914 false, false, false, true),
4915 expr);
4916 expr = unchecked_convert (type, expr, notrunc_p);
4917 }
4918 else
4919 {
4920 tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
4921 false, false, false, true);
4922 expr = unchecked_convert (rec_type, expr, notrunc_p);
4923 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (rec_type),
4924 false);
4925 }
4926 }
4927
7948ae37
OH
4928 /* We have a special case when we are converting between two unconstrained
4929 array types. In that case, take the address, convert the fat pointer
4930 types, and dereference. */
c34f3839 4931 else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
a1ab4c31
AC
4932 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
4933 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
4934 build_unary_op (ADDR_EXPR, NULL_TREE,
4935 expr)));
7948ae37
OH
4936
4937 /* Another special case is when we are converting to a vector type from its
4938     representative array type; this is a regular conversion. */
c34f3839
EB
4939 else if (code == VECTOR_TYPE
4940 && ecode == ARRAY_TYPE
7948ae37
OH
4941 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4942 etype))
4943 expr = convert (type, expr);
4944
e63b36bd
EB
4945 /* And, if the array type is not the representative, we try to build an
4946 intermediate vector type of which the array type is the representative
4947 and to do the unchecked conversion between the vector types, in order
4948 to enable further simplifications in the middle-end. */
4949 else if (code == VECTOR_TYPE
4950 && ecode == ARRAY_TYPE
4951 && (tem = build_vector_type_for_array (etype, NULL_TREE)))
4952 {
4953 expr = convert (tem, expr);
4954 return unchecked_convert (type, expr, notrunc_p);
4955 }
4956
44e9e3ec
EB
4957 /* If we are converting a CONSTRUCTOR to a more aligned RECORD_TYPE, bump
4958 the alignment of the CONSTRUCTOR to speed up the copy operation. */
4959 else if (TREE_CODE (expr) == CONSTRUCTOR
4960 && code == RECORD_TYPE
4961 && TYPE_ALIGN (etype) < TYPE_ALIGN (type))
4962 {
4963 expr = convert (maybe_pad_type (etype, NULL_TREE, TYPE_ALIGN (type),
4964 Empty, false, false, false, true),
4965 expr);
4966 return unchecked_convert (type, expr, notrunc_p);
4967 }
4968
4969 /* Otherwise, just build a VIEW_CONVERT_EXPR of the expression. */
a1ab4c31
AC
4970 else
4971 {
4972 expr = maybe_unconstrained_array (expr);
4973 etype = TREE_TYPE (expr);
c34f3839 4974 ecode = TREE_CODE (etype);
afcea859
EB
4975 if (can_fold_for_view_convert_p (expr))
4976 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
4977 else
4978 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
a1ab4c31
AC
4979 }
4980
afcea859
EB
4981 /* If the result is an integral type whose precision is not equal to its
4982 size, sign- or zero-extend the result. We need not do this if the input
4983 is an integral type of the same precision and signedness or if the output
a1ab4c31
AC
4984 is a biased type or if both the input and output are unsigned. */
4985 if (!notrunc_p
4986 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
c34f3839 4987 && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
a1ab4c31
AC
4988 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4989 GET_MODE_BITSIZE (TYPE_MODE (type)))
4990 && !(INTEGRAL_TYPE_P (etype)
4991 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
4992 && operand_equal_p (TYPE_RM_SIZE (type),
4993 (TYPE_RM_SIZE (etype) != 0
4994 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
4995 0))
4996 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
4997 {
c34f3839
EB
4998 tree base_type
4999 = gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
a1ab4c31
AC
5000 tree shift_expr
5001 = convert (base_type,
5002 size_binop (MINUS_EXPR,
5003 bitsize_int
5004 (GET_MODE_BITSIZE (TYPE_MODE (type))),
5005 TYPE_RM_SIZE (type)));
5006 expr
5007 = convert (type,
5008 build_binary_op (RSHIFT_EXPR, base_type,
5009 build_binary_op (LSHIFT_EXPR, base_type,
5010 convert (base_type, expr),
5011 shift_expr),
5012 shift_expr));
5013 }
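/* A hedged illustration (not part of the original source): the extension
   just above is the classical shift-left-then-shift-right idiom.  A
   standalone C model for an 8-bit value carried in a 32-bit container,
   relying, like the code above, on the arithmetic right shift provided by
   the hardware for signed types:

     #include <stdint.h>
     #include <assert.h>

     static int32_t sign_extend_8 (int32_t v)
     {
       const unsigned shift = 32 - 8;  // GET_MODE_BITSIZE minus TYPE_RM_SIZE
       return (int32_t) ((uint32_t) v << shift) >> shift;
     }

     int main (void)
     {
       assert (sign_extend_8 (0xFF) == -1);
       assert (sign_extend_8 (0x7F) == 127);
       return 0;
     }
*/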
5014
5015 /* An unchecked conversion should never raise Constraint_Error. The code
5016 below assumes that GCC's conversion routines overflow the same way that
5017 the underlying hardware does. This is probably true. In the rare case
5018 when it is false, we can rely on the fact that such conversions are
5019 erroneous anyway. */
5020 if (TREE_CODE (expr) == INTEGER_CST)
5021 TREE_OVERFLOW (expr) = 0;
5022
5023  /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
5024     mark the expression as no longer constant. */
5025 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
5026 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
5027 OEP_ONLY_CONST))
5028 TREE_CONSTANT (expr) = 0;
5029
5030 return expr;
5031}
5032\f
feec4372 5033/* Return the appropriate GCC tree code for the specified GNAT_TYPE,
a1ab4c31
AC
5034 the latter being a record type as predicated by Is_Record_Type. */
5035
5036enum tree_code
5037tree_code_for_record_type (Entity_Id gnat_type)
5038{
b1a785fb 5039 Node_Id component_list, component;
a1ab4c31 5040
b1a785fb
EB
5041 /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5042 fields are all in the variant part. Otherwise, return RECORD_TYPE. */
a1ab4c31
AC
5043 if (!Is_Unchecked_Union (gnat_type))
5044 return RECORD_TYPE;
5045
b1a785fb
EB
5046 gnat_type = Implementation_Base_Type (gnat_type);
5047 component_list
5048 = Component_List (Type_Definition (Declaration_Node (gnat_type)));
5049
a1ab4c31
AC
5050 for (component = First_Non_Pragma (Component_Items (component_list));
5051 Present (component);
5052 component = Next_Non_Pragma (component))
5053 if (Ekind (Defining_Entity (component)) == E_Component)
5054 return RECORD_TYPE;
5055
5056 return UNION_TYPE;
5057}
5058
caa9d12a
EB
5059/* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5060 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5061 according to the presence of an alignment clause on the type or, if it
5062 is an array, on the component type. */
5063
5064bool
5065is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
5066{
5067 gnat_type = Underlying_Type (gnat_type);
5068
5069 *align_clause = Present (Alignment_Clause (gnat_type));
5070
5071 if (Is_Array_Type (gnat_type))
5072 {
5073 gnat_type = Underlying_Type (Component_Type (gnat_type));
5074 if (Present (Alignment_Clause (gnat_type)))
5075 *align_clause = true;
5076 }
5077
5078 if (!Is_Floating_Point_Type (gnat_type))
5079 return false;
5080
5081 if (UI_To_Int (Esize (gnat_type)) != 64)
5082 return false;
5083
5084 return true;
5085}
5086
5087/* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5088 size is greater or equal to 64 bits, or an array of such a type. Set
5089 ALIGN_CLAUSE according to the presence of an alignment clause on the
5090 type or, if it is an array, on the component type. */
5091
5092bool
5093is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
5094{
5095 gnat_type = Underlying_Type (gnat_type);
5096
5097 *align_clause = Present (Alignment_Clause (gnat_type));
5098
5099 if (Is_Array_Type (gnat_type))
5100 {
5101 gnat_type = Underlying_Type (Component_Type (gnat_type));
5102 if (Present (Alignment_Clause (gnat_type)))
5103 *align_clause = true;
5104 }
5105
5106 if (!Is_Scalar_Type (gnat_type))
5107 return false;
5108
5109 if (UI_To_Int (Esize (gnat_type)) < 64)
5110 return false;
5111
5112 return true;
5113}
5114
a1ab4c31
AC
5115/* Return true if GNU_TYPE is suitable as the type of a non-aliased
5116 component of an aggregate type. */
5117
5118bool
5119type_for_nonaliased_component_p (tree gnu_type)
5120{
5121 /* If the type is passed by reference, we may have pointers to the
5122 component so it cannot be made non-aliased. */
5123 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
5124 return false;
5125
5126 /* We used to say that any component of aggregate type is aliased
5127 because the front-end may take 'Reference of it. The front-end
5128 has been enhanced in the meantime so as to use a renaming instead
5129 in most cases, but the back-end can probably take the address of
5130     such a component too, so we go for the conservative stance.
5131
5132 For instance, we might need the address of any array type, even
5133 if normally passed by copy, to construct a fat pointer if the
5134 component is used as an actual for an unconstrained formal.
5135
5136 Likewise for record types: even if a specific record subtype is
5137 passed by copy, the parent type might be passed by ref (e.g. if
5138 it's of variable size) and we might take the address of a child
5139 component to pass to a parent formal. We have no way to check
5140 for such conditions here. */
5141 if (AGGREGATE_TYPE_P (gnu_type))
5142 return false;
5143
5144 return true;
5145}
5146
bb1f7929
EB
5147/* Return true if TYPE is a smaller form of ORIG_TYPE. */
5148
5149bool
5150smaller_form_type_p (tree type, tree orig_type)
5151{
5152 tree size, osize;
5153
5154 /* We're not interested in variants here. */
5155 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
5156 return false;
5157
5158 /* Like a variant, a packable version keeps the original TYPE_NAME. */
5159 if (TYPE_NAME (type) != TYPE_NAME (orig_type))
5160 return false;
5161
5162 size = TYPE_SIZE (type);
5163 osize = TYPE_SIZE (orig_type);
5164
5165 if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
5166 return false;
5167
5168 return tree_int_cst_lt (size, osize) != 0;
5169}
5170
a1ab4c31
AC
5171/* Perform final processing on global variables. */
5172
65444786
EB
5173static GTY (()) tree dummy_global;
5174
a1ab4c31
AC
5175void
5176gnat_write_global_declarations (void)
5177{
10e4d056
EB
5178 unsigned int i;
5179 tree iter;
5180
65444786
EB
5181 /* If we have declared types as used at the global level, insert them in
5182 the global hash table. We use a dummy variable for this purpose. */
9771b263 5183 if (types_used_by_cur_var_decl && !types_used_by_cur_var_decl->is_empty ())
65444786 5184 {
35e8bcf4 5185 struct varpool_node *node;
d3c268ab
EB
5186 char *label;
5187
5188 ASM_FORMAT_PRIVATE_NAME (label, first_global_object_name, 0);
65444786 5189 dummy_global
d3c268ab
EB
5190 = build_decl (BUILTINS_LOCATION, VAR_DECL, get_identifier (label),
5191 void_type_node);
dd25fe0a 5192 DECL_HARD_REGISTER (dummy_global) = 1;
65444786 5193 TREE_STATIC (dummy_global) = 1;
037e5573 5194 node = varpool_node::get_create (dummy_global);
dd25fe0a 5195 node->definition = 1;
67348ccc 5196 node->force_output = 1;
65444786 5197
9771b263 5198 while (!types_used_by_cur_var_decl->is_empty ())
65444786 5199 {
9771b263 5200 tree t = types_used_by_cur_var_decl->pop ();
65444786
EB
5201 types_used_by_var_decl_insert (t, dummy_global);
5202 }
5203 }
5204
10e4d056
EB
5205 /* Output debug information for all global type declarations first. This
5206 ensures that global types whose compilation hasn't been finalized yet,
5207 for example pointers to Taft amendment types, have their compilation
5208 finalized in the right context. */
9771b263 5209 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
9a30c7c4 5210 if (TREE_CODE (iter) == TYPE_DECL && !DECL_IGNORED_P (iter))
1f0e2688 5211 debug_hooks->type_decl (iter, false);
10e4d056 5212
65d630d4 5213 /* Proceed to optimize and emit assembly. */
3dafb85c 5214 symtab->finalize_compilation_unit ();
a1ab4c31 5215
10e4d056
EB
5216 /* After cgraph has had a chance to emit everything that's going to
5217 be emitted, output debug information for the rest of globals. */
5218 if (!seen_error ())
5219 {
5220 timevar_push (TV_SYMOUT);
9771b263 5221 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
9a30c7c4 5222 if (TREE_CODE (iter) != TYPE_DECL && !DECL_IGNORED_P (iter))
10e4d056
EB
5223 debug_hooks->global_decl (iter);
5224 timevar_pop (TV_SYMOUT);
5225 }
a1ab4c31
AC
5226}
5227
5228/* ************************************************************************
5229 * * GCC builtins support *
5230 * ************************************************************************ */
5231
5232/* The general scheme is fairly simple:
5233
5234 For each builtin function/type to be declared, gnat_install_builtins calls
aef308d0 5235 internal facilities which eventually get to gnat_pushdecl, which in turn
a1ab4c31
AC
5236   tracks the builtin function decls thus declared in the 'builtin_decls' global
5237   data structure. When an Intrinsic subprogram declaration is processed, we
5238   search this global data structure to retrieve the associated BUILT_IN DECL
5239 node. */
5240
5241/* Search the chain of currently available builtin declarations for a node
5242 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
5243 found, if any, or NULL_TREE otherwise. */
5244tree
5245builtin_decl_for (tree name)
5246{
5247 unsigned i;
5248 tree decl;
5249
9771b263 5250 FOR_EACH_VEC_SAFE_ELT (builtin_decls, i, decl)
a1ab4c31
AC
5251 if (DECL_NAME (decl) == name)
5252 return decl;
5253
5254 return NULL_TREE;
5255}
5256
5257/* The code below eventually exposes gnat_install_builtins, which declares
5258 the builtin types and functions we might need, either internally or as
5259 user accessible facilities.
5260
5261 ??? This is a first implementation shot, still in rough shape. It is
5262 heavily inspired from the "C" family implementation, with chunks copied
5263 verbatim from there.
5264
5265 Two obvious TODO candidates are
5266 o Use a more efficient name/decl mapping scheme
5267 o Devise a middle-end infrastructure to avoid having to copy
5268 pieces between front-ends. */
5269
5270/* ----------------------------------------------------------------------- *
5271 * BUILTIN ELEMENTARY TYPES *
5272 * ----------------------------------------------------------------------- */
5273
5274/* Standard data types to be used in builtin argument declarations. */
5275
5276enum c_tree_index
5277{
5278 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
5279 CTI_STRING_TYPE,
5280 CTI_CONST_STRING_TYPE,
5281
5282 CTI_MAX
5283};
5284
5285static tree c_global_trees[CTI_MAX];
5286
5287#define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
5288#define string_type_node c_global_trees[CTI_STRING_TYPE]
5289#define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
5290
5291/* ??? In addition to some attribute handlers, we currently don't support a
5292   (small) number of builtin types, which in turn inhibits support for a
5293 number of builtin functions. */
5294#define wint_type_node void_type_node
5295#define intmax_type_node void_type_node
5296#define uintmax_type_node void_type_node
5297
5298/* Build the void_list_node (void_type_node having been created). */
5299
5300static tree
5301build_void_list_node (void)
5302{
5303 tree t = build_tree_list (NULL_TREE, void_type_node);
5304 return t;
5305}
5306
5307/* Used to help initialize the builtin-types.def table. When a type of
5308 the correct size doesn't exist, use error_mark_node instead of NULL.
5309   The latter results in segfaults even when a decl using the type doesn't
5310 get invoked. */
5311
5312static tree
5313builtin_type_for_size (int size, bool unsignedp)
5314{
ced57283 5315 tree type = gnat_type_for_size (size, unsignedp);
a1ab4c31
AC
5316 return type ? type : error_mark_node;
5317}
5318
5319/* Build/push the elementary type decls that builtin functions/types
5320 will need. */
5321
5322static void
5323install_builtin_elementary_types (void)
5324{
728936bb 5325 signed_size_type_node = gnat_signed_type (size_type_node);
a1ab4c31
AC
5326 pid_type_node = integer_type_node;
5327 void_list_node = build_void_list_node ();
5328
5329 string_type_node = build_pointer_type (char_type_node);
5330 const_string_type_node
5331 = build_pointer_type (build_qualified_type
5332 (char_type_node, TYPE_QUAL_CONST));
5333}
5334
5335/* ----------------------------------------------------------------------- *
5336 * BUILTIN FUNCTION TYPES *
5337 * ----------------------------------------------------------------------- */
5338
5339/* Now, builtin function types per se. */
5340
5341enum c_builtin_type
5342{
5343#define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
5344#define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
5345#define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
5346#define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
5347#define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5348#define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5349#define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
f6a7cffc
TS
5350#define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5351 ARG6) NAME,
5352#define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5353 ARG6, ARG7) NAME,
5354#define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5355 ARG6, ARG7, ARG8) NAME,
a1ab4c31
AC
5356#define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
5357#define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
5358#define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
5359#define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5360#define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
f6a7cffc 5361#define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
56a9f6bc
TS
5362 NAME,
5363#define DEF_FUNCTION_TYPE_VAR_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5364 ARG6, ARG7) NAME,
5365#define DEF_FUNCTION_TYPE_VAR_11(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5366 ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) NAME,
a1ab4c31
AC
5367#define DEF_POINTER_TYPE(NAME, TYPE) NAME,
5368#include "builtin-types.def"
5369#undef DEF_PRIMITIVE_TYPE
5370#undef DEF_FUNCTION_TYPE_0
5371#undef DEF_FUNCTION_TYPE_1
5372#undef DEF_FUNCTION_TYPE_2
5373#undef DEF_FUNCTION_TYPE_3
5374#undef DEF_FUNCTION_TYPE_4
5375#undef DEF_FUNCTION_TYPE_5
5376#undef DEF_FUNCTION_TYPE_6
5377#undef DEF_FUNCTION_TYPE_7
acf0174b 5378#undef DEF_FUNCTION_TYPE_8
a1ab4c31
AC
5379#undef DEF_FUNCTION_TYPE_VAR_0
5380#undef DEF_FUNCTION_TYPE_VAR_1
5381#undef DEF_FUNCTION_TYPE_VAR_2
5382#undef DEF_FUNCTION_TYPE_VAR_3
5383#undef DEF_FUNCTION_TYPE_VAR_4
5384#undef DEF_FUNCTION_TYPE_VAR_5
56a9f6bc
TS
5385#undef DEF_FUNCTION_TYPE_VAR_7
5386#undef DEF_FUNCTION_TYPE_VAR_11
a1ab4c31
AC
5387#undef DEF_POINTER_TYPE
5388 BT_LAST
5389};
5390
5391typedef enum c_builtin_type builtin_type;
5392
5393/* A temporary array used in communication with def_fn_type. */
5394static GTY(()) tree builtin_types[(int) BT_LAST + 1];
5395
5396/* A helper function for install_builtin_types. Build function type
5397 for DEF with return type RET and N arguments. If VAR is true, then the
5398 function should be variadic after those N arguments.
5399
5400 Takes special care not to ICE if any of the types involved are
5401 error_mark_node, which indicates that said type is not in fact available
5402 (see builtin_type_for_size). In which case the function type as a whole
5403 should be error_mark_node. */
5404
5405static void
5406def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
5407{
e5b00edf
NF
5408 tree t;
5409 tree *args = XALLOCAVEC (tree, n);
a1ab4c31
AC
5410 va_list list;
5411 int i;
5412
5413 va_start (list, n);
5414 for (i = 0; i < n; ++i)
5415 {
c6bd4220 5416 builtin_type a = (builtin_type) va_arg (list, int);
a1ab4c31
AC
5417 t = builtin_types[a];
5418 if (t == error_mark_node)
5419 goto egress;
e5b00edf 5420 args[i] = t;
a1ab4c31 5421 }
a1ab4c31 5422
a1ab4c31
AC
5423 t = builtin_types[ret];
5424 if (t == error_mark_node)
5425 goto egress;
e5b00edf
NF
5426 if (var)
5427 t = build_varargs_function_type_array (t, n, args);
5428 else
5429 t = build_function_type_array (t, n, args);
a1ab4c31
AC
5430
5431 egress:
5432 builtin_types[def] = t;
0edf1bb2 5433 va_end (list);
a1ab4c31
AC
5434}
5435
5436/* Build the builtin function types and install them in the builtin_types
5437 array for later use in builtin function decls. */
5438
5439static void
5440install_builtin_function_types (void)
5441{
5442 tree va_list_ref_type_node;
5443 tree va_list_arg_type_node;
5444
5445 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
5446 {
5447 va_list_arg_type_node = va_list_ref_type_node =
5448 build_pointer_type (TREE_TYPE (va_list_type_node));
5449 }
5450 else
5451 {
5452 va_list_arg_type_node = va_list_type_node;
5453 va_list_ref_type_node = build_reference_type (va_list_type_node);
5454 }
5455
5456#define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
5457 builtin_types[ENUM] = VALUE;
5458#define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
5459 def_fn_type (ENUM, RETURN, 0, 0);
5460#define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
5461 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
5462#define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
5463 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
5464#define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5465 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
5466#define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5467 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
5468#define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5469 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5470#define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5471 ARG6) \
5472 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5473#define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5474 ARG6, ARG7) \
5475 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
acf0174b
JJ
5476#define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5477 ARG6, ARG7, ARG8) \
5478 def_fn_type (ENUM, RETURN, 0, 8, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5479 ARG7, ARG8);
a1ab4c31
AC
5480#define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
5481 def_fn_type (ENUM, RETURN, 1, 0);
5482#define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
5483 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
5484#define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
5485 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
5486#define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5487 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
5488#define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5489 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
5490#define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5491 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
56a9f6bc
TS
5492#define DEF_FUNCTION_TYPE_VAR_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5493 ARG6, ARG7) \
5494 def_fn_type (ENUM, RETURN, 1, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5495#define DEF_FUNCTION_TYPE_VAR_11(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5496 ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) \
5497 def_fn_type (ENUM, RETURN, 1, 11, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5498 ARG7, ARG8, ARG9, ARG10, ARG11);
a1ab4c31
AC
5499#define DEF_POINTER_TYPE(ENUM, TYPE) \
5500 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
5501
5502#include "builtin-types.def"
5503
5504#undef DEF_PRIMITIVE_TYPE
f6a7cffc 5505#undef DEF_FUNCTION_TYPE_0
a1ab4c31
AC
5506#undef DEF_FUNCTION_TYPE_1
5507#undef DEF_FUNCTION_TYPE_2
5508#undef DEF_FUNCTION_TYPE_3
5509#undef DEF_FUNCTION_TYPE_4
5510#undef DEF_FUNCTION_TYPE_5
5511#undef DEF_FUNCTION_TYPE_6
f6a7cffc
TS
5512#undef DEF_FUNCTION_TYPE_7
5513#undef DEF_FUNCTION_TYPE_8
a1ab4c31
AC
5514#undef DEF_FUNCTION_TYPE_VAR_0
5515#undef DEF_FUNCTION_TYPE_VAR_1
5516#undef DEF_FUNCTION_TYPE_VAR_2
5517#undef DEF_FUNCTION_TYPE_VAR_3
5518#undef DEF_FUNCTION_TYPE_VAR_4
5519#undef DEF_FUNCTION_TYPE_VAR_5
56a9f6bc
TS
5520#undef DEF_FUNCTION_TYPE_VAR_7
5521#undef DEF_FUNCTION_TYPE_VAR_11
a1ab4c31
AC
5522#undef DEF_POINTER_TYPE
5523 builtin_types[(int) BT_LAST] = NULL_TREE;
5524}
5525
5526/* ----------------------------------------------------------------------- *
5527 * BUILTIN ATTRIBUTES *
5528 * ----------------------------------------------------------------------- */
5529
5530enum built_in_attribute
5531{
5532#define DEF_ATTR_NULL_TREE(ENUM) ENUM,
5533#define DEF_ATTR_INT(ENUM, VALUE) ENUM,
e384e6b5 5534#define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
a1ab4c31
AC
5535#define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
5536#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
5537#include "builtin-attrs.def"
5538#undef DEF_ATTR_NULL_TREE
5539#undef DEF_ATTR_INT
e384e6b5 5540#undef DEF_ATTR_STRING
a1ab4c31
AC
5541#undef DEF_ATTR_IDENT
5542#undef DEF_ATTR_TREE_LIST
5543 ATTR_LAST
5544};
5545
5546static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
5547
5548static void
5549install_builtin_attributes (void)
5550{
5551 /* Fill in the built_in_attributes array. */
5552#define DEF_ATTR_NULL_TREE(ENUM) \
5553 built_in_attributes[(int) ENUM] = NULL_TREE;
5554#define DEF_ATTR_INT(ENUM, VALUE) \
5555 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
e384e6b5
BS
5556#define DEF_ATTR_STRING(ENUM, VALUE) \
5557 built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
a1ab4c31
AC
5558#define DEF_ATTR_IDENT(ENUM, STRING) \
5559 built_in_attributes[(int) ENUM] = get_identifier (STRING);
5560#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
5561 built_in_attributes[(int) ENUM] \
5562 = tree_cons (built_in_attributes[(int) PURPOSE], \
5563 built_in_attributes[(int) VALUE], \
5564 built_in_attributes[(int) CHAIN]);
5565#include "builtin-attrs.def"
5566#undef DEF_ATTR_NULL_TREE
5567#undef DEF_ATTR_INT
e384e6b5 5568#undef DEF_ATTR_STRING
a1ab4c31
AC
5569#undef DEF_ATTR_IDENT
5570#undef DEF_ATTR_TREE_LIST
5571}
5572
5573/* Handle a "const" attribute; arguments as in
5574 struct attribute_spec.handler. */
5575
5576static tree
5577handle_const_attribute (tree *node, tree ARG_UNUSED (name),
5578 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5579 bool *no_add_attrs)
5580{
5581 if (TREE_CODE (*node) == FUNCTION_DECL)
5582 TREE_READONLY (*node) = 1;
5583 else
5584 *no_add_attrs = true;
5585
5586 return NULL_TREE;
5587}
5588
5589/* Handle a "nothrow" attribute; arguments as in
5590 struct attribute_spec.handler. */
5591
5592static tree
5593handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
5594 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5595 bool *no_add_attrs)
5596{
5597 if (TREE_CODE (*node) == FUNCTION_DECL)
5598 TREE_NOTHROW (*node) = 1;
5599 else
5600 *no_add_attrs = true;
5601
5602 return NULL_TREE;
5603}
5604
5605/* Handle a "pure" attribute; arguments as in
5606 struct attribute_spec.handler. */
5607
5608static tree
5609handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5610 int ARG_UNUSED (flags), bool *no_add_attrs)
5611{
5612 if (TREE_CODE (*node) == FUNCTION_DECL)
5613 DECL_PURE_P (*node) = 1;
5614 /* ??? TODO: Support types. */
5615 else
5616 {
7948ae37
OH
5617 warning (OPT_Wattributes, "%qs attribute ignored",
5618 IDENTIFIER_POINTER (name));
a1ab4c31
AC
5619 *no_add_attrs = true;
5620 }
5621
5622 return NULL_TREE;
5623}
5624
5625/* Handle a "no vops" attribute; arguments as in
5626 struct attribute_spec.handler. */
5627
5628static tree
5629handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
5630 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5631 bool *ARG_UNUSED (no_add_attrs))
5632{
5633 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
5634 DECL_IS_NOVOPS (*node) = 1;
5635 return NULL_TREE;
5636}
5637
5638/* Helper for nonnull attribute handling; fetch the operand number
5639 from the attribute argument list. */
5640
5641static bool
5642get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
5643{
5644 /* Verify the arg number is a constant. */
807e902e 5645 if (!tree_fits_uhwi_p (arg_num_expr))
a1ab4c31
AC
5646 return false;
5647
5648 *valp = TREE_INT_CST_LOW (arg_num_expr);
5649 return true;
5650}
5651
5652/* Handle the "nonnull" attribute. */
5653static tree
5654handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
5655 tree args, int ARG_UNUSED (flags),
5656 bool *no_add_attrs)
5657{
5658 tree type = *node;
5659 unsigned HOST_WIDE_INT attr_arg_num;
5660
5661 /* If no arguments are specified, all pointer arguments should be
5662 non-null. Verify a full prototype is given so that the arguments
5663 will have the correct types when we actually check them later. */
5664 if (!args)
5665 {
f4da8dce 5666 if (!prototype_p (type))
a1ab4c31
AC
5667 {
5668 error ("nonnull attribute without arguments on a non-prototype");
5669 *no_add_attrs = true;
5670 }
5671 return NULL_TREE;
5672 }
5673
5674 /* Argument list specified. Verify that each argument number references
5675 a pointer argument. */
5676 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
5677 {
a1ab4c31
AC
5678 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
5679
5680 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
5681 {
5682 error ("nonnull argument has invalid operand number (argument %lu)",
5683 (unsigned long) attr_arg_num);
5684 *no_add_attrs = true;
5685 return NULL_TREE;
5686 }
5687
d7d058c5 5688 if (prototype_p (type))
a1ab4c31 5689 {
d7d058c5
NF
5690 function_args_iterator iter;
5691 tree argument;
5692
5693 function_args_iter_init (&iter, type);
5694 for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
a1ab4c31 5695 {
d7d058c5 5696 argument = function_args_iter_cond (&iter);
a1ab4c31
AC
5697 if (!argument || ck_num == arg_num)
5698 break;
a1ab4c31
AC
5699 }
5700
5701 if (!argument
d7d058c5 5702 || TREE_CODE (argument) == VOID_TYPE)
a1ab4c31 5703 {
58c8f770
EB
5704 error ("nonnull argument with out-of-range operand number "
5705 "(argument %lu, operand %lu)",
a1ab4c31
AC
5706 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5707 *no_add_attrs = true;
5708 return NULL_TREE;
5709 }
5710
d7d058c5 5711 if (TREE_CODE (argument) != POINTER_TYPE)
a1ab4c31 5712 {
58c8f770
EB
5713 error ("nonnull argument references non-pointer operand "
5714 "(argument %lu, operand %lu)",
a1ab4c31
AC
5715 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5716 *no_add_attrs = true;
5717 return NULL_TREE;
5718 }
5719 }
5720 }
5721
5722 return NULL_TREE;
5723}
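/* Usage sketch (not part of the original source): the handler above gives
   the attribute the same meaning as in the C family, e.g.

     extern void my_log (const char *msg) __attribute__ ((nonnull (1)));

   where operand 1 must designate a pointer argument; the function name is
   made up, only the attribute syntax is real. */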
5724
5725/* Handle a "sentinel" attribute. */
5726
5727static tree
5728handle_sentinel_attribute (tree *node, tree name, tree args,
5729 int ARG_UNUSED (flags), bool *no_add_attrs)
5730{
f4da8dce 5731 if (!prototype_p (*node))
a1ab4c31
AC
5732 {
5733 warning (OPT_Wattributes,
7948ae37
OH
5734 "%qs attribute requires prototypes with named arguments",
5735 IDENTIFIER_POINTER (name));
a1ab4c31
AC
5736 *no_add_attrs = true;
5737 }
5738 else
5739 {
dcf0c47e 5740 if (!stdarg_p (*node))
a1ab4c31
AC
5741 {
5742 warning (OPT_Wattributes,
7948ae37
OH
5743 "%qs attribute only applies to variadic functions",
5744 IDENTIFIER_POINTER (name));
a1ab4c31
AC
5745 *no_add_attrs = true;
5746 }
5747 }
5748
5749 if (args)
5750 {
5751 tree position = TREE_VALUE (args);
5752
5753 if (TREE_CODE (position) != INTEGER_CST)
5754 {
5755 warning (0, "requested position is not an integer constant");
5756 *no_add_attrs = true;
5757 }
5758 else
5759 {
5760 if (tree_int_cst_lt (position, integer_zero_node))
5761 {
5762 warning (0, "requested position is less than zero");
5763 *no_add_attrs = true;
5764 }
5765 }
5766 }
5767
5768 return NULL_TREE;
5769}
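/* Usage sketch (not part of the original source): the attribute handled
   above applies to variadic functions whose argument list must end with a
   null pointer, e.g.

     extern void my_execlp (const char *file, ...) __attribute__ ((sentinel));

   the function name is made up, only the attribute syntax is real. */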
5770
5771/* Handle a "noreturn" attribute; arguments as in
5772 struct attribute_spec.handler. */
5773
5774static tree
5775handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5776 int ARG_UNUSED (flags), bool *no_add_attrs)
5777{
5778 tree type = TREE_TYPE (*node);
5779
5780 /* See FIXME comment in c_common_attribute_table. */
5781 if (TREE_CODE (*node) == FUNCTION_DECL)
5782 TREE_THIS_VOLATILE (*node) = 1;
5783 else if (TREE_CODE (type) == POINTER_TYPE
5784 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
5785 TREE_TYPE (*node)
5786 = build_pointer_type
5787 (build_type_variant (TREE_TYPE (type),
5788 TYPE_READONLY (TREE_TYPE (type)), 1));
5789 else
5790 {
7948ae37
OH
5791 warning (OPT_Wattributes, "%qs attribute ignored",
5792 IDENTIFIER_POINTER (name));
a1ab4c31
AC
5793 *no_add_attrs = true;
5794 }
5795
5796 return NULL_TREE;
5797}
5798
0d6e14fd
JH
5799/* Handle a "leaf" attribute; arguments as in
5800 struct attribute_spec.handler. */
5801
5802static tree
f087ea44 5803handle_leaf_attribute (tree *node, tree name, tree ARG_UNUSED (args),
0d6e14fd
JH
5804 int ARG_UNUSED (flags), bool *no_add_attrs)
5805{
5806 if (TREE_CODE (*node) != FUNCTION_DECL)
5807 {
5808 warning (OPT_Wattributes, "%qE attribute ignored", name);
5809 *no_add_attrs = true;
5810 }
5811 if (!TREE_PUBLIC (*node))
5812 {
32a5388a 5813 warning (OPT_Wattributes, "%qE attribute has no effect", name);
0d6e14fd 5814 *no_add_attrs = true;
f087ea44
AC
5815 }
5816
5817 return NULL_TREE;
5818}
5819
5820/* Handle a "always_inline" attribute; arguments as in
5821 struct attribute_spec.handler. */
5822
5823static tree
5824handle_always_inline_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5825 int ARG_UNUSED (flags), bool *no_add_attrs)
5826{
5827 if (TREE_CODE (*node) == FUNCTION_DECL)
5828 {
5829 /* Set the attribute and mark it for disregarding inline limits. */
5830 DECL_DISREGARD_INLINE_LIMITS (*node) = 1;
5831 }
5832 else
5833 {
5834 warning (OPT_Wattributes, "%qE attribute ignored", name);
5835 *no_add_attrs = true;
0d6e14fd
JH
5836 }
5837
5838 return NULL_TREE;
5839}
5840
a1ab4c31
AC
5841/* Handle a "malloc" attribute; arguments as in
5842 struct attribute_spec.handler. */
5843
5844static tree
5845handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5846 int ARG_UNUSED (flags), bool *no_add_attrs)
5847{
5848 if (TREE_CODE (*node) == FUNCTION_DECL
5849 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
5850 DECL_IS_MALLOC (*node) = 1;
5851 else
5852 {
7948ae37
OH
5853 warning (OPT_Wattributes, "%qs attribute ignored",
5854 IDENTIFIER_POINTER (name));
a1ab4c31
AC
5855 *no_add_attrs = true;
5856 }
5857
5858 return NULL_TREE;
5859}
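/* Usage sketch (not part of the original source):

     extern void *my_pool_alloc (unsigned long size) __attribute__ ((malloc));

   tells the optimizer that the returned pointer does not alias any other
   live object; the function name is made up, only the attribute syntax is
   real. */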
5860
5861/* Fake handler for attributes we don't properly support. */
5862
5863tree
5864fake_attribute_handler (tree * ARG_UNUSED (node),
5865 tree ARG_UNUSED (name),
5866 tree ARG_UNUSED (args),
5867 int ARG_UNUSED (flags),
5868 bool * ARG_UNUSED (no_add_attrs))
5869{
5870 return NULL_TREE;
5871}
5872
5873/* Handle a "type_generic" attribute. */
5874
5875static tree
5876handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
5877 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5878 bool * ARG_UNUSED (no_add_attrs))
5879{
a1ab4c31
AC
5880 /* Ensure we have a function type. */
5881 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
b4680ca1 5882
a1ab4c31 5883 /* Ensure we have a variadic function. */
dcf0c47e 5884 gcc_assert (!prototype_p (*node) || stdarg_p (*node));
a1ab4c31
AC
5885
5886 return NULL_TREE;
5887}
5888
2724e58f
OH
5889/* Handle a "vector_size" attribute; arguments as in
5890 struct attribute_spec.handler. */
5891
5892static tree
5893handle_vector_size_attribute (tree *node, tree name, tree args,
e63b36bd 5894 int ARG_UNUSED (flags), bool *no_add_attrs)
2724e58f 5895{
e63b36bd
EB
5896 tree type = *node;
5897 tree vector_type;
2724e58f
OH
5898
5899 *no_add_attrs = true;
5900
2724e58f
OH
5901 /* We need to provide for vector pointers, vector arrays, and
5902 functions returning vectors. For example:
5903
5904 __attribute__((vector_size(16))) short *foo;
5905
5906 In this case, the mode is SI, but the type being modified is
5907 HI, so we need to look further. */
2724e58f
OH
5908 while (POINTER_TYPE_P (type)
5909 || TREE_CODE (type) == FUNCTION_TYPE
132a5459 5910 || TREE_CODE (type) == ARRAY_TYPE)
2724e58f
OH
5911 type = TREE_TYPE (type);
5912
e63b36bd
EB
5913 vector_type = build_vector_type_for_size (type, TREE_VALUE (args), name);
5914 if (!vector_type)
5915 return NULL_TREE;
2724e58f
OH
5916
5917 /* Build back pointers if needed. */
e63b36bd 5918 *node = reconstruct_complex_type (*node, vector_type);
2724e58f
OH
5919
5920 return NULL_TREE;
5921}
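/* Usage sketch (not part of the original source): in the C family the same
   attribute is typically written as

     typedef int v4si __attribute__ ((vector_size (16)));  // 4 x 32-bit lanes

   i.e. a 16-byte vector whose element type is the type being modified. */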
5922
7948ae37
OH
5923/* Handle a "vector_type" attribute; arguments as in
5924 struct attribute_spec.handler. */
5925
5926static tree
5927handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
e63b36bd 5928 int ARG_UNUSED (flags), bool *no_add_attrs)
7948ae37 5929{
e63b36bd
EB
5930 tree type = *node;
5931 tree vector_type;
7948ae37
OH
5932
5933 *no_add_attrs = true;
5934
e63b36bd 5935 if (TREE_CODE (type) != ARRAY_TYPE)
7948ae37
OH
5936 {
5937 error ("attribute %qs applies to array types only",
5938 IDENTIFIER_POINTER (name));
5939 return NULL_TREE;
5940 }
5941
e63b36bd
EB
5942 vector_type = build_vector_type_for_array (type, name);
5943 if (!vector_type)
7948ae37
OH
5944 return NULL_TREE;
5945
e63b36bd
EB
5946 TYPE_REPRESENTATIVE_ARRAY (vector_type) = type;
5947 *node = vector_type;
7948ae37
OH
5948
5949 return NULL_TREE;
5950}
5951
a1ab4c31
AC
5952/* ----------------------------------------------------------------------- *
5953 * BUILTIN FUNCTIONS *
5954 * ----------------------------------------------------------------------- */
5955
5956/* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
5957 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
5958 if nonansi_p and flag_no_nonansi_builtin. */
5959
5960static void
5961def_builtin_1 (enum built_in_function fncode,
5962 const char *name,
5963 enum built_in_class fnclass,
5964 tree fntype, tree libtype,
5965 bool both_p, bool fallback_p,
5966 bool nonansi_p ATTRIBUTE_UNUSED,
5967 tree fnattrs, bool implicit_p)
5968{
5969 tree decl;
5970 const char *libname;
5971
5972 /* Preserve an already installed decl. It most likely was setup in advance
5973 (e.g. as part of the internal builtins) for specific reasons. */
e79983f4 5974 if (builtin_decl_explicit (fncode) != NULL_TREE)
a1ab4c31
AC
5975 return;
5976
5977 gcc_assert ((!both_p && !fallback_p)
5978 || !strncmp (name, "__builtin_",
5979 strlen ("__builtin_")));
5980
5981 libname = name + strlen ("__builtin_");
5982 decl = add_builtin_function (name, fntype, fncode, fnclass,
5983 (fallback_p ? libname : NULL),
5984 fnattrs);
5985 if (both_p)
5986 /* ??? This is normally further controlled by command-line options
5987 like -fno-builtin, but we don't have them for Ada. */
5988 add_builtin_function (libname, libtype, fncode, fnclass,
5989 NULL, fnattrs);
5990
e79983f4 5991 set_builtin_decl (fncode, decl, implicit_p);
a1ab4c31
AC
5992}
5993
5994static int flag_isoc94 = 0;
5995static int flag_isoc99 = 0;
22869a37 5996static int flag_isoc11 = 0;
a1ab4c31
AC
5997
5998/* Install what the common builtins.def offers. */
5999
6000static void
6001install_builtin_functions (void)
6002{
6003#define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
6004 NONANSI_P, ATTRS, IMPLICIT, COND) \
6005 if (NAME && COND) \
6006 def_builtin_1 (ENUM, NAME, CLASS, \
6007 builtin_types[(int) TYPE], \
6008 builtin_types[(int) LIBTYPE], \
6009 BOTH_P, FALLBACK_P, NONANSI_P, \
6010 built_in_attributes[(int) ATTRS], IMPLICIT);
6011#include "builtins.def"
6012#undef DEF_BUILTIN
6013}
6014
6015/* ----------------------------------------------------------------------- *
6016 * BUILTIN FUNCTIONS *
6017 * ----------------------------------------------------------------------- */
6018
6019/* Install the builtin functions we might need. */
6020
6021void
6022gnat_install_builtins (void)
6023{
6024 install_builtin_elementary_types ();
6025 install_builtin_function_types ();
6026 install_builtin_attributes ();
6027
6028 /* Install builtins used by generic middle-end pieces first. Some of these
6029 know about internal specificities and control attributes accordingly, for
6030 instance __builtin_alloca vs no-throw and -fstack-check. We will ignore
6031 the generic definition from builtins.def. */
384c400a 6032 build_common_builtin_nodes ();
a1ab4c31
AC
6033
6034 /* Now, install the target specific builtins, such as the AltiVec family on
6035 ppc, and the common set as exposed by builtins.def. */
6036 targetm.init_builtins ();
6037 install_builtin_functions ();
6038}
6039
6040#include "gt-ada-utils.h"
6041#include "gtype-ada.h"