]> gcc.gnu.org Git - gcc.git/blame - gcc/ada/gcc-interface/utils.c
decl.c (gnat_to_gnu_entity): Replace pointer types with references ones for functions...
[gcc.git] / gcc / ada / gcc-interface / utils.c
CommitLineData
a1ab4c31
AC
1/****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
e1872645 9 * Copyright (C) 1992-2015, Free Software Foundation, Inc. *
a1ab4c31
AC
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
25
a1ab4c31
AC
26#include "config.h"
27#include "system.h"
28#include "coretypes.h"
29#include "tm.h"
40e23961
MC
30#include "hash-set.h"
31#include "machmode.h"
32#include "vec.h"
33#include "double-int.h"
34#include "input.h"
35#include "alias.h"
36#include "symtab.h"
37#include "wide-int.h"
38#include "inchash.h"
a1ab4c31 39#include "tree.h"
40e23961 40#include "fold-const.h"
d8a2d370
DN
41#include "stringpool.h"
42#include "stor-layout.h"
43#include "attribs.h"
44#include "varasm.h"
a1ab4c31 45#include "flags.h"
a1ab4c31 46#include "toplev.h"
718f9c0f 47#include "diagnostic-core.h"
a1ab4c31
AC
48#include "output.h"
49#include "ggc.h"
50#include "debug.h"
51#include "convert.h"
52#include "target.h"
677f3fa8 53#include "common/common-target.h"
8713b7e4 54#include "langhooks.h"
c582198b
AM
55#include "hash-map.h"
56#include "is-a.h"
57#include "plugin-api.h"
c582198b
AM
58#include "hard-reg-set.h"
59#include "input.h"
60#include "function.h"
61#include "ipa-ref.h"
a1ab4c31 62#include "cgraph.h"
10e4d056 63#include "diagnostic.h"
7ee2468b 64#include "timevar.h"
8713b7e4 65#include "tree-dump.h"
a1ab4c31
AC
66#include "tree-inline.h"
67#include "tree-iterator.h"
a1ab4c31
AC
68
69#include "ada.h"
70#include "types.h"
71#include "atree.h"
72#include "elists.h"
73#include "namet.h"
74#include "nlists.h"
75#include "stringt.h"
76#include "uintp.h"
77#include "fe.h"
78#include "sinfo.h"
79#include "einfo.h"
80#include "ada-tree.h"
81#include "gigi.h"
82
a1ab4c31
AC
83/* If nonzero, pretend we are allocating at global level. */
84int force_global;
85
caa9d12a
EB
86/* The default alignment of "double" floating-point types, i.e. floating
87 point types whose size is equal to 64 bits, or 0 if this alignment is
88 not specifically capped. */
89int double_float_alignment;
90
91/* The default alignment of "double" or larger scalar types, i.e. scalar
92 types whose size is greater or equal to 64 bits, or 0 if this alignment
93 is not specifically capped. */
94int double_scalar_alignment;
95
24228312
AC
96/* True if floating-point arithmetics may use wider intermediate results. */
97bool fp_arith_may_widen = true;
98
a1ab4c31
AC
99/* Tree nodes for the various types and decls we create. */
100tree gnat_std_decls[(int) ADT_LAST];
101
102/* Functions to call for each of the possible raise reasons. */
103tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
104
ca8e13e8 105/* Likewise, but with extra info for each of the possible raise reasons. */
437f8c1e
AC
106tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];
107
a1ab4c31
AC
108/* Forward declarations for handlers of attributes. */
109static tree handle_const_attribute (tree *, tree, tree, int, bool *);
110static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
111static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
112static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
113static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
114static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
115static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
0d6e14fd 116static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
f087ea44 117static tree handle_always_inline_attribute (tree *, tree, tree, int, bool *);
a1ab4c31
AC
118static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
119static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
2724e58f 120static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
7948ae37 121static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);
a1ab4c31
AC
122
123/* Fake handler for attributes we don't properly support, typically because
124 they'd require dragging a lot of the common-c front-end circuitry. */
125static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
126
127/* Table of machine-independent internal attributes for Ada. We support
128 this minimal set of attributes to accommodate the needs of builtins. */
129const struct attribute_spec gnat_internal_attribute_table[] =
130{
62d784f7
KT
131 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
132 affects_type_identity } */
133 { "const", 0, 0, true, false, false, handle_const_attribute,
134 false },
135 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute,
136 false },
137 { "pure", 0, 0, true, false, false, handle_pure_attribute,
138 false },
139 { "no vops", 0, 0, true, false, false, handle_novops_attribute,
140 false },
141 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute,
142 false },
143 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute,
144 false },
145 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute,
146 false },
147 { "leaf", 0, 0, true, false, false, handle_leaf_attribute,
148 false },
f087ea44
AC
149 { "always_inline",0, 0, true, false, false, handle_always_inline_attribute,
150 false },
62d784f7
KT
151 { "malloc", 0, 0, true, false, false, handle_malloc_attribute,
152 false },
153 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute,
154 false },
155
156 { "vector_size", 1, 1, false, true, false, handle_vector_size_attribute,
157 false },
158 { "vector_type", 0, 0, false, true, false, handle_vector_type_attribute,
159 false },
160 { "may_alias", 0, 0, false, true, false, NULL, false },
a1ab4c31
AC
161
162 /* ??? format and format_arg are heavy and not supported, which actually
163 prevents support for stdio builtins, which we however declare as part
164 of the common builtins.def contents. */
62d784f7
KT
165 { "format", 3, 3, false, true, true, fake_attribute_handler, false },
166 { "format_arg", 1, 1, false, true, true, fake_attribute_handler, false },
a1ab4c31 167
62d784f7 168 { NULL, 0, 0, false, false, false, NULL, false }
a1ab4c31
AC
169};
170
171/* Associates a GNAT tree node to a GCC tree node. It is used in
172 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
173 of `save_gnu_tree' for more info. */
174static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
175
176#define GET_GNU_TREE(GNAT_ENTITY) \
177 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
178
179#define SET_GNU_TREE(GNAT_ENTITY,VAL) \
180 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
181
182#define PRESENT_GNU_TREE(GNAT_ENTITY) \
183 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
184
185/* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
186static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
187
188#define GET_DUMMY_NODE(GNAT_ENTITY) \
189 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
190
191#define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
192 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
193
194#define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
195 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
196
197/* This variable keeps a table for types for each precision so that we only
198 allocate each of them once. Signed and unsigned types are kept separate.
199
200 Note that these types are only used when fold-const requests something
201 special. Perhaps we should NOT share these types; we'll see how it
202 goes later. */
203static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
204
205/* Likewise for float types, but record these by mode. */
206static GTY(()) tree float_types[NUM_MACHINE_MODES];
207
208/* For each binding contour we allocate a binding_level structure to indicate
209 the binding depth. */
210
d1b38208 211struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
a1ab4c31
AC
212 /* The binding level containing this one (the enclosing binding level). */
213 struct gnat_binding_level *chain;
214 /* The BLOCK node for this level. */
215 tree block;
216 /* If nonzero, the setjmp buffer that needs to be updated for any
217 variable-sized definition within this context. */
218 tree jmpbuf_decl;
219};
220
221/* The binding level currently in effect. */
222static GTY(()) struct gnat_binding_level *current_binding_level;
223
224/* A chain of gnat_binding_level structures awaiting reuse. */
225static GTY((deletable)) struct gnat_binding_level *free_binding_level;
226
228ee426
EB
227/* The context to be used for global declarations. */
228static GTY(()) tree global_context;
229
a1ab4c31 230/* An array of global declarations. */
9771b263 231static GTY(()) vec<tree, va_gc> *global_decls;
a1ab4c31
AC
232
233/* An array of builtin function declarations. */
9771b263 234static GTY(()) vec<tree, va_gc> *builtin_decls;
a1ab4c31 235
a1ab4c31
AC
236/* A chain of unused BLOCK nodes. */
237static GTY((deletable)) tree free_block_chain;
238
842d4ee2
EB
239/* A hash table of padded types. It is modelled on the generic type
240 hash table in tree.c, which must thus be used as a reference. */
d242408f
TS
241
242struct GTY((for_user)) pad_type_hash {
842d4ee2
EB
243 unsigned long hash;
244 tree type;
245};
246
d242408f
TS
247struct pad_type_hasher : ggc_cache_hasher<pad_type_hash *>
248{
249 static inline hashval_t hash (pad_type_hash *t) { return t->hash; }
250 static bool equal (pad_type_hash *a, pad_type_hash *b);
251 static void handle_cache_entry (pad_type_hash *&);
252};
253
254static GTY ((cache))
255 hash_table<pad_type_hasher> *pad_type_hash_table;
842d4ee2 256
a1ab4c31
AC
257static tree merge_sizes (tree, tree, tree, bool, bool);
258static tree compute_related_constant (tree, tree);
259static tree split_plus (tree, tree *);
ef4bddc2 260static tree float_type_for_precision (int, machine_mode);
a1ab4c31 261static tree convert_to_fat_pointer (tree, tree);
5c475ba9 262static unsigned int scale_by_factor_of (tree, unsigned int);
a1ab4c31 263static bool potential_alignment_gap (tree, tree, tree);
9a30c7c4
AC
264
265/* A linked list used as a queue to defer the initialization of the
266 DECL_CONTEXT attribute of ..._DECL nodes and of the TYPE_CONTEXT attribute
267 of ..._TYPE nodes. */
268struct deferred_decl_context_node
269{
270 tree decl; /* The ..._DECL node to work on. */
271 Entity_Id gnat_scope; /* The corresponding entity's Scope attribute. */
272 int force_global; /* force_global value when pushing DECL. */
273 vec<tree, va_heap, vl_ptr> types; /* A list of ..._TYPE nodes to propagate the
274 context to. */
275 struct deferred_decl_context_node *next; /* The next queue item. */
276};
277
278static struct deferred_decl_context_node *deferred_decl_context_queue = NULL;
279
280/* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
281 feed it with the elaboration of GNAT_SCOPE. */
282static struct deferred_decl_context_node *
283add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global);
284
285/* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
286 feed it with the DECL_CONTEXT computed as part of N as soon as it is
287 computed. */
288static void add_deferred_type_context (struct deferred_decl_context_node *n,
289 tree type);
a1ab4c31 290\f
842d4ee2 291/* Initialize data structures of the utils.c module. */
a1ab4c31
AC
292
293void
842d4ee2 294init_gnat_utils (void)
a1ab4c31 295{
842d4ee2 296 /* Initialize the association of GNAT nodes to GCC trees. */
766090c2 297 associate_gnat_to_gnu = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);
842d4ee2
EB
298
299 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
766090c2 300 dummy_node_table = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);
842d4ee2
EB
301
302 /* Initialize the hash table of padded types. */
d242408f 303 pad_type_hash_table = hash_table<pad_type_hasher>::create_ggc (512);
a1ab4c31
AC
304}
305
842d4ee2 306/* Destroy data structures of the utils.c module. */
f04b8d69
EB
307
308void
842d4ee2 309destroy_gnat_utils (void)
f04b8d69 310{
842d4ee2 311 /* Destroy the association of GNAT nodes to GCC trees. */
f04b8d69
EB
312 ggc_free (associate_gnat_to_gnu);
313 associate_gnat_to_gnu = NULL;
f04b8d69 314
842d4ee2
EB
315 /* Destroy the association of GNAT nodes to GCC trees as dummies. */
316 ggc_free (dummy_node_table);
317 dummy_node_table = NULL;
318
319 /* Destroy the hash table of padded types. */
d242408f 320 pad_type_hash_table->empty ();
842d4ee2 321 pad_type_hash_table = NULL;
842d4ee2
EB
322}
323\f
a1d8cc63
EB
324/* GNAT_ENTITY is a GNAT tree node for an entity. Associate GNU_DECL, a GCC
325 tree node, with GNAT_ENTITY. If GNU_DECL is not a ..._DECL node, abort.
326 If NO_CHECK is true, the latter check is suppressed.
a1ab4c31 327
a1d8cc63 328 If GNU_DECL is zero, reset a previous association. */
a1ab4c31
AC
329
330void
331save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
332{
333 /* Check that GNAT_ENTITY is not already defined and that it is being set
a1d8cc63 334 to something which is a decl. If that is not the case, this usually
a1ab4c31
AC
335 means GNAT_ENTITY is defined twice, but occasionally is due to some
336 Gigi problem. */
337 gcc_assert (!(gnu_decl
338 && (PRESENT_GNU_TREE (gnat_entity)
339 || (!no_check && !DECL_P (gnu_decl)))));
340
341 SET_GNU_TREE (gnat_entity, gnu_decl);
342}
343
a1d8cc63
EB
344/* GNAT_ENTITY is a GNAT tree node for an entity. Return the GCC tree node
345 that was associated with it. If there is no such tree node, abort.
a1ab4c31
AC
346
347 In some cases, such as delayed elaboration or expressions that need to
348 be elaborated only once, GNAT_ENTITY is really not an entity. */
349
350tree
351get_gnu_tree (Entity_Id gnat_entity)
352{
353 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
354 return GET_GNU_TREE (gnat_entity);
355}
356
357/* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
358
359bool
360present_gnu_tree (Entity_Id gnat_entity)
361{
362 return PRESENT_GNU_TREE (gnat_entity);
363}
364\f
a1ab4c31
AC
365/* Make a dummy type corresponding to GNAT_TYPE. */
366
367tree
368make_dummy_type (Entity_Id gnat_type)
369{
bf0b0e5e 370 Entity_Id gnat_equiv = Gigi_Equivalent_Type (Underlying_Type (gnat_type));
a1ab4c31
AC
371 tree gnu_type;
372
a1ab4c31
AC
373 /* If there was no equivalent type (can only happen when just annotating
374 types) or underlying type, go back to the original type. */
bf0b0e5e
AC
375 if (No (gnat_equiv))
376 gnat_equiv = gnat_type;
a1ab4c31
AC
377
378 /* If it there already a dummy type, use that one. Else make one. */
bf0b0e5e
AC
379 if (PRESENT_DUMMY_NODE (gnat_equiv))
380 return GET_DUMMY_NODE (gnat_equiv);
a1ab4c31
AC
381
382 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
383 an ENUMERAL_TYPE. */
bf0b0e5e
AC
384 gnu_type = make_node (Is_Record_Type (gnat_equiv)
385 ? tree_code_for_record_type (gnat_equiv)
a1ab4c31
AC
386 : ENUMERAL_TYPE);
387 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
388 TYPE_DUMMY_P (gnu_type) = 1;
10069d53
EB
389 TYPE_STUB_DECL (gnu_type)
390 = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
bf0b0e5e 391 if (Is_By_Reference_Type (gnat_equiv))
a0b8b1b7 392 TYPE_BY_REFERENCE_P (gnu_type) = 1;
a1ab4c31 393
bf0b0e5e 394 SET_DUMMY_NODE (gnat_equiv, gnu_type);
a1ab4c31
AC
395
396 return gnu_type;
397}
e3edbd56
EB
398
399/* Return the dummy type that was made for GNAT_TYPE, if any. */
400
401tree
402get_dummy_type (Entity_Id gnat_type)
403{
404 return GET_DUMMY_NODE (gnat_type);
405}
406
407/* Build dummy fat and thin pointer types whose designated type is specified
408 by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter. */
409
410void
411build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
412{
413 tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
414 tree gnu_fat_type, fields, gnu_object_type;
415
416 gnu_template_type = make_node (RECORD_TYPE);
417 TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
418 TYPE_DUMMY_P (gnu_template_type) = 1;
419 gnu_ptr_template = build_pointer_type (gnu_template_type);
420
421 gnu_array_type = make_node (ENUMERAL_TYPE);
422 TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
423 TYPE_DUMMY_P (gnu_array_type) = 1;
424 gnu_ptr_array = build_pointer_type (gnu_array_type);
425
426 gnu_fat_type = make_node (RECORD_TYPE);
427 /* Build a stub DECL to trigger the special processing for fat pointer types
428 in gnat_pushdecl. */
429 TYPE_NAME (gnu_fat_type)
430 = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
431 gnu_fat_type);
432 fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
433 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
434 DECL_CHAIN (fields)
435 = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
436 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
437 finish_fat_pointer_type (gnu_fat_type, fields);
438 SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
439 /* Suppress debug info until after the type is completed. */
440 TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;
441
442 gnu_object_type = make_node (RECORD_TYPE);
443 TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
444 TYPE_DUMMY_P (gnu_object_type) = 1;
445
446 TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
447 TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
448}
a1ab4c31 449\f
c99c0026 450/* Return true if we are in the global binding level. */
a1ab4c31 451
c99c0026 452bool
a1ab4c31
AC
453global_bindings_p (void)
454{
c99c0026 455 return force_global || current_function_decl == NULL_TREE;
a1ab4c31
AC
456}
457
a09d56d8 458/* Enter a new binding level. */
a1ab4c31
AC
459
460void
c6bd4220 461gnat_pushlevel (void)
a1ab4c31
AC
462{
463 struct gnat_binding_level *newlevel = NULL;
464
465 /* Reuse a struct for this binding level, if there is one. */
466 if (free_binding_level)
467 {
468 newlevel = free_binding_level;
469 free_binding_level = free_binding_level->chain;
470 }
471 else
766090c2 472 newlevel = ggc_alloc<gnat_binding_level> ();
a1ab4c31
AC
473
474 /* Use a free BLOCK, if any; otherwise, allocate one. */
475 if (free_block_chain)
476 {
477 newlevel->block = free_block_chain;
478 free_block_chain = BLOCK_CHAIN (free_block_chain);
479 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
480 }
481 else
482 newlevel->block = make_node (BLOCK);
483
484 /* Point the BLOCK we just made to its parent. */
485 if (current_binding_level)
486 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
487
a09d56d8
EB
488 BLOCK_VARS (newlevel->block) = NULL_TREE;
489 BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
a1ab4c31
AC
490 TREE_USED (newlevel->block) = 1;
491
a09d56d8 492 /* Add this level to the front of the chain (stack) of active levels. */
a1ab4c31
AC
493 newlevel->chain = current_binding_level;
494 newlevel->jmpbuf_decl = NULL_TREE;
495 current_binding_level = newlevel;
496}
497
498/* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
499 and point FNDECL to this BLOCK. */
500
501void
502set_current_block_context (tree fndecl)
503{
504 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
505 DECL_INITIAL (fndecl) = current_binding_level->block;
a09d56d8 506 set_block_for_group (current_binding_level->block);
a1ab4c31
AC
507}
508
509/* Set the jmpbuf_decl for the current binding level to DECL. */
510
511void
512set_block_jmpbuf_decl (tree decl)
513{
514 current_binding_level->jmpbuf_decl = decl;
515}
516
517/* Get the jmpbuf_decl, if any, for the current binding level. */
518
519tree
c6bd4220 520get_block_jmpbuf_decl (void)
a1ab4c31
AC
521{
522 return current_binding_level->jmpbuf_decl;
523}
524
a09d56d8 525/* Exit a binding level. Set any BLOCK into the current code group. */
a1ab4c31
AC
526
527void
c6bd4220 528gnat_poplevel (void)
a1ab4c31
AC
529{
530 struct gnat_binding_level *level = current_binding_level;
531 tree block = level->block;
532
533 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
72ac05b0 534 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
a1ab4c31
AC
535
536 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
537 are no variables free the block and merge its subblocks into those of its
a09d56d8 538 parent block. Otherwise, add it to the list of its parent. */
a1ab4c31
AC
539 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
540 ;
541 else if (BLOCK_VARS (block) == NULL_TREE)
542 {
543 BLOCK_SUBBLOCKS (level->chain->block)
61e46a7d
NF
544 = block_chainon (BLOCK_SUBBLOCKS (block),
545 BLOCK_SUBBLOCKS (level->chain->block));
a1ab4c31
AC
546 BLOCK_CHAIN (block) = free_block_chain;
547 free_block_chain = block;
548 }
549 else
550 {
551 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
552 BLOCK_SUBBLOCKS (level->chain->block) = block;
553 TREE_USED (block) = 1;
554 set_block_for_group (block);
555 }
556
557 /* Free this binding structure. */
558 current_binding_level = level->chain;
559 level->chain = free_binding_level;
560 free_binding_level = level;
561}
562
2231f17f
EB
563/* Exit a binding level and discard the associated BLOCK. */
564
565void
566gnat_zaplevel (void)
567{
568 struct gnat_binding_level *level = current_binding_level;
569 tree block = level->block;
570
571 BLOCK_CHAIN (block) = free_block_chain;
572 free_block_chain = block;
573
574 /* Free this binding structure. */
575 current_binding_level = level->chain;
576 level->chain = free_binding_level;
577 free_binding_level = level;
578}
a1ab4c31 579\f
4708440c
EB
580/* Set the context of TYPE and its parallel types (if any) to CONTEXT. */
581
582static void
583gnat_set_type_context (tree type, tree context)
584{
585 tree decl = TYPE_STUB_DECL (type);
586
587 TYPE_CONTEXT (type) = context;
588
589 while (decl && DECL_PARALLEL_TYPE (decl))
590 {
24d4b3d5
AC
591 tree parallel_type = DECL_PARALLEL_TYPE (decl);
592
593 /* Give a context to the parallel types and their stub decl, if any.
594 Some parallel types seems to be present in multiple parallel type
595 chains, so don't mess with their context if they already have one. */
596 if (TYPE_CONTEXT (parallel_type) == NULL_TREE)
597 {
598 if (TYPE_STUB_DECL (parallel_type) != NULL_TREE)
599 DECL_CONTEXT (TYPE_STUB_DECL (parallel_type)) = context;
600 TYPE_CONTEXT (parallel_type) = context;
601 }
602
4708440c
EB
603 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
604 }
605}
606
9a30c7c4
AC
607/* Return the innermost scope, starting at GNAT_NODE, we are be interested in
608 the debug info, or Empty if there is no such scope. If not NULL, set
609 IS_SUBPROGRAM to whether the returned entity is a subprogram. */
610
611static Entity_Id
612get_debug_scope (Node_Id gnat_node, bool *is_subprogram)
613{
614 Entity_Id gnat_entity;
615
616 if (is_subprogram)
617 *is_subprogram = false;
618
619 if (Nkind (gnat_node) == N_Defining_Identifier)
620 gnat_entity = Scope (gnat_node);
621 else
622 return Empty;
623
624 while (Present (gnat_entity))
625 {
626 switch (Ekind (gnat_entity))
627 {
628 case E_Function:
629 case E_Procedure:
630 if (Present (Protected_Body_Subprogram (gnat_entity)))
631 gnat_entity = Protected_Body_Subprogram (gnat_entity);
632
633 /* If the scope is a subprogram, then just rely on
634 current_function_decl, so that we don't have to defer
635 anything. This is needed because other places rely on the
636 validity of the DECL_CONTEXT attribute of FUNCTION_DECL nodes. */
637 if (is_subprogram)
638 *is_subprogram = true;
639 return gnat_entity;
640
641 case E_Record_Type:
642 case E_Record_Subtype:
643 return gnat_entity;
644
645 default:
646 /* By default, we are not interested in this particular scope: go to
647 the outer one. */
648 break;
649 }
650 gnat_entity = Scope (gnat_entity);
651 }
652 return Empty;
653}
654
655/* If N is NULL, set TYPE's context to CONTEXT. Defer this to the processing of
656 N otherwise. */
657
658static void
659defer_or_set_type_context (tree type,
660 tree context,
661 struct deferred_decl_context_node *n)
662{
663 if (n)
664 add_deferred_type_context (n, type);
665 else
666 gnat_set_type_context (type, context);
667}
668
669/* Return global_context. Create it if needed, first. */
670
671static tree
672get_global_context (void)
673{
674 if (!global_context)
675 global_context = build_translation_unit_decl (NULL_TREE);
676 return global_context;
677}
678
228ee426
EB
679/* Record DECL as belonging to the current lexical scope and use GNAT_NODE
680 for location information and flag propagation. */
a1ab4c31
AC
681
682void
683gnat_pushdecl (tree decl, Node_Id gnat_node)
684{
9a30c7c4
AC
685 tree context = NULL_TREE;
686 struct deferred_decl_context_node *deferred_decl_context = NULL;
687
688 /* If explicitely asked to make DECL global or if it's an imported nested
689 object, short-circuit the regular Scope-based context computation. */
690 if (!((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || force_global == 1))
a1ab4c31 691 {
9a30c7c4
AC
692 /* Rely on the GNAT scope, or fallback to the current_function_decl if
693 the GNAT scope reached the global scope, if it reached a subprogram
694 or the declaration is a subprogram or a variable (for them we skip
695 intermediate context types because the subprogram body elaboration
696 machinery and the inliner both expect a subprogram context).
697
698 Falling back to current_function_decl is necessary for implicit
699 subprograms created by gigi, such as the elaboration subprograms. */
700 bool context_is_subprogram = false;
701 const Entity_Id gnat_scope
702 = get_debug_scope (gnat_node, &context_is_subprogram);
703
704 if (Present (gnat_scope)
705 && !context_is_subprogram
706 && TREE_CODE (decl) != FUNCTION_DECL
707 && TREE_CODE (decl) != VAR_DECL)
708 /* Always assume the scope has not been elaborated, thus defer the
709 context propagation to the time its elaboration will be
710 available. */
711 deferred_decl_context
712 = add_deferred_decl_context (decl, gnat_scope, force_global);
713
714 /* External declarations (when force_global > 0) may not be in a
715 local context. */
716 else if (current_function_decl != NULL_TREE && force_global == 0)
717 context = current_function_decl;
a1ab4c31
AC
718 }
719
9a30c7c4
AC
720 /* If either we are forced to be in global mode or if both the GNAT scope and
721 the current_function_decl did not help determining the context, use the
722 global scope. */
723 if (!deferred_decl_context && context == NULL_TREE)
724 context = get_global_context ();
725
726 /* Functions imported in another function are not really nested.
727 For really nested functions mark them initially as needing
728 a static chain for uses of that flag before unnesting;
729 lower_nested_functions will then recompute it. */
730 if (TREE_CODE (decl) == FUNCTION_DECL
731 && !TREE_PUBLIC (decl)
732 && context != NULL_TREE
733 && (TREE_CODE (context) == FUNCTION_DECL
734 || decl_function_context (context) != NULL_TREE))
735 DECL_STATIC_CHAIN (decl) = 1;
736
737 if (!deferred_decl_context)
738 DECL_CONTEXT (decl) = context;
739
228ee426 740 TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));
a1ab4c31
AC
741
742 /* Set the location of DECL and emit a declaration for it. */
e8fa3dcd 743 if (Present (gnat_node) && !renaming_from_generic_instantiation_p (gnat_node))
a1ab4c31 744 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
228ee426 745
a1ab4c31
AC
746 add_decl_expr (decl, gnat_node);
747
748 /* Put the declaration on the list. The list of declarations is in reverse
2231f17f
EB
749 order. The list will be reversed later. Put global declarations in the
750 globals list and local ones in the current block. But skip TYPE_DECLs
751 for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
752 with the debugger and aren't needed anyway. */
753 if (!(TREE_CODE (decl) == TYPE_DECL
754 && TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
a1ab4c31 755 {
9083aacd 756 if (DECL_EXTERNAL (decl))
a1ab4c31 757 {
a1ab4c31 758 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
9771b263 759 vec_safe_push (builtin_decls, decl);
a1ab4c31 760 }
9083aacd 761 else if (global_bindings_p ())
9771b263 762 vec_safe_push (global_decls, decl);
9083aacd 763 else
a1ab4c31 764 {
a963da4d
EB
765 DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
766 BLOCK_VARS (current_binding_level->block) = decl;
a1ab4c31
AC
767 }
768 }
769
aef308d0 770 /* For the declaration of a type, set its name either if it isn't already
10069d53 771 set or if the previous type name was not derived from a source name.
a1ab4c31 772 We'd rather have the type named with a real name and all the pointer
aef308d0
PMR
773 types to the same object have the same node, except when the names are
774 both derived from source names. */
a1ab4c31
AC
775 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
776 {
777 tree t = TREE_TYPE (decl);
778
79714815
EB
779 /* Array and pointer types aren't tagged types in the C sense so we need
780 to generate a typedef in DWARF for them and make sure it is preserved,
781 unless the type is artificial. */
aef308d0 782 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
79714815
EB
783 && ((TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != POINTER_TYPE)
784 || DECL_ARTIFICIAL (decl)))
785 ;
786 /* For array and pointer types, create the DECL_ORIGINAL_TYPE that will
787 generate the typedef in DWARF. Also do that for fat pointer types
788 because, even though they are tagged types in the C sense, they are
789 still XUP types attached to the base array type at this point. */
aef308d0 790 else if (!DECL_ARTIFICIAL (decl)
79714815
EB
791 && (TREE_CODE (t) == ARRAY_TYPE
792 || TREE_CODE (t) == POINTER_TYPE
793 || TYPE_IS_FAT_POINTER_P (t)))
a1ab4c31 794 {
aef308d0 795 tree tt;
1e039275
EB
796 /* ??? Copy and original type are not supposed to be variant but we
797 really need a variant for the placeholder machinery to work. */
aef308d0
PMR
798 if (TYPE_IS_FAT_POINTER_P (t))
799 tt = build_variant_type_copy (t);
800 else
1e039275
EB
801 {
802 /* TYPE_NEXT_PTR_TO is a chain of main variants. */
803 tt = build_distinct_type_copy (TYPE_MAIN_VARIANT (t));
79714815
EB
804 if (TREE_CODE (t) == POINTER_TYPE)
805 TYPE_NEXT_PTR_TO (TYPE_MAIN_VARIANT (t)) = tt;
1e039275
EB
806 tt = build_qualified_type (tt, TYPE_QUALS (t));
807 }
a1ab4c31 808 TYPE_NAME (tt) = decl;
9a30c7c4
AC
809 defer_or_set_type_context (tt,
810 DECL_CONTEXT (decl),
811 deferred_decl_context);
a1ab4c31
AC
812 TREE_USED (tt) = TREE_USED (t);
813 TREE_TYPE (decl) = tt;
79714815 814 if (TYPE_NAME (t)
aef308d0
PMR
815 && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
816 && DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
40c88b94
EB
817 DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
818 else
819 DECL_ORIGINAL_TYPE (decl) = t;
79714815
EB
820 /* Array types need to have a name so that they can be related to
821 their GNAT encodings. */
822 if (TREE_CODE (t) == ARRAY_TYPE && !TYPE_NAME (t))
823 TYPE_NAME (t) = DECL_NAME (decl);
e3edbd56 824 t = NULL_TREE;
a1ab4c31 825 }
79714815 826 else if (TYPE_NAME (t)
aef308d0
PMR
827 && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
828 && DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
a1ab4c31
AC
829 ;
830 else
831 t = NULL_TREE;
832
79714815
EB
833 /* Propagate the name to all the variants, this is needed for the type
834 qualifiers machinery to work properly (see check_qualified_type).
835 Also propagate the context to them. Note that it will be propagated
836 to all parallel types too thanks to gnat_set_type_context. */
a1ab4c31
AC
837 if (t)
838 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
79714815
EB
839 /* ??? Because of the previous kludge, we can have variants of fat
840 pointer types with different names. */
841 if (!(TYPE_IS_FAT_POINTER_P (t)
842 && TYPE_NAME (t)
843 && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
d4d05b52
EB
844 {
845 TYPE_NAME (t) = decl;
9a30c7c4
AC
846 defer_or_set_type_context (t,
847 DECL_CONTEXT (decl),
848 deferred_decl_context);
d4d05b52 849 }
a1ab4c31
AC
850 }
851}
852\f
842d4ee2
EB
853/* Create a record type that contains a SIZE bytes long field of TYPE with a
854 starting bit position so that it is aligned to ALIGN bits, and leaving at
855 least ROOM bytes free before the field. BASE_ALIGN is the alignment the
0746af5e
EB
856 record is guaranteed to get. GNAT_NODE is used for the position of the
857 associated TYPE_DECL. */
842d4ee2
EB
858
859tree
860make_aligning_type (tree type, unsigned int align, tree size,
0746af5e 861 unsigned int base_align, int room, Node_Id gnat_node)
842d4ee2
EB
862{
863 /* We will be crafting a record type with one field at a position set to be
864 the next multiple of ALIGN past record'address + room bytes. We use a
865 record placeholder to express record'address. */
866 tree record_type = make_node (RECORD_TYPE);
867 tree record = build0 (PLACEHOLDER_EXPR, record_type);
868
869 tree record_addr_st
870 = convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));
871
872 /* The diagram below summarizes the shape of what we manipulate:
873
874 <--------- pos ---------->
875 { +------------+-------------+-----------------+
876 record =>{ |############| ... | field (type) |
877 { +------------+-------------+-----------------+
878 |<-- room -->|<- voffset ->|<---- size ----->|
879 o o
880 | |
881 record_addr vblock_addr
882
883 Every length is in sizetype bytes there, except "pos" which has to be
884 set as a bit position in the GCC tree for the record. */
885 tree room_st = size_int (room);
886 tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
887 tree voffset_st, pos, field;
888
9dba4b55 889 tree name = TYPE_IDENTIFIER (type);
842d4ee2 890
842d4ee2
EB
891 name = concat_name (name, "ALIGN");
892 TYPE_NAME (record_type) = name;
893
894 /* Compute VOFFSET and then POS. The next byte position multiple of some
895 alignment after some address is obtained by "and"ing the alignment minus
896 1 with the two's complement of the address. */
897 voffset_st = size_binop (BIT_AND_EXPR,
898 fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
899 size_int ((align / BITS_PER_UNIT) - 1));
900
901 /* POS = (ROOM + VOFFSET) * BIT_PER_UNIT, in bitsizetype. */
902 pos = size_binop (MULT_EXPR,
903 convert (bitsizetype,
904 size_binop (PLUS_EXPR, room_st, voffset_st)),
905 bitsize_unit_node);
906
907 /* Craft the GCC record representation. We exceptionally do everything
908 manually here because 1) our generic circuitry is not quite ready to
909 handle the complex position/size expressions we are setting up, 2) we
910 have a strong simplifying factor at hand: we know the maximum possible
911 value of voffset, and 3) we have to set/reset at least the sizes in
912 accordance with this maximum value anyway, as we need them to convey
913 what should be "alloc"ated for this type.
914
915 Use -1 as the 'addressable' indication for the field to prevent the
916 creation of a bitfield. We don't need one, it would have damaging
917 consequences on the alignment computation, and create_field_decl would
918 make one without this special argument, for instance because of the
919 complex position expression. */
920 field = create_field_decl (get_identifier ("F"), type, record_type, size,
921 pos, 1, -1);
922 TYPE_FIELDS (record_type) = field;
923
924 TYPE_ALIGN (record_type) = base_align;
925 TYPE_USER_ALIGN (record_type) = 1;
926
927 TYPE_SIZE (record_type)
928 = size_binop (PLUS_EXPR,
929 size_binop (MULT_EXPR, convert (bitsizetype, size),
930 bitsize_unit_node),
931 bitsize_int (align + room * BITS_PER_UNIT));
932 TYPE_SIZE_UNIT (record_type)
933 = size_binop (PLUS_EXPR, size,
934 size_int (room + align / BITS_PER_UNIT));
935
936 SET_TYPE_MODE (record_type, BLKmode);
937 relate_alias_sets (record_type, type, ALIAS_SET_COPY);
938
939 /* Declare it now since it will never be declared otherwise. This is
940 necessary to ensure that its subtrees are properly marked. */
74746d49 941 create_type_decl (name, record_type, true, false, gnat_node);
842d4ee2
EB
942
943 return record_type;
944}
945
946/* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
947 as the field type of a packed record if IN_RECORD is true, or as the
948 component type of a packed array if IN_RECORD is false. See if we can
949 rewrite it either as a type that has a non-BLKmode, which we can pack
950 tighter in the packed record case, or as a smaller type. If so, return
951 the new type. If not, return the original type. */
952
953tree
954make_packable_type (tree type, bool in_record)
955{
ae7e9ddd 956 unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type));
842d4ee2
EB
957 unsigned HOST_WIDE_INT new_size;
958 tree new_type, old_field, field_list = NULL_TREE;
959 unsigned int align;
960
961 /* No point in doing anything if the size is zero. */
962 if (size == 0)
963 return type;
964
965 new_type = make_node (TREE_CODE (type));
966
967 /* Copy the name and flags from the old type to that of the new.
968 Note that we rely on the pointer equality created here for
969 TYPE_NAME to look through conversions in various places. */
970 TYPE_NAME (new_type) = TYPE_NAME (type);
971 TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
972 TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
973 if (TREE_CODE (type) == RECORD_TYPE)
974 TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);
975
976 /* If we are in a record and have a small size, set the alignment to
977 try for an integral mode. Otherwise set it to try for a smaller
978 type with BLKmode. */
979 if (in_record && size <= MAX_FIXED_MODE_SIZE)
980 {
981 align = ceil_pow2 (size);
982 TYPE_ALIGN (new_type) = align;
983 new_size = (size + align - 1) & -align;
984 }
985 else
986 {
987 unsigned HOST_WIDE_INT align;
988
989 /* Do not try to shrink the size if the RM size is not constant. */
990 if (TYPE_CONTAINS_TEMPLATE_P (type)
cc269bb6 991 || !tree_fits_uhwi_p (TYPE_ADA_SIZE (type)))
842d4ee2
EB
992 return type;
993
994 /* Round the RM size up to a unit boundary to get the minimal size
995 for a BLKmode record. Give up if it's already the size. */
eb1ce453 996 new_size = tree_to_uhwi (TYPE_ADA_SIZE (type));
842d4ee2
EB
997 new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
998 if (new_size == size)
999 return type;
1000
1001 align = new_size & -new_size;
1002 TYPE_ALIGN (new_type) = MIN (TYPE_ALIGN (type), align);
1003 }
1004
1005 TYPE_USER_ALIGN (new_type) = 1;
1006
1007 /* Now copy the fields, keeping the position and size as we don't want
1008 to change the layout by propagating the packedness downwards. */
1009 for (old_field = TYPE_FIELDS (type); old_field;
1010 old_field = DECL_CHAIN (old_field))
1011 {
1012 tree new_field_type = TREE_TYPE (old_field);
1013 tree new_field, new_size;
1014
1015 if (RECORD_OR_UNION_TYPE_P (new_field_type)
1016 && !TYPE_FAT_POINTER_P (new_field_type)
cc269bb6 1017 && tree_fits_uhwi_p (TYPE_SIZE (new_field_type)))
842d4ee2
EB
1018 new_field_type = make_packable_type (new_field_type, true);
1019
1020 /* However, for the last field in a not already packed record type
1021 that is of an aggregate type, we need to use the RM size in the
1022 packable version of the record type, see finish_record_type. */
1023 if (!DECL_CHAIN (old_field)
1024 && !TYPE_PACKED (type)
1025 && RECORD_OR_UNION_TYPE_P (new_field_type)
1026 && !TYPE_FAT_POINTER_P (new_field_type)
1027 && !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
1028 && TYPE_ADA_SIZE (new_field_type))
1029 new_size = TYPE_ADA_SIZE (new_field_type);
1030 else
1031 new_size = DECL_SIZE (old_field);
1032
1033 new_field
1034 = create_field_decl (DECL_NAME (old_field), new_field_type, new_type,
1035 new_size, bit_position (old_field),
1036 TYPE_PACKED (type),
1037 !DECL_NONADDRESSABLE_P (old_field));
1038
1039 DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (old_field);
1040 SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, old_field);
1041 if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
1042 DECL_QUALIFIER (new_field) = DECL_QUALIFIER (old_field);
1043
1044 DECL_CHAIN (new_field) = field_list;
1045 field_list = new_field;
1046 }
1047
1048 finish_record_type (new_type, nreverse (field_list), 2, false);
1049 relate_alias_sets (new_type, type, ALIAS_SET_COPY);
44e9e3ec
EB
1050 if (TYPE_STUB_DECL (type))
1051 SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
1052 DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));
842d4ee2
EB
1053
1054 /* If this is a padding record, we never want to make the size smaller
1055 than what was specified. For QUAL_UNION_TYPE, also copy the size. */
1056 if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
1057 {
1058 TYPE_SIZE (new_type) = TYPE_SIZE (type);
1059 TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
1060 new_size = size;
1061 }
1062 else
1063 {
1064 TYPE_SIZE (new_type) = bitsize_int (new_size);
1065 TYPE_SIZE_UNIT (new_type)
1066 = size_int ((new_size + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
1067 }
1068
1069 if (!TYPE_CONTAINS_TEMPLATE_P (type))
1070 SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));
1071
1072 compute_record_mode (new_type);
1073
1074 /* Try harder to get a packable type if necessary, for example
1075 in case the record itself contains a BLKmode field. */
1076 if (in_record && TYPE_MODE (new_type) == BLKmode)
1077 SET_TYPE_MODE (new_type,
1078 mode_for_size_tree (TYPE_SIZE (new_type), MODE_INT, 1));
1079
1080 /* If neither the mode nor the size has shrunk, return the old type. */
1081 if (TYPE_MODE (new_type) == BLKmode && new_size >= size)
1082 return type;
1083
1084 return new_type;
1085}
1086
1087/* Given a type TYPE, return a new type whose size is appropriate for SIZE.
1088 If TYPE is the best type, return it. Otherwise, make a new type. We
1089 only support new integral and pointer types. FOR_BIASED is true if
1090 we are making a biased type. */
1091
1092tree
1093make_type_from_size (tree type, tree size_tree, bool for_biased)
1094{
1095 unsigned HOST_WIDE_INT size;
1096 bool biased_p;
1097 tree new_type;
1098
1099 /* If size indicates an error, just return TYPE to avoid propagating
1100 the error. Likewise if it's too large to represent. */
cc269bb6 1101 if (!size_tree || !tree_fits_uhwi_p (size_tree))
842d4ee2
EB
1102 return type;
1103
ae7e9ddd 1104 size = tree_to_uhwi (size_tree);
842d4ee2
EB
1105
1106 switch (TREE_CODE (type))
1107 {
1108 case INTEGER_TYPE:
1109 case ENUMERAL_TYPE:
1110 case BOOLEAN_TYPE:
1111 biased_p = (TREE_CODE (type) == INTEGER_TYPE
1112 && TYPE_BIASED_REPRESENTATION_P (type));
1113
1114 /* Integer types with precision 0 are forbidden. */
1115 if (size == 0)
1116 size = 1;
1117
1118 /* Only do something if the type isn't a packed array type and doesn't
1119 already have the proper size and the size isn't too large. */
1120 if (TYPE_IS_PACKED_ARRAY_TYPE_P (type)
1121 || (TYPE_PRECISION (type) == size && biased_p == for_biased)
1122 || size > LONG_LONG_TYPE_SIZE)
1123 break;
1124
1125 biased_p |= for_biased;
1126 if (TYPE_UNSIGNED (type) || biased_p)
1127 new_type = make_unsigned_type (size);
1128 else
1129 new_type = make_signed_type (size);
1130 TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
1eb58520
AC
1131 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_MIN_VALUE (type));
1132 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_MAX_VALUE (type));
842d4ee2
EB
1133 /* Copy the name to show that it's essentially the same type and
1134 not a subrange type. */
1135 TYPE_NAME (new_type) = TYPE_NAME (type);
1136 TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
1137 SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
1138 return new_type;
1139
1140 case RECORD_TYPE:
1141 /* Do something if this is a fat pointer, in which case we
1142 may need to return the thin pointer. */
1143 if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
1144 {
ef4bddc2 1145 machine_mode p_mode = mode_for_size (size, MODE_INT, 0);
842d4ee2
EB
1146 if (!targetm.valid_pointer_mode (p_mode))
1147 p_mode = ptr_mode;
1148 return
1149 build_pointer_type_for_mode
1150 (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
1151 p_mode, 0);
1152 }
1153 break;
1154
1155 case POINTER_TYPE:
1156 /* Only do something if this is a thin pointer, in which case we
1157 may need to return the fat pointer. */
1158 if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
1159 return
1160 build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
1161 break;
1162
1163 default:
1164 break;
1165 }
1166
1167 return type;
1168}
1169
1170/* See if the data pointed to by the hash table slot is marked. */
1171
d242408f
TS
1172void
1173pad_type_hasher::handle_cache_entry (pad_type_hash *&t)
842d4ee2 1174{
d242408f
TS
1175 extern void gt_ggc_mx (pad_type_hash *&);
1176 if (t == HTAB_EMPTY_ENTRY || t == HTAB_DELETED_ENTRY)
1177 return;
1178 else if (ggc_marked_p (t->type))
1179 gt_ggc_mx (t);
1180 else
1181 t = static_cast<pad_type_hash *> (HTAB_DELETED_ENTRY);
842d4ee2
EB
1182}
1183
d242408f 1184/* Return true iff the padded types are equivalent. */
842d4ee2 1185
d242408f
TS
1186bool
1187pad_type_hasher::equal (pad_type_hash *t1, pad_type_hash *t2)
842d4ee2 1188{
842d4ee2
EB
1189 tree type1, type2;
1190
1191 if (t1->hash != t2->hash)
1192 return 0;
1193
1194 type1 = t1->type;
1195 type2 = t2->type;
1196
1197 /* We consider that the padded types are equivalent if they pad the same
1198 type and have the same size, alignment and RM size. Taking the mode
1199 into account is redundant since it is determined by the others. */
1200 return
1201 TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
1202 && TYPE_SIZE (type1) == TYPE_SIZE (type2)
1203 && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
1204 && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2);
1205}
1206
5cb7516d
EB
1207/* Look up the padded TYPE in the hash table and return its canonical version
1208 if it exists; otherwise, insert it into the hash table. */
1209
1210static tree
1211lookup_and_insert_pad_type (tree type)
1212{
1213 hashval_t hashcode;
1214 struct pad_type_hash in, *h;
5cb7516d
EB
1215
1216 hashcode
1217 = iterative_hash_object (TYPE_HASH (TREE_TYPE (TYPE_FIELDS (type))), 0);
1218 hashcode = iterative_hash_expr (TYPE_SIZE (type), hashcode);
1219 hashcode = iterative_hash_hashval_t (TYPE_ALIGN (type), hashcode);
1220 hashcode = iterative_hash_expr (TYPE_ADA_SIZE (type), hashcode);
1221
1222 in.hash = hashcode;
1223 in.type = type;
d242408f 1224 h = pad_type_hash_table->find_with_hash (&in, hashcode);
5cb7516d
EB
1225 if (h)
1226 return h->type;
1227
1228 h = ggc_alloc<pad_type_hash> ();
1229 h->hash = hashcode;
1230 h->type = type;
d242408f 1231 *pad_type_hash_table->find_slot_with_hash (h, hashcode, INSERT) = h;
5cb7516d
EB
1232 return NULL_TREE;
1233}
1234
842d4ee2 1235/* Ensure that TYPE has SIZE and ALIGN. Make and return a new padded type
5cb7516d 1236 if needed. We have already verified that SIZE and ALIGN are large enough.
842d4ee2
EB
1237 GNAT_ENTITY is used to name the resulting record and to issue a warning.
1238 IS_COMPONENT_TYPE is true if this is being done for the component type of
1239 an array. IS_USER_TYPE is true if the original type needs to be completed.
1240 DEFINITION is true if this type is being defined. SET_RM_SIZE is true if
1241 the RM size of the resulting type is to be set to SIZE too. */
1242
1243tree
1244maybe_pad_type (tree type, tree size, unsigned int align,
1245 Entity_Id gnat_entity, bool is_component_type,
1246 bool is_user_type, bool definition, bool set_rm_size)
1247{
1248 tree orig_size = TYPE_SIZE (type);
44e9e3ec 1249 unsigned int orig_align = TYPE_ALIGN (type);
842d4ee2
EB
1250 tree record, field;
1251
1252 /* If TYPE is a padded type, see if it agrees with any size and alignment
1253 we were given. If so, return the original type. Otherwise, strip
1254 off the padding, since we will either be returning the inner type
1255 or repadding it. If no size or alignment is specified, use that of
1256 the original padded type. */
1257 if (TYPE_IS_PADDING_P (type))
1258 {
1259 if ((!size
44e9e3ec
EB
1260 || operand_equal_p (round_up (size, orig_align), orig_size, 0))
1261 && (align == 0 || align == orig_align))
842d4ee2
EB
1262 return type;
1263
1264 if (!size)
44e9e3ec 1265 size = orig_size;
842d4ee2 1266 if (align == 0)
44e9e3ec 1267 align = orig_align;
842d4ee2
EB
1268
1269 type = TREE_TYPE (TYPE_FIELDS (type));
1270 orig_size = TYPE_SIZE (type);
44e9e3ec 1271 orig_align = TYPE_ALIGN (type);
842d4ee2
EB
1272 }
1273
1274 /* If the size is either not being changed or is being made smaller (which
1275 is not done here and is only valid for bitfields anyway), show the size
1276 isn't changing. Likewise, clear the alignment if it isn't being
1277 changed. Then return if we aren't doing anything. */
1278 if (size
1279 && (operand_equal_p (size, orig_size, 0)
1280 || (TREE_CODE (orig_size) == INTEGER_CST
1281 && tree_int_cst_lt (size, orig_size))))
1282 size = NULL_TREE;
1283
44e9e3ec 1284 if (align == orig_align)
842d4ee2
EB
1285 align = 0;
1286
1287 if (align == 0 && !size)
1288 return type;
1289
1290 /* If requested, complete the original type and give it a name. */
1291 if (is_user_type)
1292 create_type_decl (get_entity_name (gnat_entity), type,
74746d49 1293 !Comes_From_Source (gnat_entity),
842d4ee2
EB
1294 !(TYPE_NAME (type)
1295 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1296 && DECL_IGNORED_P (TYPE_NAME (type))),
1297 gnat_entity);
1298
1299 /* We used to modify the record in place in some cases, but that could
1300 generate incorrect debugging information. So make a new record
1301 type and name. */
1302 record = make_node (RECORD_TYPE);
1303 TYPE_PADDING_P (record) = 1;
1304
1305 if (Present (gnat_entity))
1306 TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD");
1307
44e9e3ec 1308 TYPE_ALIGN (record) = align ? align : orig_align;
842d4ee2
EB
1309 TYPE_SIZE (record) = size ? size : orig_size;
1310 TYPE_SIZE_UNIT (record)
1311 = convert (sizetype,
1312 size_binop (CEIL_DIV_EXPR, TYPE_SIZE (record),
1313 bitsize_unit_node));
1314
1315 /* If we are changing the alignment and the input type is a record with
1316 BLKmode and a small constant size, try to make a form that has an
1317 integral mode. This might allow the padding record to also have an
1318 integral mode, which will be much more efficient. There is no point
1319 in doing so if a size is specified unless it is also a small constant
1320 size and it is incorrect to do so if we cannot guarantee that the mode
1321 will be naturally aligned since the field must always be addressable.
1322
1323 ??? This might not always be a win when done for a stand-alone object:
1324 since the nominal and the effective type of the object will now have
1325 different modes, a VIEW_CONVERT_EXPR will be required for converting
1326 between them and it might be hard to overcome afterwards, including
1327 at the RTL level when the stand-alone object is accessed as a whole. */
1328 if (align != 0
1329 && RECORD_OR_UNION_TYPE_P (type)
1330 && TYPE_MODE (type) == BLKmode
1331 && !TYPE_BY_REFERENCE_P (type)
1332 && TREE_CODE (orig_size) == INTEGER_CST
1333 && !TREE_OVERFLOW (orig_size)
1334 && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
1335 && (!size
1336 || (TREE_CODE (size) == INTEGER_CST
1337 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
1338 {
1339 tree packable_type = make_packable_type (type, true);
1340 if (TYPE_MODE (packable_type) != BLKmode
1341 && align >= TYPE_ALIGN (packable_type))
1342 type = packable_type;
1343 }
1344
1345 /* Now create the field with the original size. */
1366ba41
EB
1346 field = create_field_decl (get_identifier ("F"), type, record, orig_size,
1347 bitsize_zero_node, 0, 1);
842d4ee2
EB
1348 DECL_INTERNAL_P (field) = 1;
1349
1350 /* Do not emit debug info until after the auxiliary record is built. */
1351 finish_record_type (record, field, 1, false);
1352
1353 /* Set the RM size if requested. */
1354 if (set_rm_size)
1355 {
5cb7516d
EB
1356 tree canonical_pad_type;
1357
842d4ee2
EB
1358 SET_TYPE_ADA_SIZE (record, size ? size : orig_size);
1359
1360 /* If the padded type is complete and has constant size, we canonicalize
1361 it by means of the hash table. This is consistent with the language
1362 semantics and ensures that gigi and the middle-end have a common view
1363 of these padded types. */
5cb7516d
EB
1364 if (TREE_CONSTANT (TYPE_SIZE (record))
1365 && (canonical_pad_type = lookup_and_insert_pad_type (record)))
842d4ee2 1366 {
5cb7516d
EB
1367 record = canonical_pad_type;
1368 goto built;
842d4ee2
EB
1369 }
1370 }
1371
1372 /* Unless debugging information isn't being written for the input type,
1373 write a record that shows what we are a subtype of and also make a
1374 variable that indicates our size, if still variable. */
1375 if (TREE_CODE (orig_size) != INTEGER_CST
1376 && TYPE_NAME (record)
1377 && TYPE_NAME (type)
1378 && !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1379 && DECL_IGNORED_P (TYPE_NAME (type))))
1380 {
1381 tree marker = make_node (RECORD_TYPE);
9dba4b55
PC
1382 tree name = TYPE_IDENTIFIER (record);
1383 tree orig_name = TYPE_IDENTIFIER (type);
842d4ee2
EB
1384
1385 TYPE_NAME (marker) = concat_name (name, "XVS");
1386 finish_record_type (marker,
1387 create_field_decl (orig_name,
1388 build_reference_type (type),
1389 marker, NULL_TREE, NULL_TREE,
1390 0, 0),
1391 0, true);
1392
1393 add_parallel_type (record, marker);
1394
1395 if (definition && size && TREE_CODE (size) != INTEGER_CST)
1396 TYPE_SIZE_UNIT (marker)
1397 = create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
1398 TYPE_SIZE_UNIT (record), false, false, false,
1399 false, NULL, gnat_entity);
1400 }
1401
1402 rest_of_record_type_compilation (record);
1403
1404built:
1405 /* If the size was widened explicitly, maybe give a warning. Take the
1406 original size as the maximum size of the input if there was an
1407 unconstrained record involved and round it up to the specified alignment,
1408 if one was specified. But don't do it if we are just annotating types
1409 and the type is tagged, since tagged types aren't fully laid out in this
1410 mode. */
1411 if (!size
1412 || TREE_CODE (size) == COND_EXPR
1413 || TREE_CODE (size) == MAX_EXPR
1414 || No (gnat_entity)
1415 || (type_annotate_only && Is_Tagged_Type (Etype (gnat_entity))))
1416 return record;
1417
1418 if (CONTAINS_PLACEHOLDER_P (orig_size))
1419 orig_size = max_size (orig_size, true);
1420
f42dd37f 1421 if (align && AGGREGATE_TYPE_P (type))
842d4ee2
EB
1422 orig_size = round_up (orig_size, align);
1423
1424 if (!operand_equal_p (size, orig_size, 0)
1425 && !(TREE_CODE (size) == INTEGER_CST
1426 && TREE_CODE (orig_size) == INTEGER_CST
1427 && (TREE_OVERFLOW (size)
1428 || TREE_OVERFLOW (orig_size)
1429 || tree_int_cst_lt (size, orig_size))))
1430 {
1431 Node_Id gnat_error_node = Empty;
1432
1a4cb227
AC
1433 /* For a packed array, post the message on the original array type. */
1434 if (Is_Packed_Array_Impl_Type (gnat_entity))
842d4ee2
EB
1435 gnat_entity = Original_Array_Type (gnat_entity);
1436
1437 if ((Ekind (gnat_entity) == E_Component
1438 || Ekind (gnat_entity) == E_Discriminant)
1439 && Present (Component_Clause (gnat_entity)))
1440 gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
1441 else if (Present (Size_Clause (gnat_entity)))
1442 gnat_error_node = Expression (Size_Clause (gnat_entity));
1443
1444 /* Generate message only for entities that come from source, since
1445 if we have an entity created by expansion, the message will be
1446 generated for some other corresponding source entity. */
1447 if (Comes_From_Source (gnat_entity))
1448 {
1449 if (Present (gnat_error_node))
1450 post_error_ne_tree ("{^ }bits of & unused?",
1451 gnat_error_node, gnat_entity,
1452 size_diffop (size, orig_size));
1453 else if (is_component_type)
1454 post_error_ne_tree ("component of& padded{ by ^ bits}?",
1455 gnat_entity, gnat_entity,
1456 size_diffop (size, orig_size));
1457 }
1458 }
1459
1460 return record;
1461}
1462\f
1463/* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
1464 If this is a multi-dimensional array type, do this recursively.
1465
1466 OP may be
1467 - ALIAS_SET_COPY: the new set is made a copy of the old one.
1468 - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
1469 - ALIAS_SET_SUBSET: the new set is made a subset of the old one. */
1470
1471void
1472relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
1473{
1474 /* Remove any padding from GNU_OLD_TYPE. It doesn't matter in the case
1475 of a one-dimensional array, since the padding has the same alias set
1476 as the field type, but if it's a multi-dimensional array, we need to
1477 see the inner types. */
1478 while (TREE_CODE (gnu_old_type) == RECORD_TYPE
1479 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
1480 || TYPE_PADDING_P (gnu_old_type)))
1481 gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));
1482
1483 /* Unconstrained array types are deemed incomplete and would thus be given
1484 alias set 0. Retrieve the underlying array type. */
1485 if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
1486 gnu_old_type
1487 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
1488 if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
1489 gnu_new_type
1490 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));
1491
1492 if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
1493 && TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
1494 && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
1495 relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);
1496
1497 switch (op)
1498 {
1499 case ALIAS_SET_COPY:
1500 /* The alias set shouldn't be copied between array types with different
1501 aliasing settings because this can break the aliasing relationship
1502 between the array type and its element type. */
1503#ifndef ENABLE_CHECKING
1504 if (flag_strict_aliasing)
1505#endif
1506 gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
1507 && TREE_CODE (gnu_old_type) == ARRAY_TYPE
1508 && TYPE_NONALIASED_COMPONENT (gnu_new_type)
1509 != TYPE_NONALIASED_COMPONENT (gnu_old_type)));
1510
1511 TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
1512 break;
1513
1514 case ALIAS_SET_SUBSET:
1515 case ALIAS_SET_SUPERSET:
1516 {
1517 alias_set_type old_set = get_alias_set (gnu_old_type);
1518 alias_set_type new_set = get_alias_set (gnu_new_type);
1519
1520 /* Do nothing if the alias sets conflict. This ensures that we
1521 never call record_alias_subset several times for the same pair
1522 or at all for alias set 0. */
1523 if (!alias_sets_conflict_p (old_set, new_set))
1524 {
1525 if (op == ALIAS_SET_SUBSET)
1526 record_alias_subset (old_set, new_set);
1527 else
1528 record_alias_subset (new_set, old_set);
1529 }
1530 }
1531 break;
1532
1533 default:
1534 gcc_unreachable ();
1535 }
1536
1537 record_component_aliases (gnu_new_type);
1538}
1539\f
1aeb40dd
EB
1540/* Record TYPE as a builtin type for Ada. NAME is the name of the type.
1541 ARTIFICIAL_P is true if it's a type that was generated by the compiler. */
a1ab4c31
AC
1542
1543void
1aeb40dd 1544record_builtin_type (const char *name, tree type, bool artificial_p)
a1ab4c31 1545{
c172df28
AH
1546 tree type_decl = build_decl (input_location,
1547 TYPE_DECL, get_identifier (name), type);
1aeb40dd 1548 DECL_ARTIFICIAL (type_decl) = artificial_p;
bc712852 1549 TYPE_ARTIFICIAL (type) = artificial_p;
10069d53 1550 gnat_pushdecl (type_decl, Empty);
a1ab4c31 1551
10069d53
EB
1552 if (debug_hooks->type_decl)
1553 debug_hooks->type_decl (type_decl, false);
a1ab4c31
AC
1554}
1555\f
e3edbd56
EB
1556/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1557 finish constructing the record type as a fat pointer type. */
1558
1559void
1560finish_fat_pointer_type (tree record_type, tree field_list)
1561{
1562 /* Make sure we can put it into a register. */
5da8c011
EB
1563 if (STRICT_ALIGNMENT)
1564 TYPE_ALIGN (record_type) = MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE);
e3edbd56
EB
1565
1566 /* Show what it really is. */
1567 TYPE_FAT_POINTER_P (record_type) = 1;
1568
1569 /* Do not emit debug info for it since the types of its fields may still be
1570 incomplete at this point. */
1571 finish_record_type (record_type, field_list, 0, false);
1572
1573 /* Force type_contains_placeholder_p to return true on it. Although the
1574 PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
1575 type but the representation of the unconstrained array. */
1576 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
1577}
1578
032d1b71 1579/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
a1ab4c31
AC
1580 finish constructing the record or union type. If REP_LEVEL is zero, this
1581 record has no representation clause and so will be entirely laid out here.
1582 If REP_LEVEL is one, this record has a representation clause and has been
1583 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
1584 this record is derived from a parent record and thus inherits its layout;
032d1b71
EB
1585 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
1586 we need to write debug information about this type. */
a1ab4c31
AC
1587
void
finish_record_type (tree record_type, tree field_list, int rep_level,
		    bool debug_info_p)
{
  enum tree_code code = TREE_CODE (record_type);
  tree name = TYPE_IDENTIFIER (record_type);
  tree ada_size = bitsize_zero_node;
  tree size = bitsize_zero_node;
  /* Remember which layout properties were already set on entry: for a
     rep'ed record they come from the representation clause and must be
     preserved below.  */
  bool had_size = TYPE_SIZE (record_type) != 0;
  bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
  bool had_align = TYPE_ALIGN (record_type) != 0;
  tree field;

  TYPE_FIELDS (record_type) = field_list;

  /* Always attach the TYPE_STUB_DECL for a record type.  It is required to
     generate debug info and have a parallel type.  */
  TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);

  /* Globally initialize the record first.  If this is a rep'ed record,
     that just means some initializations; otherwise, layout the record.  */
  if (rep_level > 0)
    {
      TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));

      if (!had_size_unit)
	TYPE_SIZE_UNIT (record_type) = size_zero_node;

      if (!had_size)
	TYPE_SIZE (record_type) = bitsize_zero_node;

      /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
	 out just like a UNION_TYPE, since the size will be fixed.  */
      else if (code == QUAL_UNION_TYPE)
	code = UNION_TYPE;
    }
  else
    {
      /* Ensure there isn't a size already set.  There can be in an error
	 case where there is a rep clause but all fields have errors and
	 no longer have a position.  */
      TYPE_SIZE (record_type) = 0;

      /* Ensure we use the traditional GCC layout for bitfields when we need
	 to pack the record type or have a representation clause.  The other
	 possible layout (Microsoft C compiler), if available, would prevent
	 efficient packing in almost all cases.  */
#ifdef TARGET_MS_BITFIELD_LAYOUT
      if (TARGET_MS_BITFIELD_LAYOUT && TYPE_PACKED (record_type))
	decl_attributes (&record_type,
			 tree_cons (get_identifier ("gcc_struct"),
				    NULL_TREE, NULL_TREE),
			 ATTR_FLAG_TYPE_IN_PLACE);
#endif

      layout_type (record_type);
    }

  /* At this point, the position and size of each field is known.  It was
     either set before entry by a rep clause, or by laying out the type above.

     We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
     to compute the Ada size; the GCC size and alignment (for rep'ed records
     that are not padding types); and the mode (for rep'ed records).  We also
     clear the DECL_BIT_FIELD indication for the cases we know have not been
     handled yet, and adjust DECL_NONADDRESSABLE_P accordingly.  */

  if (code == QUAL_UNION_TYPE)
    field_list = nreverse (field_list);

  for (field = field_list; field; field = DECL_CHAIN (field))
    {
      tree type = TREE_TYPE (field);
      tree pos = bit_position (field);
      tree this_size = DECL_SIZE (field);
      tree this_ada_size;

      /* A nested record/union contributes its Ada size (without trailing
	 padding) rather than its GCC size, when one has been computed.  */
      if (RECORD_OR_UNION_TYPE_P (type)
	  && !TYPE_FAT_POINTER_P (type)
	  && !TYPE_CONTAINS_TEMPLATE_P (type)
	  && TYPE_ADA_SIZE (type))
	this_ada_size = TYPE_ADA_SIZE (type);
      else
	this_ada_size = this_size;

      /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle.  */
      if (DECL_BIT_FIELD (field)
	  && operand_equal_p (this_size, TYPE_SIZE (type), 0))
	{
	  unsigned int align = TYPE_ALIGN (type);

	  /* In the general case, type alignment is required.  */
	  if (value_factor_p (pos, align))
	    {
	      /* The enclosing record type must be sufficiently aligned.
		 Otherwise, if no alignment was specified for it and it
		 has been laid out already, bump its alignment to the
		 desired one if this is compatible with its size.  */
	      if (TYPE_ALIGN (record_type) >= align)
		{
		  DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
		  DECL_BIT_FIELD (field) = 0;
		}
	      else if (!had_align
		       && rep_level == 0
		       && value_factor_p (TYPE_SIZE (record_type), align))
		{
		  TYPE_ALIGN (record_type) = align;
		  DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
		  DECL_BIT_FIELD (field) = 0;
		}
	    }

	  /* In the non-strict alignment case, only byte alignment is
	     required.  */
	  if (!STRICT_ALIGNMENT
	      && DECL_BIT_FIELD (field)
	      && value_factor_p (pos, BITS_PER_UNIT))
	    DECL_BIT_FIELD (field) = 0;
	}

      /* If we still have DECL_BIT_FIELD set at this point, we know that the
	 field is technically not addressable.  Except that it can actually
	 be addressed if it is BLKmode and happens to be properly aligned.  */
      if (DECL_BIT_FIELD (field)
	  && !(DECL_MODE (field) == BLKmode
	       && value_factor_p (pos, BITS_PER_UNIT)))
	DECL_NONADDRESSABLE_P (field) = 1;

      /* A type must be as aligned as its most aligned field that is not
	 a bit-field.  But this is already enforced by layout_type.  */
      if (rep_level > 0 && !DECL_BIT_FIELD (field))
	TYPE_ALIGN (record_type)
	  = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));

      switch (code)
	{
	case UNION_TYPE:
	  /* A union is as large as its largest member.  */
	  ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
	  size = size_binop (MAX_EXPR, size, this_size);
	  break;

	case QUAL_UNION_TYPE:
	  /* Fields are scanned in reverse order here, so build a COND_EXPR
	     selecting this variant's size when its qualifier holds.  */
	  ada_size
	    = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			   this_ada_size, ada_size);
	  size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			      this_size, size);
	  break;

	case RECORD_TYPE:
	  /* Since we know here that all fields are sorted in order of
	     increasing bit position, the size of the record is one
	     higher than the ending bit of the last field processed
	     unless we have a rep clause, since in that case we might
	     have a field outside a QUAL_UNION_TYPE that has a higher ending
	     position.  So use a MAX in that case.  Also, if this field is a
	     QUAL_UNION_TYPE, we need to take into account the previous size in
	     the case of empty variants.  */
	  ada_size
	    = merge_sizes (ada_size, pos, this_ada_size,
			   TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
	  size
	    = merge_sizes (size, pos, this_size,
			   TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Restore the original field order that was inverted above.  */
  if (code == QUAL_UNION_TYPE)
    nreverse (field_list);

  if (rep_level < 2)
    {
      /* If this is a padding record, we never want to make the size smaller
	 than what was specified in it, if any.  */
      if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
	size = TYPE_SIZE (record_type);

      /* Now set any of the values we've just computed that apply.  */
      if (!TYPE_FAT_POINTER_P (record_type)
	  && !TYPE_CONTAINS_TEMPLATE_P (record_type))
	SET_TYPE_ADA_SIZE (record_type, ada_size);

      if (rep_level > 0)
	{
	  tree size_unit = had_size_unit
			   ? TYPE_SIZE_UNIT (record_type)
			   : convert (sizetype,
				      size_binop (CEIL_DIV_EXPR, size,
						  bitsize_unit_node));
	  unsigned int align = TYPE_ALIGN (record_type);

	  TYPE_SIZE (record_type) = variable_size (round_up (size, align));
	  TYPE_SIZE_UNIT (record_type)
	    = variable_size (round_up (size_unit, align / BITS_PER_UNIT));

	  compute_record_mode (record_type);
	}
    }

  if (debug_info_p)
    rest_of_record_type_compilation (record_type);
}
1794
24d4b3d5
AC
1795/* Append PARALLEL_TYPE on the chain of parallel types of TYPE. If
1796 PARRALEL_TYPE has no context and its computation is not deferred yet, also
1797 propagate TYPE's context to PARALLEL_TYPE's or defer its propagation to the
1798 moment TYPE will get a context. */
a5695aa2
EB
1799
1800void
1801add_parallel_type (tree type, tree parallel_type)
1802{
1803 tree decl = TYPE_STUB_DECL (type);
1804
1805 while (DECL_PARALLEL_TYPE (decl))
1806 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
1807
1808 SET_DECL_PARALLEL_TYPE (decl, parallel_type);
24d4b3d5
AC
1809
1810 /* If PARALLEL_TYPE already has a context, we are done. */
1811 if (TYPE_CONTEXT (parallel_type) != NULL_TREE)
1812 return;
1813
1814 /* Otherwise, try to get one from TYPE's context. */
1815 if (TYPE_CONTEXT (type) != NULL_TREE)
1816 /* TYPE already has a context, so simply propagate it to PARALLEL_TYPE. */
1817 gnat_set_type_context (parallel_type, TYPE_CONTEXT (type));
1818
1819 /* ... otherwise TYPE has not context yet. We know it will thanks to
1820 gnat_pushdecl, and then its context will be propagated to PARALLEL_TYPE.
1821 So we have nothing to do in this case. */
a5695aa2
EB
1822}
1823
1824/* Return true if TYPE has a parallel type. */
1825
1826static bool
1827has_parallel_type (tree type)
1828{
1829 tree decl = TYPE_STUB_DECL (type);
1830
1831 return DECL_PARALLEL_TYPE (decl) != NULL_TREE;
1832}
1833
032d1b71
EB
/* Wrap up compilation of RECORD_TYPE, i.e. output all the debug information
   associated with it.  It need not be invoked directly in most cases since
   finish_record_type takes care of doing so, but this can be necessary if
   a parallel type is to be attached to the record type.  */

void
rest_of_record_type_compilation (tree record_type)
{
  bool var_size = false;
  tree field;

  /* If this is a padded type, the bulk of the debug info has already been
     generated for the field's type.  */
  if (TYPE_IS_PADDING_P (record_type))
    return;

  /* If the type already has a parallel type (XVS type), then we're done.  */
  if (has_parallel_type (record_type))
    return;

  for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
    {
      /* We need to make an XVE/XVU record if any field has variable size,
	 whether or not the record does.  For example, if we have a union,
	 it may be that all fields, rounded up to the alignment, have the
	 same size, in which case we'll use that size.  But the debug
	 output routines (except Dwarf2) won't be able to output the fields,
	 so we need to make the special record.  */
      if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
	  /* If a field has a non-constant qualifier, the record will have
	     variable size too.  */
	  || (TREE_CODE (record_type) == QUAL_UNION_TYPE
	      && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
	{
	  var_size = true;
	  break;
	}
    }

  /* If this record type is of variable size, make a parallel record type that
     will tell the debugger how the former is laid out (see exp_dbug.ads).  */
  if (var_size)
    {
      tree new_record_type
	= make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
		     ? UNION_TYPE : TREE_CODE (record_type));
      tree orig_name = TYPE_IDENTIFIER (record_type), new_name;
      tree last_pos = bitsize_zero_node;
      tree old_field, prev_old_field = NULL_TREE;

      /* The parallel type is named after the original with an XVU (variant
	 part) or XVE (variable size) suffix, per the exp_dbug.ads encoding.  */
      new_name
	= concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
				  ? "XVU" : "XVE");
      TYPE_NAME (new_record_type) = new_name;
      TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
      TYPE_STUB_DECL (new_record_type)
	= create_type_stub_decl (new_name, new_record_type);
      DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
	= DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
      TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
      TYPE_SIZE_UNIT (new_record_type)
	= size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);

      /* Now scan all the fields, replacing each field with a new field
	 corresponding to the new encoding.  */
      for (old_field = TYPE_FIELDS (record_type); old_field;
	   old_field = DECL_CHAIN (old_field))
	{
	  tree field_type = TREE_TYPE (old_field);
	  tree field_name = DECL_NAME (old_field);
	  tree curpos = bit_position (old_field);
	  tree pos, new_field;
	  bool var = false;
	  unsigned int align = 0;

	  /* We're going to do some pattern matching below so remove as many
	     conversions as possible.  */
	  curpos = remove_conversions (curpos, true);

	  /* See how the position was modified from the last position.

	     There are two basic cases we support: a value was added
	     to the last position or the last position was rounded to
	     a boundary and then something was added.  Check for the
	     first case first.  If not, see if there is any evidence
	     of rounding.  If so, round the last position and retry.

	     If this is a union, the position can be taken as zero.  */
	  if (TREE_CODE (new_record_type) == UNION_TYPE)
	    pos = bitsize_zero_node;
	  else
	    pos = compute_related_constant (curpos, last_pos);

	  /* Position is offset * align: round up to that alignment.  */
	  if (!pos
	      && TREE_CODE (curpos) == MULT_EXPR
	      && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1)))
	    {
	      tree offset = TREE_OPERAND (curpos, 0);
	      align = tree_to_uhwi (TREE_OPERAND (curpos, 1));
	      align = scale_by_factor_of (offset, align);
	      last_pos = round_up (last_pos, align);
	      pos = compute_related_constant (curpos, last_pos);
	    }
	  /* Position is offset * align + addend: the alignment is also
	     bounded by the largest power of 2 dividing the addend.  */
	  else if (!pos
		   && TREE_CODE (curpos) == PLUS_EXPR
		   && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1))
		   && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
		   && tree_fits_uhwi_p
		      (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1)))
	    {
	      tree offset = TREE_OPERAND (TREE_OPERAND (curpos, 0), 0);
	      unsigned HOST_WIDE_INT addend
		= tree_to_uhwi (TREE_OPERAND (curpos, 1));
	      align
		= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1));
	      align = scale_by_factor_of (offset, align);
	      align = MIN (align, addend & -addend);
	      last_pos = round_up (last_pos, align);
	      pos = compute_related_constant (curpos, last_pos);
	    }
	  else if (potential_alignment_gap (prev_old_field, old_field, pos))
	    {
	      align = TYPE_ALIGN (field_type);
	      last_pos = round_up (last_pos, align);
	      pos = compute_related_constant (curpos, last_pos);
	    }

	  /* If we can't compute a position, set it to zero.

	     ??? We really should abort here, but it's too much work
	     to get this correct for all cases.  */
	  if (!pos)
	    pos = bitsize_zero_node;

	  /* See if this type is variable-sized and make a pointer type
	     and indicate the indirection if so.  Beware that the debug
	     back-end may adjust the position computed above according
	     to the alignment of the field type, i.e. the pointer type
	     in this case, if we don't preventively counter that.  */
	  if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
	    {
	      field_type = build_pointer_type (field_type);
	      if (align != 0 && TYPE_ALIGN (field_type) > align)
		{
		  field_type = copy_node (field_type);
		  TYPE_ALIGN (field_type) = align;
		}
	      var = true;
	    }

	  /* Make a new field name, if necessary.  The XVLnn/XVAnn suffixes
	     encode the indirection and/or alignment for the debugger.  */
	  if (var || align != 0)
	    {
	      char suffix[16];

	      if (align != 0)
		sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
			 align / BITS_PER_UNIT);
	      else
		strcpy (suffix, "XVL");

	      field_name = concat_name (field_name, suffix);
	    }

	  new_field
	    = create_field_decl (field_name, field_type, new_record_type,
				 DECL_SIZE (old_field), pos, 0, 0);
	  /* Fields are prepended here, then the list is reversed below.  */
	  DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
	  TYPE_FIELDS (new_record_type) = new_field;

	  /* If old_field is a QUAL_UNION_TYPE, take its size as being
	     zero.  The only time it's not the last field of the record
	     is when there are other components at fixed positions after
	     it (meaning there was a rep clause for every field) and we
	     want to be able to encode them.  */
	  last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
				 (TREE_CODE (TREE_TYPE (old_field))
				  == QUAL_UNION_TYPE)
				 ? bitsize_zero_node
				 : DECL_SIZE (old_field));
	  prev_old_field = old_field;
	}

      TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));

      add_parallel_type (record_type, new_record_type);
    }
}
2022
a1ab4c31 2023/* Utility function of above to merge LAST_SIZE, the previous size of a record
1e17ef87
EB
2024 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
2025 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
2026 replace a value of zero with the old size. If HAS_REP is true, we take the
2027 MAX of the end position of this field with LAST_SIZE. In all other cases,
2028 we use FIRST_BIT plus SIZE. Return an expression for the size. */
a1ab4c31
AC
2029
2030static tree
2031merge_sizes (tree last_size, tree first_bit, tree size, bool special,
2032 bool has_rep)
2033{
2034 tree type = TREE_TYPE (last_size);
c6bd4220 2035 tree new_size;
a1ab4c31
AC
2036
2037 if (!special || TREE_CODE (size) != COND_EXPR)
2038 {
c6bd4220 2039 new_size = size_binop (PLUS_EXPR, first_bit, size);
a1ab4c31 2040 if (has_rep)
c6bd4220 2041 new_size = size_binop (MAX_EXPR, last_size, new_size);
a1ab4c31
AC
2042 }
2043
2044 else
c6bd4220
EB
2045 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
2046 integer_zerop (TREE_OPERAND (size, 1))
2047 ? last_size : merge_sizes (last_size, first_bit,
2048 TREE_OPERAND (size, 1),
2049 1, has_rep),
2050 integer_zerop (TREE_OPERAND (size, 2))
2051 ? last_size : merge_sizes (last_size, first_bit,
2052 TREE_OPERAND (size, 2),
2053 1, has_rep));
a1ab4c31
AC
2054
2055 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
2056 when fed through substitute_in_expr) into thinking that a constant
2057 size is not constant. */
c6bd4220
EB
2058 while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
2059 new_size = TREE_OPERAND (new_size, 0);
a1ab4c31 2060
c6bd4220 2061 return new_size;
a1ab4c31
AC
2062}
2063
2064/* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
2065 related by the addition of a constant. Return that constant if so. */
2066
2067static tree
2068compute_related_constant (tree op0, tree op1)
2069{
2070 tree op0_var, op1_var;
2071 tree op0_con = split_plus (op0, &op0_var);
2072 tree op1_con = split_plus (op1, &op1_var);
2073 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
2074
2075 if (operand_equal_p (op0_var, op1_var, 0))
2076 return result;
2077 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
2078 return result;
2079 else
2080 return 0;
2081}
2082
2083/* Utility function of above to split a tree OP which may be a sum, into a
2084 constant part, which is returned, and a variable part, which is stored
2085 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
2086 bitsizetype. */
2087
2088static tree
2089split_plus (tree in, tree *pvar)
2090{
722356ce
EB
2091 /* Strip conversions in order to ease the tree traversal and maximize the
2092 potential for constant or plus/minus discovery. We need to be careful
a1ab4c31
AC
2093 to always return and set *pvar to bitsizetype trees, but it's worth
2094 the effort. */
722356ce 2095 in = remove_conversions (in, false);
a1ab4c31
AC
2096
2097 *pvar = convert (bitsizetype, in);
2098
2099 if (TREE_CODE (in) == INTEGER_CST)
2100 {
2101 *pvar = bitsize_zero_node;
2102 return convert (bitsizetype, in);
2103 }
2104 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
2105 {
2106 tree lhs_var, rhs_var;
2107 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
2108 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
2109
2110 if (lhs_var == TREE_OPERAND (in, 0)
2111 && rhs_var == TREE_OPERAND (in, 1))
2112 return bitsize_zero_node;
2113
2114 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
2115 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
2116 }
2117 else
2118 return bitsize_zero_node;
2119}
2120\f
d47d0a8d
EB
2121/* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
2122 subprogram. If it is VOID_TYPE, then we are dealing with a procedure,
2123 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
2124 PARM_DECL nodes that are the subprogram parameters. CICO_LIST is the
2125 copy-in/copy-out list to be stored into the TYPE_CICO_LIST field.
2126 RETURN_UNCONSTRAINED_P is true if the function returns an unconstrained
2127 object. RETURN_BY_DIRECT_REF_P is true if the function returns by direct
2128 reference. RETURN_BY_INVISI_REF_P is true if the function returns by
2129 invisible reference. */
a1ab4c31
AC
2130
2131tree
2132create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
d47d0a8d
EB
2133 bool return_unconstrained_p, bool return_by_direct_ref_p,
2134 bool return_by_invisi_ref_p)
a1ab4c31 2135{
e5b00edf
NF
2136 /* A list of the data type nodes of the subprogram formal parameters.
2137 This list is generated by traversing the input list of PARM_DECL
2138 nodes. */
9771b263 2139 vec<tree, va_gc> *param_type_list = NULL;
d47d0a8d 2140 tree t, type;
a1ab4c31 2141
910ad8de 2142 for (t = param_decl_list; t; t = DECL_CHAIN (t))
9771b263 2143 vec_safe_push (param_type_list, TREE_TYPE (t));
a1ab4c31 2144
e5b00edf 2145 type = build_function_type_vec (return_type, param_type_list);
a1ab4c31 2146
d47d0a8d
EB
2147 /* TYPE may have been shared since GCC hashes types. If it has a different
2148 CICO_LIST, make a copy. Likewise for the various flags. */
523e82a7
EB
2149 if (!fntype_same_flags_p (type, cico_list, return_unconstrained_p,
2150 return_by_direct_ref_p, return_by_invisi_ref_p))
d47d0a8d
EB
2151 {
2152 type = copy_type (type);
2153 TYPE_CI_CO_LIST (type) = cico_list;
2154 TYPE_RETURN_UNCONSTRAINED_P (type) = return_unconstrained_p;
2155 TYPE_RETURN_BY_DIRECT_REF_P (type) = return_by_direct_ref_p;
2156 TREE_ADDRESSABLE (type) = return_by_invisi_ref_p;
2157 }
a1ab4c31 2158
a1ab4c31
AC
2159 return type;
2160}
2161\f
2162/* Return a copy of TYPE but safe to modify in any way. */
2163
2164tree
2165copy_type (tree type)
2166{
c6bd4220 2167 tree new_type = copy_node (type);
a1ab4c31 2168
90dcfecb
EB
2169 /* Unshare the language-specific data. */
2170 if (TYPE_LANG_SPECIFIC (type))
2171 {
2172 TYPE_LANG_SPECIFIC (new_type) = NULL;
2173 SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
2174 }
2175
2176 /* And the contents of the language-specific slot if needed. */
2177 if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
2178 && TYPE_RM_VALUES (type))
2179 {
2180 TYPE_RM_VALUES (new_type) = NULL_TREE;
2181 SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
2182 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
2183 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
2184 }
2185
a1ab4c31
AC
2186 /* copy_node clears this field instead of copying it, because it is
2187 aliased with TREE_CHAIN. */
c6bd4220 2188 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
a1ab4c31 2189
c6bd4220
EB
2190 TYPE_POINTER_TO (new_type) = 0;
2191 TYPE_REFERENCE_TO (new_type) = 0;
2192 TYPE_MAIN_VARIANT (new_type) = new_type;
2193 TYPE_NEXT_VARIANT (new_type) = 0;
a1ab4c31 2194
c6bd4220 2195 return new_type;
a1ab4c31
AC
2196}
2197\f
c1abd261
EB
2198/* Return a subtype of sizetype with range MIN to MAX and whose
2199 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2200 of the associated TYPE_DECL. */
a1ab4c31
AC
2201
2202tree
2203create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
2204{
2205 /* First build a type for the desired range. */
523e82a7 2206 tree type = build_nonshared_range_type (sizetype, min, max);
a1ab4c31 2207
523e82a7 2208 /* Then set the index type. */
a1ab4c31 2209 SET_TYPE_INDEX_TYPE (type, index);
74746d49 2210 create_type_decl (NULL_TREE, type, true, false, gnat_node);
c1abd261 2211
a1ab4c31
AC
2212 return type;
2213}
84fb43a1
EB
2214
2215/* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2216 sizetype is used. */
2217
2218tree
2219create_range_type (tree type, tree min, tree max)
2220{
2221 tree range_type;
2222
2223 if (type == NULL_TREE)
2224 type = sizetype;
2225
2226 /* First build a type with the base range. */
523e82a7
EB
2227 range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
2228 TYPE_MAX_VALUE (type));
84fb43a1
EB
2229
2230 /* Then set the actual range. */
1eb58520
AC
2231 SET_TYPE_RM_MIN_VALUE (range_type, min);
2232 SET_TYPE_RM_MAX_VALUE (range_type, max);
84fb43a1
EB
2233
2234 return range_type;
2235}
a1ab4c31 2236\f
10069d53
EB
2237/* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
2238 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
2239 its data type. */
2240
2241tree
2242create_type_stub_decl (tree type_name, tree type)
2243{
2244 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
2245 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
2246 emitted in DWARF. */
44e9e3ec 2247 tree type_decl = build_decl (input_location, TYPE_DECL, type_name, type);
10069d53 2248 DECL_ARTIFICIAL (type_decl) = 1;
bc712852 2249 TYPE_ARTIFICIAL (type) = 1;
10069d53
EB
2250 return type_decl;
2251}
2252
/* Return a TYPE_DECL node.  TYPE_NAME gives the name of the type and TYPE
   is a ..._TYPE node giving its data type.  ARTIFICIAL_P is true if this
   is a declaration that was generated by the compiler.  DEBUG_INFO_P is
   true if we need to write debug information about this type.  GNAT_NODE
   is used for the position of the decl.  */

tree
create_type_decl (tree type_name, tree type, bool artificial_p,
		  bool debug_info_p, Node_Id gnat_node)
{
  enum tree_code code = TREE_CODE (type);
  /* Whether TYPE has already been named by a TYPE_DECL.  */
  bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
  tree type_decl;

  /* Only the builtin TYPE_STUB_DECL should be used for dummy types.  */
  gcc_assert (!TYPE_IS_DUMMY_P (type));

  /* If the type hasn't been named yet, we're naming it; preserve an existing
     TYPE_STUB_DECL that has been attached to it for some purpose.  */
  if (!named && TYPE_STUB_DECL (type))
    {
      type_decl = TYPE_STUB_DECL (type);
      DECL_NAME (type_decl) = type_name;
    }
  else
    type_decl = build_decl (input_location, TYPE_DECL, type_name, type);

  DECL_ARTIFICIAL (type_decl) = artificial_p;
  TYPE_ARTIFICIAL (type) = artificial_p;

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (type_decl, gnat_node);

  /* If we're naming the type, equate the TYPE_STUB_DECL to the name.  This
     causes the name to be also viewed as a "tag" by the debug back-end, with
     the advantage that no DW_TAG_typedef is emitted for artificial "tagged"
     types in DWARF.

     Note that if "type" is used as a DECL_ORIGINAL_TYPE, it may be referenced
     from multiple contexts, and "type_decl" references a copy of it: in such a
     case, do not mess TYPE_STUB_DECL: we do not want to re-use the TYPE_DECL
     with the mechanism above.  */
  if (!named && type != DECL_ORIGINAL_TYPE (type_decl))
    TYPE_STUB_DECL (type) = type_decl;

  /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
     back-end doesn't support, and for others if we don't need to.  */
  if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
    DECL_IGNORED_P (type_decl) = 1;

  return type_decl;
}
10069d53 2305\f
a1ab4c31
AC
/* Return a VAR_DECL or CONST_DECL node.

   VAR_NAME gives the name of the variable.  ASM_NAME is its assembler name
   (if provided).  TYPE is its data type (a GCC ..._TYPE node).  VAR_INIT is
   the GCC tree for an optional initial expression; NULL_TREE if none.

   CONST_FLAG is true if this variable is constant, in which case we might
   return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.

   PUBLIC_FLAG is true if this is for a reference to a public entity or for a
   definition to be made visible outside of the current compilation unit, for
   instance variable definitions in a package specification.

   EXTERN_FLAG is true when processing an external variable declaration (as
   opposed to a definition: no storage is to be allocated for the variable).

   STATIC_FLAG is only relevant when not at top level.  In that case
   it indicates whether to always allocate storage to the variable.

   GNAT_NODE is used for the position of the decl.  */

tree
create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
		   bool const_flag, bool public_flag, bool extern_flag,
		   bool static_flag, bool const_decl_allowed_p,
		   struct attrib *attr_list, Node_Id gnat_node)
{
  /* Whether the object has static storage duration, either explicitly or by
     virtue of being declared at the global level.  */
  const bool static_storage = static_flag || global_bindings_p ();

  /* Whether the initializer is constant: for an external object or an object
     with static storage duration, we check that the initializer is a valid
     constant expression for initializing a static variable; otherwise, we
     only check that it is constant.  */
  const bool init_const
    = (var_init
       && gnat_types_compatible_p (type, TREE_TYPE (var_init))
       && (extern_flag || static_storage
	   ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
	     != NULL_TREE
	   : TREE_CONSTANT (var_init)));

  /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
     case the initializer may be used in lieu of the DECL node (as done in
     Identifier_to_gnu).  This is useful to prevent the need of elaboration
     code when an identifier for which such a DECL is made is in turn used
     as an initializer.  We used to rely on CONST_DECL vs VAR_DECL for this,
     but extra constraints apply to this choice (see below) and they are not
     relevant to the distinction we wish to make.  */
  const bool constant_p = const_flag && init_const;

  /* The actual DECL node.  CONST_DECL was initially intended for enumerals
     and may be used for scalars in general but not for aggregates.  */
  tree var_decl
    = build_decl (input_location,
		  (constant_p && const_decl_allowed_p
		   && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
		  var_name, type);

  /* If this is external, throw away any initializations (they will be done
     elsewhere) unless this is a constant for which we would like to remain
     able to get the initializer.  If we are defining a global here, leave a
     constant initialization and save any variable elaborations for the
     elaboration routine.  If we are just annotating types, throw away the
     initialization if it isn't a constant.  */
  if ((extern_flag && !constant_p)
      || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
    var_init = NULL_TREE;

  /* At the global level, a non-constant initializer generates elaboration
     statements.  Check that such statements are allowed, that is to say,
     not violating a No_Elaboration_Code restriction.  */
  if (var_init && !init_const && global_bindings_p ())
    Check_Elaboration_Code_Allowed (gnat_node);

  DECL_INITIAL (var_decl) = var_init;
  TREE_READONLY (var_decl) = const_flag;
  DECL_EXTERNAL (var_decl) = extern_flag;
  TREE_CONSTANT (var_decl) = constant_p;

  /* We need to allocate static storage for an object with static storage
     duration if it isn't external.  */
  TREE_STATIC (var_decl) = !extern_flag && static_storage;

  /* The object is public if it is external or if it is declared public
     and has static storage duration.  */
  TREE_PUBLIC (var_decl) = extern_flag || (public_flag && static_storage);

  /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
     try to fiddle with DECL_COMMON.  However, on platforms that don't
     support global BSS sections, uninitialized global variables would
     go in DATA instead, thus increasing the size of the executable.  */
  if (!flag_no_common
      && TREE_CODE (var_decl) == VAR_DECL
      && TREE_PUBLIC (var_decl)
      && !have_global_bss_p ())
    DECL_COMMON (var_decl) = 1;

  /* For an external constant whose initializer is not absolute, do not emit
     debug info.  In DWARF this would mean a global relocation in a read-only
     section which runs afoul of the PE-COFF run-time relocation mechanism.  */
  if (extern_flag
      && constant_p
      && var_init
      && initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
	 != null_pointer_node)
    DECL_IGNORED_P (var_decl) = 1;

  if (TYPE_VOLATILE (type))
    TREE_SIDE_EFFECTS (var_decl) = TREE_THIS_VOLATILE (var_decl) = 1;

  if (TREE_SIDE_EFFECTS (var_decl))
    TREE_ADDRESSABLE (var_decl) = 1;

  /* ??? Some attributes cannot be applied to CONST_DECLs.  */
  if (TREE_CODE (var_decl) == VAR_DECL)
    process_attributes (&var_decl, &attr_list, true, gnat_node);

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (var_decl, gnat_node);

  if (TREE_CODE (var_decl) == VAR_DECL)
    {
      if (asm_name)
	{
	  /* Let the target mangle the name if this isn't a verbatim asm.  */
	  if (*IDENTIFIER_POINTER (asm_name) != '*')
	    asm_name = targetm.mangle_decl_assembler_name (var_decl, asm_name);

	  SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
	}

      if (global_bindings_p ())
	rest_of_decl_compilation (var_decl, true, 0);
    }

  return var_decl;
}
2445\f
2446/* Return true if TYPE, an aggregate type, contains (or is) an array. */
2447
2448static bool
2449aggregate_type_contains_array_p (tree type)
2450{
2451 switch (TREE_CODE (type))
2452 {
2453 case RECORD_TYPE:
2454 case UNION_TYPE:
2455 case QUAL_UNION_TYPE:
2456 {
2457 tree field;
910ad8de 2458 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
a1ab4c31
AC
2459 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
2460 && aggregate_type_contains_array_p (TREE_TYPE (field)))
2461 return true;
2462 return false;
2463 }
2464
2465 case ARRAY_TYPE:
2466 return true;
2467
2468 default:
2469 gcc_unreachable ();
2470 }
2471}
2472
/* Return a FIELD_DECL node.  FIELD_NAME is the field's name, FIELD_TYPE is
   its type and RECORD_TYPE is the type of the enclosing record.  If SIZE is
   nonzero, it is the specified size of the field.  If POS is nonzero, it is
   the bit position.  PACKED is 1 if the enclosing record is packed, -1 if it
   has Component_Alignment of Storage_Unit.  If ADDRESSABLE is nonzero, it
   means we are allowed to take the address of the field; if it is negative,
   we should not make a bitfield, which is used by make_aligning_type.  */

tree
create_field_decl (tree field_name, tree field_type, tree record_type,
		   tree size, tree pos, int packed, int addressable)
{
  tree field_decl = build_decl (input_location,
				FIELD_DECL, field_name, field_type);

  DECL_CONTEXT (field_decl) = record_type;
  TREE_READONLY (field_decl) = TYPE_READONLY (field_type);

  /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
     byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
     Likewise for an aggregate without specified position that contains an
     array, because in this case slices of variable length of this array
     must be handled by GCC and variable-sized objects need to be aligned
     to at least a byte boundary.  */
  if (packed && (TYPE_MODE (field_type) == BLKmode
		 || (!pos
		     && AGGREGATE_TYPE_P (field_type)
		     && aggregate_type_contains_array_p (field_type))))
    DECL_ALIGN (field_decl) = BITS_PER_UNIT;

  /* If a size is specified, use it.  Otherwise, if the record type is packed
     compute a size to use, which may differ from the object's natural size.
     We always set a size in this case to trigger the checks for bitfield
     creation below, which is typically required when no position has been
     specified.  */
  if (size)
    size = convert (bitsizetype, size);
  else if (packed == 1)
    {
      size = rm_size (field_type);
      /* BLKmode entities cannot be handled at the bit level, so round the
	 packed size up to the next byte.  */
      if (TYPE_MODE (field_type) == BLKmode)
	size = round_up (size, BITS_PER_UNIT);
    }

  /* If we may, according to ADDRESSABLE, make a bitfield if a size is
     specified for two reasons: first if the size differs from the natural
     size.  Second, if the alignment is insufficient.  There are a number of
     ways the latter can be true.

     We never make a bitfield if the type of the field has a nonconstant size,
     because no such entity requiring bitfield operations should reach here.

     We do *preventively* make a bitfield when there might be the need for it
     but we don't have all the necessary information to decide, as is the case
     of a field with no specified position in a packed record.

     We also don't look at STRICT_ALIGNMENT here, and rely on later processing
     in layout_decl or finish_record_type to clear the bit_field indication if
     it is in fact not needed.  */
  if (addressable >= 0
      && size
      && TREE_CODE (size) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
      && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
	  || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
	  || packed
	  || (TYPE_ALIGN (record_type) != 0
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
    {
      DECL_BIT_FIELD (field_decl) = 1;
      DECL_SIZE (field_decl) = size;
      if (!packed && !pos)
	{
	  /* Without a position or packing, pick the smaller of the record
	     and field type alignments, when the record specifies one.  */
	  if (TYPE_ALIGN (record_type) != 0
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))
	    DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
	  else
	    DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
	}
    }

  /* A field at a fixed position is only marked packed if it is actually a
     bitfield; otherwise inherit the record's packing.  */
  DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;

  /* Bump the alignment if need be, either for bitfield/packing purposes or
     to satisfy the type requirements if no such consideration applies.  When
     we get the alignment from the type, indicate if this is from an explicit
     user request, which prevents stor-layout from lowering it later on.  */
  {
    unsigned int bit_align
      = (DECL_BIT_FIELD (field_decl) ? 1
	 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);

    if (bit_align > DECL_ALIGN (field_decl))
      DECL_ALIGN (field_decl) = bit_align;
    else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
      {
	DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
	DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
      }
  }

  if (pos)
    {
      /* We need to pass in the alignment the DECL is known to have.
	 This is the lowest-order bit set in POS, but no more than
	 the alignment of the record, if one is specified.  Note
	 that an alignment of 0 is taken as infinite.  */
      unsigned int known_align;

      if (tree_fits_uhwi_p (pos))
	known_align = tree_to_uhwi (pos) & - tree_to_uhwi (pos);
      else
	known_align = BITS_PER_UNIT;

      if (TYPE_ALIGN (record_type)
	  && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
	known_align = TYPE_ALIGN (record_type);

      layout_decl (field_decl, known_align);
      SET_DECL_OFFSET_ALIGN (field_decl,
			     tree_fits_uhwi_p (pos) ? BIGGEST_ALIGNMENT
			     : BITS_PER_UNIT);
      pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
		    &DECL_FIELD_BIT_OFFSET (field_decl),
		    DECL_OFFSET_ALIGN (field_decl), pos);
    }

  /* In addition to what our caller says, claim the field is addressable if we
     know that its type is not suitable.

     The field may also be "technically" nonaddressable, meaning that even if
     we attempt to take the field's address we will actually get the address
     of a copy.  This is the case for true bitfields, but the DECL_BIT_FIELD
     value we have at this point is not accurate enough, so we don't account
     for this here and let finish_record_type decide.  */
  if (!addressable && !type_for_nonaliased_component_p (field_type))
    addressable = 1;

  DECL_NONADDRESSABLE_P (field_decl) = !addressable;

  return field_decl;
}
2615\f
a8e05f92
EB
2616/* Return a PARM_DECL node. PARAM_NAME is the name of the parameter and
2617 PARAM_TYPE is its type. READONLY is true if the parameter is readonly
2618 (either an In parameter or an address of a pass-by-ref parameter). */
a1ab4c31
AC
2619
2620tree
2621create_param_decl (tree param_name, tree param_type, bool readonly)
2622{
c172df28
AH
2623 tree param_decl = build_decl (input_location,
2624 PARM_DECL, param_name, param_type);
a1ab4c31 2625
a8e05f92
EB
2626 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
2627 can lead to various ABI violations. */
2628 if (targetm.calls.promote_prototypes (NULL_TREE)
2629 && INTEGRAL_TYPE_P (param_type)
a1ab4c31
AC
2630 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
2631 {
2632 /* We have to be careful about biased types here. Make a subtype
2633 of integer_type_node with the proper biasing. */
2634 if (TREE_CODE (param_type) == INTEGER_TYPE
2635 && TYPE_BIASED_REPRESENTATION_P (param_type))
2636 {
84fb43a1
EB
2637 tree subtype
2638 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
c1abd261
EB
2639 TREE_TYPE (subtype) = integer_type_node;
2640 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
84fb43a1
EB
2641 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (param_type));
2642 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (param_type));
c1abd261 2643 param_type = subtype;
a1ab4c31
AC
2644 }
2645 else
2646 param_type = integer_type_node;
2647 }
2648
2649 DECL_ARG_TYPE (param_decl) = param_type;
2650 TREE_READONLY (param_decl) = readonly;
2651 return param_decl;
2652}
2653\f
/* Process the attributes in ATTR_LIST for NODE, which is either a DECL or
   a TYPE.  If IN_PLACE is true, the tree pointed to by NODE should not be
   changed.  GNAT_NODE is used for the position of error messages.  The list
   is consumed: *ATTR_LIST is cleared on return.  */

void
process_attributes (tree *node, struct attrib **attr_list, bool in_place,
		    Node_Id gnat_node)
{
  struct attrib *attr;

  for (attr = *attr_list; attr; attr = attr->next)
    switch (attr->type)
      {
      case ATTR_MACHINE_ATTRIBUTE:
	/* Point the diagnostics machinery at the GNAT node, then let the
	   middle end install the machine attribute.  */
	Sloc_to_locus (Sloc (gnat_node), &input_location);
	decl_attributes (node, tree_cons (attr->name, attr->args, NULL_TREE),
			 in_place ? ATTR_FLAG_TYPE_IN_PLACE : 0);
	break;

      case ATTR_LINK_ALIAS:
	/* Aliases can only be emitted for definitions, not externals.  */
	if (!DECL_EXTERNAL (*node))
	  {
	    TREE_STATIC (*node) = 1;
	    assemble_alias (*node, attr->name);
	  }
	break;

      case ATTR_WEAK_EXTERNAL:
	if (SUPPORTS_WEAK)
	  declare_weak (*node);
	else
	  post_error ("?weak declarations not supported on this target",
		      attr->error_point);
	break;

      case ATTR_LINK_SECTION:
	if (targetm_common.have_named_sections)
	  {
	    set_decl_section_name (*node, IDENTIFIER_POINTER (attr->name));
	    /* A decl placed in a specific section cannot be common.  */
	    DECL_COMMON (*node) = 0;
	  }
	else
	  post_error ("?section attributes are not supported for this target",
		      attr->error_point);
	break;

      case ATTR_LINK_CONSTRUCTOR:
	DECL_STATIC_CONSTRUCTOR (*node) = 1;
	TREE_USED (*node) = 1;
	break;

      case ATTR_LINK_DESTRUCTOR:
	DECL_STATIC_DESTRUCTOR (*node) = 1;
	TREE_USED (*node) = 1;
	break;

      case ATTR_THREAD_LOCAL_STORAGE:
	set_decl_tls_model (*node, decl_default_tls_model (*node));
	/* Thread-local variables cannot be common either.  */
	DECL_COMMON (*node) = 0;
	break;
      }

  *attr_list = NULL;
}
a1ab4c31
AC
2718
2719/* Return true if VALUE is a known to be a multiple of FACTOR, which must be
2720 a power of 2. */
2721
2722bool
2723value_factor_p (tree value, HOST_WIDE_INT factor)
2724{
cc269bb6 2725 if (tree_fits_uhwi_p (value))
ae7e9ddd 2726 return tree_to_uhwi (value) % factor == 0;
a1ab4c31
AC
2727
2728 if (TREE_CODE (value) == MULT_EXPR)
2729 return (value_factor_p (TREE_OPERAND (value, 0), factor)
2730 || value_factor_p (TREE_OPERAND (value, 1), factor));
2731
2732 return false;
2733}
2734
e8fa3dcd
PMR
2735/* Return whether GNAT_NODE is a defining identifier for a renaming that comes
2736 from the parameter association for the instantiation of a generic. We do
2737 not want to emit source location for them: the code generated for their
2738 initialization is likely to disturb debugging. */
2739
2740bool
2741renaming_from_generic_instantiation_p (Node_Id gnat_node)
2742{
2743 if (Nkind (gnat_node) != N_Defining_Identifier
2744 || !IN (Ekind (gnat_node), Object_Kind)
2745 || Comes_From_Source (gnat_node)
2746 || !Present (Renamed_Object (gnat_node)))
2747 return false;
2748
2749 /* Get the object declaration of the renamed object, if any and if the
2750 renamed object is a mere identifier. */
2751 gnat_node = Renamed_Object (gnat_node);
2752 if (Nkind (gnat_node) != N_Identifier)
2753 return false;
2754
2755 gnat_node = Entity (gnat_node);
2756 if (!Present (Parent (gnat_node)))
2757 return false;
2758
2759 gnat_node = Parent (gnat_node);
2760 return
2761 (Present (gnat_node)
2762 && Nkind (gnat_node) == N_Object_Declaration
2763 && Present (Corresponding_Generic_Association (gnat_node)));
2764}
2765
9a30c7c4
AC
2766/* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
2767 feed it with the elaboration of GNAT_SCOPE. */
2768
2769static struct deferred_decl_context_node *
2770add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global)
2771{
2772 struct deferred_decl_context_node *new_node;
2773
2774 new_node
2775 = (struct deferred_decl_context_node * ) xmalloc (sizeof (*new_node));
2776 new_node->decl = decl;
2777 new_node->gnat_scope = gnat_scope;
2778 new_node->force_global = force_global;
2779 new_node->types.create (1);
2780 new_node->next = deferred_decl_context_queue;
2781 deferred_decl_context_queue = new_node;
2782 return new_node;
2783}
2784
/* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
   feed it with the DECL_CONTEXT computed as part of N as soon as it is
   computed.  */

static void
add_deferred_type_context (struct deferred_decl_context_node *n, tree type)
{
  /* TYPE will get its TYPE_CONTEXT when N's DECL_CONTEXT is finally
     resolved by process_deferred_decl_context.  */
  n->types.safe_push (type);
}
2794
2795/* Get the GENERIC node corresponding to GNAT_SCOPE, if available. Return
2796 NULL_TREE if it is not available. */
2797
2798static tree
2799compute_deferred_decl_context (Entity_Id gnat_scope)
2800{
2801 tree context;
2802
2803 if (present_gnu_tree (gnat_scope))
2804 context = get_gnu_tree (gnat_scope);
2805 else
2806 return NULL_TREE;
2807
2808 if (TREE_CODE (context) == TYPE_DECL)
2809 {
2810 const tree context_type = TREE_TYPE (context);
2811
2812 /* Skip dummy types: only the final ones can appear in the context
2813 chain. */
2814 if (TYPE_DUMMY_P (context_type))
2815 return NULL_TREE;
2816
2817 /* ..._TYPE nodes are more useful than TYPE_DECL nodes in the context
2818 chain. */
2819 else
2820 context = context_type;
2821 }
2822
2823 return context;
2824}
2825
/* Try to process all deferred nodes in the queue.  Keep in the queue the ones
   that cannot be processed yet, remove the other ones.  If FORCE is true,
   force the processing for all nodes, use the global context when nodes don't
   have a GNU translation.  */

void
process_deferred_decl_context (bool force)
{
  /* IT is a pointer to the link to the current node, so that unqueuing the
     current node is a simple pointer update.  */
  struct deferred_decl_context_node **it = &deferred_decl_context_queue;
  struct deferred_decl_context_node *node;

  while (*it != NULL)
    {
      bool processed = false;
      tree context = NULL_TREE;
      Entity_Id gnat_scope;

      node = *it;

      /* If FORCE, get the innermost elaborated scope.  Otherwise, just try to
	 get the first scope.  */
      gnat_scope = node->gnat_scope;
      while (Present (gnat_scope))
	{
	  context = compute_deferred_decl_context (gnat_scope);
	  if (!force || context != NULL_TREE)
	    break;
	  gnat_scope = get_debug_scope (gnat_scope, NULL);
	}

      /* Imported declarations must not be in a local context (i.e. not inside
	 a function).  */
      if (context != NULL_TREE && node->force_global > 0)
	{
	  tree ctx = context;

	  /* Walk up the context chain and check no function encloses it.  */
	  while (ctx != NULL_TREE)
	    {
	      gcc_assert (TREE_CODE (ctx) != FUNCTION_DECL);
	      ctx = (DECL_P (ctx))
		    ? DECL_CONTEXT (ctx)
		    : TYPE_CONTEXT (ctx);
	    }
	}

      /* If FORCE, we want to get rid of all nodes in the queue: in case there
	 was no elaborated scope, use the global context.  */
      if (force && context == NULL_TREE)
	context = get_global_context ();

      if (context != NULL_TREE)
	{
	  tree t;
	  int i;

	  DECL_CONTEXT (node->decl) = context;

	  /* Propagate it to the TYPE_CONTEXT attributes of the requested
	     ..._TYPE nodes.  */
	  FOR_EACH_VEC_ELT (node->types, i, t)
	    {
	      gnat_set_type_context (t, context);
	    }
	  processed = true;
	}

      /* If this node has been successfully processed, remove it from the
	 queue.  Then move to the next node.  */
      if (processed)
	{
	  *it = node->next;
	  node->types.release ();
	  free (node);
	}
      else
	it = &node->next;
    }
}
2904
2905
5c475ba9
EB
2906/* Return VALUE scaled by the biggest power-of-2 factor of EXPR. */
2907
2908static unsigned int
2909scale_by_factor_of (tree expr, unsigned int value)
2910{
2911 expr = remove_conversions (expr, true);
2912
2913 /* An expression which is a bitwise AND with a mask has a power-of-2 factor
2914 corresponding to the number of trailing zeros of the mask. */
2915 if (TREE_CODE (expr) == BIT_AND_EXPR
2916 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST)
2917 {
2918 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
2919 unsigned int i = 0;
2920
2921 while ((mask & 1) == 0 && i < HOST_BITS_PER_WIDE_INT)
2922 {
2923 mask >>= 1;
2924 value *= 2;
2925 i++;
2926 }
2927 }
2928
2929 return value;
2930}
2931
/* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
   unless we can prove these 2 fields are laid out in such a way that no gap
   exist between the end of PREV_FIELD and the beginning of CURR_FIELD.  OFFSET
   is the distance in bits between the end of PREV_FIELD and the starting
   position of CURR_FIELD.  It is ignored if null.  */

static bool
potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
{
  /* If this is the first field of the record, there cannot be any gap.  */
  if (!prev_field)
    return false;

  /* If the previous field is a union type, then return false: the only
     time when such a field is not the last field of the record is when
     there are other components at fixed positions after it (meaning there
     was a rep clause for every field), in which case we don't want the
     alignment constraint to override them.  */
  if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
    return false;

  /* If the distance between the end of prev_field and the beginning of
     curr_field is constant, then there is a gap if the value of this
     constant is not null.  */
  if (offset && tree_fits_uhwi_p (offset))
    return !integer_zerop (offset);

  /* If the size and position of the previous field are constant,
     then check the sum of this size and position.  There will be a gap
     iff it is not multiple of the current field alignment.  */
  if (tree_fits_uhwi_p (DECL_SIZE (prev_field))
      && tree_fits_uhwi_p (bit_position (prev_field)))
    return ((tree_to_uhwi (bit_position (prev_field))
	     + tree_to_uhwi (DECL_SIZE (prev_field)))
	    % DECL_ALIGN (curr_field) != 0);

  /* If both the position and size of the previous field are multiples
     of the current field alignment, there cannot be any gap.  */
  if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
      && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
    return false;

  /* Fallback, return that there may be a potential gap.  */
  return true;
}
2977
88a94e2b
EB
2978/* Return a LABEL_DECL with LABEL_NAME. GNAT_NODE is used for the position
2979 of the decl. */
a1ab4c31
AC
2980
2981tree
88a94e2b 2982create_label_decl (tree label_name, Node_Id gnat_node)
a1ab4c31 2983{
88a94e2b
EB
2984 tree label_decl
2985 = build_decl (input_location, LABEL_DECL, label_name, void_type_node);
a1ab4c31 2986
88a94e2b
EB
2987 DECL_MODE (label_decl) = VOIDmode;
2988
2989 /* Add this decl to the current binding level. */
2990 gnat_pushdecl (label_decl, gnat_node);
a1ab4c31
AC
2991
2992 return label_decl;
2993}
2994\f
/* Return a FUNCTION_DECL node.  SUBPROG_NAME is the name of the subprogram,
   ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
   node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
   PARM_DECL nodes chained through the DECL_CHAIN field).

   INLINE_STATUS, PUBLIC_FLAG, EXTERN_FLAG, ARTIFICIAL_FLAG and ATTR_LIST are
   used to set the appropriate fields in the FUNCTION_DECL.  GNAT_NODE is
   used for the position of the decl.  */

tree
create_subprog_decl (tree subprog_name, tree asm_name, tree subprog_type,
		     tree param_decl_list, enum inline_status_t inline_status,
		     bool public_flag, bool extern_flag, bool artificial_flag,
		     struct attrib *attr_list, Node_Id gnat_node)
{
  tree subprog_decl = build_decl (input_location, FUNCTION_DECL, subprog_name,
				  subprog_type);
  tree result_decl = build_decl (input_location, RESULT_DECL, NULL_TREE,
				 TREE_TYPE (subprog_type));
  DECL_ARGUMENTS (subprog_decl) = param_decl_list;

  DECL_ARTIFICIAL (subprog_decl) = artificial_flag;
  DECL_EXTERNAL (subprog_decl) = extern_flag;

  /* Translate INLINE_STATUS into the corresponding decl flags.  */
  switch (inline_status)
    {
    case is_suppressed:
      DECL_UNINLINABLE (subprog_decl) = 1;
      break;

    case is_disabled:
      break;

    case is_required:
      /* With back-end inlining, force inlining with the always_inline
	 attribute; then handle it like is_enabled below.  */
      if (Back_End_Inlining)
	decl_attributes (&subprog_decl,
			 tree_cons (get_identifier ("always_inline"),
				    NULL_TREE, NULL_TREE),
			 ATTR_FLAG_TYPE_IN_PLACE);

      /* ... fall through ... */

    case is_enabled:
      DECL_DECLARED_INLINE_P (subprog_decl) = 1;
      DECL_NO_INLINE_WARNING_P (subprog_decl) = artificial_flag;
      break;

    default:
      gcc_unreachable ();
    }

  TREE_PUBLIC (subprog_decl) = public_flag;
  /* Mirror the function type's qualifiers onto the decl.  */
  TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
  TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
  TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);

  /* Set up the RESULT_DECL holding the function's return value.  */
  DECL_ARTIFICIAL (result_decl) = 1;
  DECL_IGNORED_P (result_decl) = 1;
  DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (subprog_type);
  DECL_RESULT (subprog_decl) = result_decl;

  process_attributes (&subprog_decl, &attr_list, true, gnat_node);

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (subprog_decl, gnat_node);

  if (asm_name)
    {
      /* Let the target mangle the name if this isn't a verbatim asm.  */
      if (*IDENTIFIER_POINTER (asm_name) != '*')
	asm_name = targetm.mangle_decl_assembler_name (subprog_decl, asm_name);

      SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);

      /* The expand_main_function circuitry expects "main_identifier_node" to
	 designate the DECL_NAME of the 'main' entry point, in turn expected
	 to be declared as the "main" function literally by default.  Ada
	 program entry points are typically declared with a different name
	 within the binder generated file, exported as 'main' to satisfy the
	 system expectations.  Force main_identifier_node in this case.  */
      if (asm_name == main_identifier_node)
	DECL_NAME (subprog_decl) = main_identifier_node;
    }

  /* Output the assembler code and/or RTL for the declaration.  */
  rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);

  return subprog_decl;
}
3084\f
/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
   body.  This routine needs to be invoked before processing the declarations
   appearing in the subprogram.  */

void
begin_subprog_body (tree subprog_decl)
{
  tree param_decl;

  announce_function (subprog_decl);

  /* This function is being defined.  */
  TREE_STATIC (subprog_decl) = 1;

  /* The failure of this assertion will likely come from a wrong context for
     the subprogram body, e.g. another procedure for a procedure declared at
     library level.  */
  gcc_assert (current_function_decl == decl_function_context (subprog_decl));

  current_function_decl = subprog_decl;

  /* Enter a new binding level and show that all the parameters belong to
     this function.  */
  gnat_pushlevel ();

  for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
       param_decl = DECL_CHAIN (param_decl))
    DECL_CONTEXT (param_decl) = subprog_decl;

  /* Build the RTL for the subprogram declaration itself.  */
  make_decl_rtl (subprog_decl);
}
3116
/* Finish translating the current subprogram and set its BODY.  */

void
end_subprog_body (tree body)
{
  tree fndecl = current_function_decl;

  /* Attach the BLOCK for this level to the function and pop the level.  */
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  gnat_poplevel ();

  /* Mark the RESULT_DECL as being in this subprogram.  */
  DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;

  /* The body should be a BIND_EXPR whose BLOCK is the top-level one.  */
  if (TREE_CODE (body) == BIND_EXPR)
    {
      BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
      DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
    }

  DECL_SAVED_TREE (fndecl) = body;

  /* Restore the enclosing function, if any, as the current one; this
     supports nested subprograms.  */
  current_function_decl = decl_function_context (fndecl);
}
3143
3144/* Wrap up compilation of SUBPROG_DECL, a subprogram body. */
a1ab4c31 3145
71196d4e
EB
3146void
3147rest_of_subprog_body_compilation (tree subprog_decl)
3148{
a1ab4c31
AC
3149 /* We cannot track the location of errors past this point. */
3150 error_gnat_node = Empty;
3151
3152 /* If we're only annotating types, don't actually compile this function. */
3153 if (type_annotate_only)
3154 return;
3155
a406865a 3156 /* Dump functions before gimplification. */
71196d4e 3157 dump_function (TDI_original, subprog_decl);
a406865a 3158
228ee426 3159 if (!decl_function_context (subprog_decl))
3dafb85c 3160 cgraph_node::finalize_function (subprog_decl, false);
a1ab4c31
AC
3161 else
3162 /* Register this function with cgraph just far enough to get it
3163 added to our parent's nested function list. */
037e5573 3164 (void) cgraph_node::get_create (subprog_decl);
a1ab4c31
AC
3165}
3166
/* Record DECL, a builtin function declaration, in the current binding level
   (with no associated GNAT node) and return it.  */

tree
gnat_builtin_function (tree decl)
{
  gnat_pushdecl (decl, Empty);
  return decl;
}
3173
3174/* Return an integer type with the number of bits of precision given by
3175 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
3176 it is a signed type. */
3177
3178tree
3179gnat_type_for_size (unsigned precision, int unsignedp)
3180{
3181 tree t;
3182 char type_name[20];
3183
3184 if (precision <= 2 * MAX_BITS_PER_WORD
3185 && signed_and_unsigned_types[precision][unsignedp])
3186 return signed_and_unsigned_types[precision][unsignedp];
3187
3188 if (unsignedp)
3189 t = make_unsigned_type (precision);
3190 else
3191 t = make_signed_type (precision);
3192
3193 if (precision <= 2 * MAX_BITS_PER_WORD)
3194 signed_and_unsigned_types[precision][unsignedp] = t;
3195
3196 if (!TYPE_NAME (t))
3197 {
c3d79c60 3198 sprintf (type_name, "%sSIGNED_%u", unsignedp ? "UN" : "", precision);
a1ab4c31
AC
3199 TYPE_NAME (t) = get_identifier (type_name);
3200 }
3201
3202 return t;
3203}
3204
3205/* Likewise for floating-point types. */
3206
3207static tree
ef4bddc2 3208float_type_for_precision (int precision, machine_mode mode)
a1ab4c31
AC
3209{
3210 tree t;
3211 char type_name[20];
3212
3213 if (float_types[(int) mode])
3214 return float_types[(int) mode];
3215
3216 float_types[(int) mode] = t = make_node (REAL_TYPE);
3217 TYPE_PRECISION (t) = precision;
3218 layout_type (t);
3219
3220 gcc_assert (TYPE_MODE (t) == mode);
3221 if (!TYPE_NAME (t))
3222 {
3223 sprintf (type_name, "FLOAT_%d", precision);
3224 TYPE_NAME (t) = get_identifier (type_name);
3225 }
3226
3227 return t;
3228}
3229
3230/* Return a data type that has machine mode MODE. UNSIGNEDP selects
3231 an unsigned type; otherwise a signed type is returned. */
3232
3233tree
ef4bddc2 3234gnat_type_for_mode (machine_mode mode, int unsignedp)
a1ab4c31
AC
3235{
3236 if (mode == BLKmode)
3237 return NULL_TREE;
2799d18c
EB
3238
3239 if (mode == VOIDmode)
a1ab4c31 3240 return void_type_node;
2799d18c
EB
3241
3242 if (COMPLEX_MODE_P (mode))
a1ab4c31 3243 return NULL_TREE;
2799d18c
EB
3244
3245 if (SCALAR_FLOAT_MODE_P (mode))
a1ab4c31 3246 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2799d18c
EB
3247
3248 if (SCALAR_INT_MODE_P (mode))
a1ab4c31 3249 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2799d18c
EB
3250
3251 if (VECTOR_MODE_P (mode))
3252 {
ef4bddc2 3253 machine_mode inner_mode = GET_MODE_INNER (mode);
2799d18c
EB
3254 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
3255 if (inner_type)
3256 return build_vector_type_for_mode (inner_type, mode);
3257 }
3258
3259 return NULL_TREE;
a1ab4c31
AC
3260}
3261
3262/* Return the unsigned version of a TYPE_NODE, a scalar type. */
3263
3264tree
3265gnat_unsigned_type (tree type_node)
3266{
3267 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
3268
3269 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
3270 {
3271 type = copy_node (type);
3272 TREE_TYPE (type) = type_node;
3273 }
3274 else if (TREE_TYPE (type_node)
3275 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
3276 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
3277 {
3278 type = copy_node (type);
3279 TREE_TYPE (type) = TREE_TYPE (type_node);
3280 }
3281
3282 return type;
3283}
3284
3285/* Return the signed version of a TYPE_NODE, a scalar type. */
3286
3287tree
3288gnat_signed_type (tree type_node)
3289{
3290 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
3291
3292 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
3293 {
3294 type = copy_node (type);
3295 TREE_TYPE (type) = type_node;
3296 }
3297 else if (TREE_TYPE (type_node)
3298 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
3299 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
3300 {
3301 type = copy_node (type);
3302 TREE_TYPE (type) = TREE_TYPE (type_node);
3303 }
3304
3305 return type;
3306}
3307
3308/* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
3309 transparently converted to each other. */
3310
3311int
3312gnat_types_compatible_p (tree t1, tree t2)
3313{
3314 enum tree_code code;
3315
3316 /* This is the default criterion. */
3317 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
3318 return 1;
3319
3320 /* We only check structural equivalence here. */
3321 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
3322 return 0;
3323
7948ae37
OH
3324 /* Vector types are also compatible if they have the same number of subparts
3325 and the same form of (scalar) element type. */
3326 if (code == VECTOR_TYPE
3327 && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
3328 && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
3329 && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
3330 return 1;
3331
cfa0bd19 3332 /* Array types are also compatible if they are constrained and have the same
96bba5e6 3333 domain(s) and the same component type. */
a1ab4c31 3334 if (code == ARRAY_TYPE
0adef32b
JJ
3335 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
3336 || (TYPE_DOMAIN (t1)
b4680ca1 3337 && TYPE_DOMAIN (t2)
0adef32b
JJ
3338 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
3339 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
3340 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
cfa0bd19 3341 TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
96bba5e6
EB
3342 && (TREE_TYPE (t1) == TREE_TYPE (t2)
3343 || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
3344 && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))))
a1ab4c31
AC
3345 return 1;
3346
a1ab4c31
AC
3347 return 0;
3348}
523e82a7 3349
71196d4e
EB
3350/* Return true if EXPR is a useless type conversion. */
3351
3352bool
3353gnat_useless_type_conversion (tree expr)
3354{
3355 if (CONVERT_EXPR_P (expr)
3356 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
3357 || TREE_CODE (expr) == NON_LVALUE_EXPR)
3358 return gnat_types_compatible_p (TREE_TYPE (expr),
3359 TREE_TYPE (TREE_OPERAND (expr, 0)));
3360
3361 return false;
3362}
3363
523e82a7
EB
3364/* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
3365
3366bool
3367fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
3368 bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
3369{
3370 return TYPE_CI_CO_LIST (t) == cico_list
3371 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
3372 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
3373 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
3374}
a1ab4c31
AC
3375\f
/* EXP is an expression for the size of an object.  If this size contains
   discriminant references, replace them with the maximum (if MAX_P) or
   minimum (if !MAX_P) possible value of the discriminant.

   The extremum is propagated recursively through the expression; note that
   operands of sign-flipping operations (negation, the subtrahend of a
   subtraction) are recursed into with the opposite polarity.  */

tree
max_size (tree exp, bool max_p)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
    case tcc_constant:
      /* Declarations and constants have no discriminant references.  */
      return exp;

    case tcc_vl_exp:
      if (code == CALL_EXPR)
	{
	  tree t, *argarray;
	  int n, i;

	  /* Try to inline the call first; if that succeeds, compute the
	     extremum of the inlined expression instead.  */
	  t = maybe_inline_call_in_expr (exp);
	  if (t)
	    return max_size (t, max_p);

	  /* Otherwise rebuild the call with each argument replaced by its
	     extremum.  */
	  n = call_expr_nargs (exp);
	  gcc_assert (n > 0);
	  argarray = XALLOCAVEC (tree, n);
	  for (i = 0; i < n; i++)
	    argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
	  return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
	}
      break;

    case tcc_reference:
      /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
	 modify.  Otherwise, we treat it like a variable.  */
      if (CONTAINS_PLACEHOLDER_P (exp))
	{
	  tree val_type = TREE_TYPE (TREE_OPERAND (exp, 1));
	  tree val = (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type));
	  /* Replace the placeholder reference with the extremum of its type
	     and recurse, always seeking the maximum from here on.  */
	  return max_size (convert (get_base_type (val_type), val), true);
	}

      return exp;

    case tcc_comparison:
      /* A comparison yields 0 or 1, so its maximum is 1 and minimum 0.  */
      return max_p ? size_one_node : size_zero_node;

    case tcc_unary:
      if (code == NON_LVALUE_EXPR)
	return max_size (TREE_OPERAND (exp, 0), max_p);

      /* Negation flips which extremum of the operand yields the extremum
	 of the result.  */
      return fold_build1 (code, type,
			  max_size (TREE_OPERAND (exp, 0),
				    code == NEGATE_EXPR ? !max_p : max_p));

    case tcc_binary:
      {
	/* For subtraction, the extremum comes from the opposite extremum
	   of the subtrahend.  */
	tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
	tree rhs = max_size (TREE_OPERAND (exp, 1),
			     code == MINUS_EXPR ? !max_p : max_p);

	/* Special-case wanting the maximum value of a MIN_EXPR.
	   In that case, if one side overflows, return the other.  */
	if (max_p && code == MIN_EXPR)
	  {
	    if (TREE_CODE (rhs) == INTEGER_CST && TREE_OVERFLOW (rhs))
	      return lhs;

	    if (TREE_CODE (lhs) == INTEGER_CST && TREE_OVERFLOW (lhs))
	      return rhs;
	  }

	/* Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
	   overflowing and the RHS a variable.  */
	if ((code == MINUS_EXPR || code == PLUS_EXPR)
	    && TREE_CODE (lhs) == INTEGER_CST
	    && TREE_OVERFLOW (lhs)
	    && TREE_CODE (rhs) != INTEGER_CST)
	  return lhs;

	/* If we are going to subtract a "negative" value in an unsigned type,
	   do the operation as an addition of the negated value, in order to
	   avoid creating a spurious overflow below.  */
	if (code == MINUS_EXPR
	    && TYPE_UNSIGNED (type)
	    && TREE_CODE (rhs) == INTEGER_CST
	    && !TREE_OVERFLOW (rhs)
	    && tree_int_cst_sign_bit (rhs) != 0)
	  {
	    rhs = fold_build1 (NEGATE_EXPR, type, rhs);
	    code = PLUS_EXPR;
	  }

	/* We need to detect overflows so we call size_binop here.  */
	return size_binop (code, lhs, rhs);
      }

    case tcc_expression:
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  /* A SAVE_EXPR must be evaluated once only, so keep it as is.  */
	  if (code == SAVE_EXPR)
	    return exp;

	  return fold_build1 (code, type,
			      max_size (TREE_OPERAND (exp, 0), max_p));

	case 2:
	  /* Only the value (second operand) of a COMPOUND_EXPR matters.  */
	  if (code == COMPOUND_EXPR)
	    return max_size (TREE_OPERAND (exp, 1), max_p);

	  return fold_build2 (code, type,
			      max_size (TREE_OPERAND (exp, 0), max_p),
			      max_size (TREE_OPERAND (exp, 1), max_p));

	case 3:
	  /* For a conditional, take the extremum over both arms since we
	     cannot know which one will be selected.  */
	  if (code == COND_EXPR)
	    return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
				max_size (TREE_OPERAND (exp, 1), max_p),
				max_size (TREE_OPERAND (exp, 2), max_p));

	default:
	  break;
	}

      /* Other tree classes cannot happen.  */
    default:
      break;
    }

  gcc_unreachable ();
}
3511\f
/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
   EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
   Return a constructor for the template.

   The fields of TEMPLATE_TYPE come in (min, max) pairs, one pair per
   dimension of the array.  */

tree
build_template (tree template_type, tree array_type, tree expr)
{
  vec<constructor_elt, va_gc> *template_elts = NULL;
  tree bound_list = NULL_TREE;
  tree field;

  /* Strip padding and justified-modular wrappers to get at the array.  */
  while (TREE_CODE (array_type) == RECORD_TYPE
	 && (TYPE_PADDING_P (array_type)
	     || TYPE_JUSTIFIED_MODULAR_P (array_type)))
    array_type = TREE_TYPE (TYPE_FIELDS (array_type));

  if (TREE_CODE (array_type) == ARRAY_TYPE
      || (TREE_CODE (array_type) == INTEGER_TYPE
	  && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
    bound_list = TYPE_ACTUAL_BOUNDS (array_type);

  /* First make the list for a CONSTRUCTOR for the template.  Go down the
     field list of the template instead of the type chain because this
     array might be an Ada array of arrays and we can't tell where the
     nested arrays stop being the underlying object.  */

  /* The third clause advances either the bound list or, failing that,
     descends one level into the array of arrays; the fields advance two
     at a time, skipping over each (min, max) pair.  */
  for (field = TYPE_FIELDS (template_type); field;
       (bound_list
	? (bound_list = TREE_CHAIN (bound_list))
	: (array_type = TREE_TYPE (array_type))),
       field = DECL_CHAIN (DECL_CHAIN (field)))
    {
      tree bounds, min, max;

      /* If we have a bound list, get the bounds from there.  Likewise
	 for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
	 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
	 This will give us a maximum range.  */
      if (bound_list)
	bounds = TREE_VALUE (bound_list);
      else if (TREE_CODE (array_type) == ARRAY_TYPE)
	bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
      else if (expr && TREE_CODE (expr) == PARM_DECL
	       && DECL_BY_COMPONENT_PTR_P (expr))
	bounds = TREE_TYPE (field);
      else
	gcc_unreachable ();

      /* Convert each bound to the type of the matching template field.  */
      min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
      max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));

      /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
	 substitute it from OBJECT.  */
      min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
      max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);

      CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
      CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
    }

  return gnat_build_constructor (template_type, template_elts);
}
3574\f
e63b36bd
EB
3575/* Return true if TYPE is suitable for the element type of a vector. */
3576
3577static bool
3578type_for_vector_element_p (tree type)
3579{
ef4bddc2 3580 machine_mode mode;
e63b36bd
EB
3581
3582 if (!INTEGRAL_TYPE_P (type)
3583 && !SCALAR_FLOAT_TYPE_P (type)
3584 && !FIXED_POINT_TYPE_P (type))
3585 return false;
3586
3587 mode = TYPE_MODE (type);
3588 if (GET_MODE_CLASS (mode) != MODE_INT
3589 && !SCALAR_FLOAT_MODE_P (mode)
3590 && !ALL_SCALAR_FIXED_POINT_MODE_P (mode))
3591 return false;
3592
3593 return true;
3594}
3595
3596/* Return a vector type given the SIZE and the INNER_TYPE, or NULL_TREE if
3597 this is not possible. If ATTRIBUTE is non-zero, we are processing the
3598 attribute declaration and want to issue error messages on failure. */
3599
3600static tree
3601build_vector_type_for_size (tree inner_type, tree size, tree attribute)
3602{
3603 unsigned HOST_WIDE_INT size_int, inner_size_int;
3604 int nunits;
3605
3606 /* Silently punt on variable sizes. We can't make vector types for them,
3607 need to ignore them on front-end generated subtypes of unconstrained
3608 base types, and this attribute is for binding implementors, not end
3609 users, so we should never get there from legitimate explicit uses. */
3610 if (!tree_fits_uhwi_p (size))
3611 return NULL_TREE;
3612 size_int = tree_to_uhwi (size);
3613
3614 if (!type_for_vector_element_p (inner_type))
3615 {
3616 if (attribute)
3617 error ("invalid element type for attribute %qs",
3618 IDENTIFIER_POINTER (attribute));
3619 return NULL_TREE;
3620 }
3621 inner_size_int = tree_to_uhwi (TYPE_SIZE_UNIT (inner_type));
3622
3623 if (size_int % inner_size_int)
3624 {
3625 if (attribute)
3626 error ("vector size not an integral multiple of component size");
3627 return NULL_TREE;
3628 }
3629
3630 if (size_int == 0)
3631 {
3632 if (attribute)
3633 error ("zero vector size");
3634 return NULL_TREE;
3635 }
3636
3637 nunits = size_int / inner_size_int;
3638 if (nunits & (nunits - 1))
3639 {
3640 if (attribute)
3641 error ("number of components of vector not a power of two");
3642 return NULL_TREE;
3643 }
3644
3645 return build_vector_type (inner_type, nunits);
3646}
3647
3648/* Return a vector type whose representative array type is ARRAY_TYPE, or
3649 NULL_TREE if this is not possible. If ATTRIBUTE is non-zero, we are
3650 processing the attribute and want to issue error messages on failure. */
3651
3652static tree
3653build_vector_type_for_array (tree array_type, tree attribute)
3654{
3655 tree vector_type = build_vector_type_for_size (TREE_TYPE (array_type),
3656 TYPE_SIZE_UNIT (array_type),
3657 attribute);
3658 if (!vector_type)
3659 return NULL_TREE;
3660
3661 TYPE_REPRESENTATIVE_ARRAY (vector_type) = array_type;
3662 return vector_type;
3663}
3664\f
928dfa4b
EB
3665/* Build a type to be used to represent an aliased object whose nominal type
3666 is an unconstrained array. This consists of a RECORD_TYPE containing a
3667 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
3668 If ARRAY_TYPE is that of an unconstrained array, this is used to represent
3669 an arbitrary unconstrained object. Use NAME as the name of the record.
3670 DEBUG_INFO_P is true if we need to write debug information for the type. */
a1ab4c31
AC
3671
3672tree
928dfa4b
EB
3673build_unc_object_type (tree template_type, tree object_type, tree name,
3674 bool debug_info_p)
a1ab4c31 3675{
24d4b3d5 3676 tree decl;
a1ab4c31 3677 tree type = make_node (RECORD_TYPE);
da01bfee
EB
3678 tree template_field
3679 = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
3680 NULL_TREE, NULL_TREE, 0, 1);
3681 tree array_field
3682 = create_field_decl (get_identifier ("ARRAY"), object_type, type,
3683 NULL_TREE, NULL_TREE, 0, 1);
a1ab4c31
AC
3684
3685 TYPE_NAME (type) = name;
3686 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
910ad8de 3687 DECL_CHAIN (template_field) = array_field;
928dfa4b
EB
3688 finish_record_type (type, template_field, 0, true);
3689
3690 /* Declare it now since it will never be declared otherwise. This is
3691 necessary to ensure that its subtrees are properly marked. */
24d4b3d5
AC
3692 decl = create_type_decl (name, type, true, debug_info_p, Empty);
3693
3694 /* template_type will not be used elsewhere than here, so to keep the debug
3695 info clean and in order to avoid scoping issues, make decl its
3696 context. */
3697 gnat_set_type_context (template_type, decl);
a1ab4c31
AC
3698
3699 return type;
3700}
3701
3702/* Same, taking a thin or fat pointer type instead of a template type. */
3703
3704tree
3705build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
928dfa4b 3706 tree name, bool debug_info_p)
a1ab4c31
AC
3707{
3708 tree template_type;
3709
315cff15 3710 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
a1ab4c31
AC
3711
3712 template_type
315cff15 3713 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
910ad8de 3714 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
a1ab4c31 3715 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
928dfa4b
EB
3716
3717 return
3718 build_unc_object_type (template_type, object_type, name, debug_info_p);
a1ab4c31 3719}
a1ab4c31 3720\f
229077b0
EB
/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
   In the normal case this is just two adjustments, but we have more to
   do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE.  */

void
update_pointer_to (tree old_type, tree new_type)
{
  tree ptr = TYPE_POINTER_TO (old_type);
  tree ref = TYPE_REFERENCE_TO (old_type);
  tree t;

  /* If this is the main variant, process all the other variants first.  */
  if (TYPE_MAIN_VARIANT (old_type) == old_type)
    for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
      update_pointer_to (t, new_type);

  /* If no pointers and no references, we are done.  */
  if (!ptr && !ref)
    return;

  /* Merge the old type qualifiers in the new type.

     Each old variant has qualifiers for specific reasons, and the new
     designated type as well.  Each set of qualifiers represents useful
     information grabbed at some point, and merging the two simply unifies
     these inputs into the final type description.

     Consider for instance a volatile type frozen after an access to constant
     type designating it; after the designated type's freeze, we get here with
     a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
     when the access type was processed.  We will make a volatile and readonly
     designated type, because that's what it really is.

     We might also get here for a non-dummy OLD_TYPE variant with different
     qualifiers than those of NEW_TYPE, for instance in some cases of pointers
     to private record type elaboration (see the comments around the call to
     this routine in gnat_to_gnu_entity <E_Access_Type>).  We have to merge
     the qualifiers in those cases too, to avoid accidentally discarding the
     initial set, and will often end up with OLD_TYPE == NEW_TYPE then.  */
  new_type
    = build_qualified_type (new_type,
			    TYPE_QUALS (old_type) | TYPE_QUALS (new_type));

  /* If old type and new type are identical, there is nothing to do.  */
  if (old_type == new_type)
    return;

  /* Otherwise, first handle the simple case.  */
  if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
    {
      tree new_ptr, new_ref;

      /* If pointer or reference already points to new type, nothing to do.
	 This can happen as update_pointer_to can be invoked multiple times
	 on the same couple of types because of the type variants.  */
      if ((ptr && TREE_TYPE (ptr) == new_type)
	  || (ref && TREE_TYPE (ref) == new_type))
	return;

      /* Chain PTR and its variants at the end.  */
      new_ptr = TYPE_POINTER_TO (new_type);
      if (new_ptr)
	{
	  while (TYPE_NEXT_PTR_TO (new_ptr))
	    new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
	  TYPE_NEXT_PTR_TO (new_ptr) = ptr;
	}
      else
	TYPE_POINTER_TO (new_type) = ptr;

      /* Now adjust them.  Walk every pointer to OLD_TYPE and, for each,
	 every variant of that pointer type.  */
      for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
	for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
	  {
	    TREE_TYPE (t) = new_type;
	    /* Also retarget the cached null-bounds object, if any, since
	       its address is typed with this pointer type.  */
	    if (TYPE_NULL_BOUNDS (t))
	      TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
	  }

      /* Chain REF and its variants at the end.  */
      new_ref = TYPE_REFERENCE_TO (new_type);
      if (new_ref)
	{
	  while (TYPE_NEXT_REF_TO (new_ref))
	    new_ref = TYPE_NEXT_REF_TO (new_ref);
	  TYPE_NEXT_REF_TO (new_ref) = ref;
	}
      else
	TYPE_REFERENCE_TO (new_type) = ref;

      /* Now adjust them.  */
      for (; ref; ref = TYPE_NEXT_REF_TO (ref))
	for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
	  TREE_TYPE (t) = new_type;

      /* OLD_TYPE no longer owns these chains.  */
      TYPE_POINTER_TO (old_type) = NULL_TREE;
      TYPE_REFERENCE_TO (old_type) = NULL_TREE;
    }

  /* Now deal with the unconstrained array case.  In this case the pointer
     is actually a record where both fields are pointers to dummy nodes.
     Turn them into pointers to the correct types using update_pointer_to.
     Likewise for the pointer to the object record (thin pointer).  */
  else
    {
      tree new_ptr = TYPE_POINTER_TO (new_type);

      gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));

      /* If PTR already points to NEW_TYPE, nothing to do.  This can happen
	 since update_pointer_to can be invoked multiple times on the same
	 couple of types because of the type variants.  */
      if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
	return;

      /* First field of the fat pointer: pointer to the array.  */
      update_pointer_to
	(TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
	 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));

      /* Second field of the fat pointer: pointer to the template.  */
      update_pointer_to
	(TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
	 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));

      /* And the object record type used by thin pointers.  */
      update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
			 TYPE_OBJECT_RECORD_TYPE (new_type));

      TYPE_POINTER_TO (old_type) = NULL_TREE;
    }
}
3850\f
8df2e902
EB
/* Convert EXPR, a pointer to a constrained array, into a pointer to an
   unconstrained one.  This involves making or finding a template.

   TYPE is the fat pointer type to convert to: its first field is the
   pointer to the array and its second field the pointer to the bounds
   template.  */

static tree
convert_to_fat_pointer (tree type, tree expr)
{
  tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
  tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
  tree etype = TREE_TYPE (expr);
  tree template_addr;
  vec<constructor_elt, va_gc> *v;
  vec_alloc (v, 2);

  /* If EXPR is null, make a fat pointer that contains a null pointer to the
     array (compare_fat_pointers ensures that this is the full discriminant)
     and a valid pointer to the bounds.  This latter property is necessary
     since the compiler can hoist the load of the bounds done through it.  */
  if (integer_zerop (expr))
    {
      tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
      tree null_bounds, t;

      /* Reuse the cached null-bounds object if one was already built for
	 this pointer-to-template type.  */
      if (TYPE_NULL_BOUNDS (ptr_template_type))
	null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
      else
	{
	  /* The template type can still be dummy at this point so we build an
	     empty constructor.  The middle-end will fill it in with zeros.  */
	  t = build_constructor (template_type, NULL);
	  TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
	  null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
	  /* Cache it for subsequent null conversions to this type.  */
	  SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
	}

      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
			      fold_convert (p_array_type, null_pointer_node));
      CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
      t = build_constructor (type, v);
      /* Do not set TREE_CONSTANT so as to force T to static memory.  */
      TREE_CONSTANT (t) = 0;
      TREE_STATIC (t) = 1;

      return t;
    }

  /* If EXPR is a thin pointer, make template and data from the record.  */
  if (TYPE_IS_THIN_POINTER_P (etype))
    {
      tree field = TYPE_FIELDS (TREE_TYPE (etype));

      /* EXPR is used twice below, so protect it against reevaluation.  */
      expr = gnat_protect_expr (expr);

      /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
	 the thin pointer value has been shifted so we shift it back to get
	 the template address.  */
      if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)))
	{
	  template_addr
	    = build_binary_op (POINTER_PLUS_EXPR, etype, expr,
			       fold_build1 (NEGATE_EXPR, sizetype,
					    byte_position
					    (DECL_CHAIN (field))));
	  template_addr
	    = fold_convert (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))),
			    template_addr);
	}

      /* Otherwise we explicitly take the address of the fields.  */
      else
	{
	  expr = build_unary_op (INDIRECT_REF, NULL_TREE, expr);
	  template_addr
	    = build_unary_op (ADDR_EXPR, NULL_TREE,
			      build_component_ref (expr, NULL_TREE, field,
						   false));
	  expr = build_unary_op (ADDR_EXPR, NULL_TREE,
				 build_component_ref (expr, NULL_TREE,
						      DECL_CHAIN (field),
						      false));
	}
    }

  /* Otherwise, build the constructor for the template.  */
  else
    template_addr
      = build_unary_op (ADDR_EXPR, NULL_TREE,
			build_template (template_type, TREE_TYPE (etype),
					expr));

  /* The final result is a constructor for the fat pointer.

     If EXPR is an argument of a foreign convention subprogram, the type it
     points to is directly the component type.  In this case, the expression
     type may not match the corresponding FIELD_DECL type at this point, so we
     call "convert" here to fix that up if necessary.  This type consistency is
     required, for instance because it ensures that possible later folding of
     COMPONENT_REFs against this constructor always yields something of the
     same type as the initial reference.

     Note that the call to "build_template" above is still fine because it
     will only refer to the provided TEMPLATE_TYPE in this case.  */
  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), convert (p_array_type, expr));
  CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), template_addr);
  return gnat_build_constructor (type, v);
}
3956\f
a1ab4c31
AC
3957/* Create an expression whose value is that of EXPR,
3958 converted to type TYPE. The TREE_TYPE of the value
3959 is always TYPE. This function implements all reasonable
3960 conversions; callers should filter out those that are
3961 not permitted by the language being compiled. */
3962
3963tree
3964convert (tree type, tree expr)
3965{
a1ab4c31
AC
3966 tree etype = TREE_TYPE (expr);
3967 enum tree_code ecode = TREE_CODE (etype);
c34f3839 3968 enum tree_code code = TREE_CODE (type);
a1ab4c31 3969
c34f3839
EB
3970 /* If the expression is already of the right type, we are done. */
3971 if (etype == type)
a1ab4c31
AC
3972 return expr;
3973
3974 /* If both input and output have padding and are of variable size, do this
3975 as an unchecked conversion. Likewise if one is a mere variant of the
3976 other, so we avoid a pointless unpad/repad sequence. */
3977 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
315cff15 3978 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
a1ab4c31
AC
3979 && (!TREE_CONSTANT (TYPE_SIZE (type))
3980 || !TREE_CONSTANT (TYPE_SIZE (etype))
842d4ee2 3981 || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
a1ab4c31
AC
3982 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
3983 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
3984 ;
3985
f88facfe
EB
3986 /* If the output type has padding, convert to the inner type and make a
3987 constructor to build the record, unless a variable size is involved. */
315cff15 3988 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
a1ab4c31 3989 {
9771b263 3990 vec<constructor_elt, va_gc> *v;
0e228dd9 3991
a1ab4c31
AC
3992 /* If we previously converted from another type and our type is
3993 of variable size, remove the conversion to avoid the need for
f88facfe 3994 variable-sized temporaries. Likewise for a conversion between
a1ab4c31
AC
3995 original and packable version. */
3996 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3997 && (!TREE_CONSTANT (TYPE_SIZE (type))
3998 || (ecode == RECORD_TYPE
3999 && TYPE_NAME (etype)
4000 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
4001 expr = TREE_OPERAND (expr, 0);
4002
4003 /* If we are just removing the padding from expr, convert the original
4004 object if we have variable size in order to avoid the need for some
f88facfe 4005 variable-sized temporaries. Likewise if the padding is a variant
a1ab4c31
AC
4006 of the other, so we avoid a pointless unpad/repad sequence. */
4007 if (TREE_CODE (expr) == COMPONENT_REF
a1ab4c31
AC
4008 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
4009 && (!TREE_CONSTANT (TYPE_SIZE (type))
842d4ee2
EB
4010 || TYPE_MAIN_VARIANT (type)
4011 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))
a1ab4c31
AC
4012 || (ecode == RECORD_TYPE
4013 && TYPE_NAME (etype)
4014 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
4015 return convert (type, TREE_OPERAND (expr, 0));
4016
431cfac1
EB
4017 /* If the inner type is of self-referential size and the expression type
4018 is a record, do this as an unchecked conversion. But first pad the
4019 expression if possible to have the same size on both sides. */
c34f3839 4020 if (ecode == RECORD_TYPE
f88facfe 4021 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
431cfac1 4022 {
980a0501 4023 if (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST)
431cfac1 4024 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
980a0501
EB
4025 false, false, false, true),
4026 expr);
431cfac1
EB
4027 return unchecked_convert (type, expr, false);
4028 }
a1ab4c31 4029
f88facfe
EB
4030 /* If we are converting between array types with variable size, do the
4031 final conversion as an unchecked conversion, again to avoid the need
4032 for some variable-sized temporaries. If valid, this conversion is
4033 very likely purely technical and without real effects. */
c34f3839 4034 if (ecode == ARRAY_TYPE
f88facfe
EB
4035 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
4036 && !TREE_CONSTANT (TYPE_SIZE (etype))
4037 && !TREE_CONSTANT (TYPE_SIZE (type)))
4038 return unchecked_convert (type,
4039 convert (TREE_TYPE (TYPE_FIELDS (type)),
4040 expr),
4041 false);
4042
9771b263 4043 vec_alloc (v, 1);
0e228dd9
NF
4044 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4045 convert (TREE_TYPE (TYPE_FIELDS (type)), expr));
4046 return gnat_build_constructor (type, v);
a1ab4c31
AC
4047 }
4048
4049 /* If the input type has padding, remove it and convert to the output type.
4050 The conditions ordering is arranged to ensure that the output type is not
4051 a padding type here, as it is not clear whether the conversion would
4052 always be correct if this was to happen. */
315cff15 4053 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
a1ab4c31
AC
4054 {
4055 tree unpadded;
4056
4057 /* If we have just converted to this padded type, just get the
4058 inner expression. */
2117b9bb
EB
4059 if (TREE_CODE (expr) == CONSTRUCTOR)
4060 unpadded = CONSTRUCTOR_ELT (expr, 0)->value;
a1ab4c31
AC
4061
4062 /* Otherwise, build an explicit component reference. */
4063 else
4064 unpadded
4065 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
4066
4067 return convert (type, unpadded);
4068 }
4069
4070 /* If the input is a biased type, adjust first. */
4071 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4072 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
1eb58520 4073 fold_convert (TREE_TYPE (etype), expr),
a1ab4c31 4074 fold_convert (TREE_TYPE (etype),
1eb58520 4075 TYPE_MIN_VALUE (etype))));
a1ab4c31
AC
4076
4077 /* If the input is a justified modular type, we need to extract the actual
4078 object before converting it to any other type with the exceptions of an
4079 unconstrained array or of a mere type variant. It is useful to avoid the
4080 extraction and conversion in the type variant case because it could end
4081 up replacing a VAR_DECL expr by a constructor and we might be about the
4082 take the address of the result. */
4083 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
4084 && code != UNCONSTRAINED_ARRAY_TYPE
4085 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
4086 return convert (type, build_component_ref (expr, NULL_TREE,
4087 TYPE_FIELDS (etype), false));
4088
4089 /* If converting to a type that contains a template, convert to the data
4090 type and then build the template. */
4091 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
4092 {
910ad8de 4093 tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
9771b263
DN
4094 vec<constructor_elt, va_gc> *v;
4095 vec_alloc (v, 2);
a1ab4c31
AC
4096
4097 /* If the source already has a template, get a reference to the
4098 associated array only, as we are going to rebuild a template
4099 for the target type anyway. */
4100 expr = maybe_unconstrained_array (expr);
4101
0e228dd9
NF
4102 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4103 build_template (TREE_TYPE (TYPE_FIELDS (type)),
4104 obj_type, NULL_TREE));
73a1a803
EB
4105 if (expr)
4106 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
4107 convert (obj_type, expr));
0e228dd9 4108 return gnat_build_constructor (type, v);
a1ab4c31
AC
4109 }
4110
a1c7d797 4111 /* There are some cases of expressions that we process specially. */
a1ab4c31
AC
4112 switch (TREE_CODE (expr))
4113 {
4114 case ERROR_MARK:
4115 return expr;
4116
4117 case NULL_EXPR:
4118 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4119 conversion in gnat_expand_expr. NULL_EXPR does not represent
4120 and actual value, so no conversion is needed. */
4121 expr = copy_node (expr);
4122 TREE_TYPE (expr) = type;
4123 return expr;
4124
4125 case STRING_CST:
4126 /* If we are converting a STRING_CST to another constrained array type,
4127 just make a new one in the proper type. */
4128 if (code == ecode && AGGREGATE_TYPE_P (etype)
4129 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
4130 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
4131 {
4132 expr = copy_node (expr);
4133 TREE_TYPE (expr) = type;
4134 return expr;
4135 }
4136 break;
4137
7948ae37 4138 case VECTOR_CST:
44e9e3ec 4139 /* If we are converting a VECTOR_CST to a mere type variant, just make
7948ae37
OH
4140 a new one in the proper type. */
4141 if (code == ecode && gnat_types_compatible_p (type, etype))
4142 {
4143 expr = copy_node (expr);
4144 TREE_TYPE (expr) = type;
4145 return expr;
4146 }
4147
a1ab4c31 4148 case CONSTRUCTOR:
44e9e3ec
EB
4149 /* If we are converting a CONSTRUCTOR to a mere type variant, or to
4150 another padding type around the same type, just make a new one in
4151 the proper type. */
4152 if (code == ecode
4153 && (gnat_types_compatible_p (type, etype)
4154 || (code == RECORD_TYPE
4155 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4156 && TREE_TYPE (TYPE_FIELDS (type))
4157 == TREE_TYPE (TYPE_FIELDS (etype)))))
a1ab4c31
AC
4158 {
4159 expr = copy_node (expr);
4160 TREE_TYPE (expr) = type;
9771b263 4161 CONSTRUCTOR_ELTS (expr) = vec_safe_copy (CONSTRUCTOR_ELTS (expr));
a1ab4c31
AC
4162 return expr;
4163 }
4164
cb3d597d
EB
4165 /* Likewise for a conversion between original and packable version, or
4166 conversion between types of the same size and with the same list of
4167 fields, but we have to work harder to preserve type consistency. */
a1ab4c31
AC
4168 if (code == ecode
4169 && code == RECORD_TYPE
cb3d597d
EB
4170 && (TYPE_NAME (type) == TYPE_NAME (etype)
4171 || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
4172
a1ab4c31 4173 {
9771b263
DN
4174 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4175 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4176 vec<constructor_elt, va_gc> *v;
4177 vec_alloc (v, len);
a1ab4c31
AC
4178 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4179 unsigned HOST_WIDE_INT idx;
4180 tree index, value;
4181
db868e1e
OH
4182 /* Whether we need to clear TREE_CONSTANT et al. on the output
4183 constructor when we convert in place. */
4184 bool clear_constant = false;
4185
a1ab4c31
AC
4186 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
4187 {
44e9e3ec
EB
4188 /* Skip the missing fields in the CONSTRUCTOR. */
4189 while (efield && field && !SAME_FIELD_P (efield, index))
4190 {
4191 efield = DECL_CHAIN (efield);
4192 field = DECL_CHAIN (field);
4193 }
cb3d597d 4194 /* The field must be the same. */
44e9e3ec 4195 if (!(efield && field && SAME_FIELD_P (efield, field)))
a1ab4c31 4196 break;
44e9e3ec
EB
4197 constructor_elt elt
4198 = {field, convert (TREE_TYPE (field), value)};
9771b263 4199 v->quick_push (elt);
db868e1e
OH
4200
4201 /* If packing has made this field a bitfield and the input
4202 value couldn't be emitted statically any more, we need to
4203 clear TREE_CONSTANT on our output. */
ced57283
EB
4204 if (!clear_constant
4205 && TREE_CONSTANT (expr)
db868e1e
OH
4206 && !CONSTRUCTOR_BITFIELD_P (efield)
4207 && CONSTRUCTOR_BITFIELD_P (field)
4208 && !initializer_constant_valid_for_bitfield_p (value))
4209 clear_constant = true;
4210
910ad8de
NF
4211 efield = DECL_CHAIN (efield);
4212 field = DECL_CHAIN (field);
a1ab4c31
AC
4213 }
4214
db868e1e
OH
4215 /* If we have been able to match and convert all the input fields
4216 to their output type, convert in place now. We'll fallback to a
4217 view conversion downstream otherwise. */
a1ab4c31
AC
4218 if (idx == len)
4219 {
4220 expr = copy_node (expr);
4221 TREE_TYPE (expr) = type;
4222 CONSTRUCTOR_ELTS (expr) = v;
db868e1e 4223 if (clear_constant)
ced57283 4224 TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
a1ab4c31
AC
4225 return expr;
4226 }
4227 }
7948ae37
OH
4228
4229 /* Likewise for a conversion between array type and vector type with a
4230 compatible representative array. */
4231 else if (code == VECTOR_TYPE
4232 && ecode == ARRAY_TYPE
4233 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4234 etype))
4235 {
9771b263
DN
4236 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4237 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4238 vec<constructor_elt, va_gc> *v;
7948ae37
OH
4239 unsigned HOST_WIDE_INT ix;
4240 tree value;
4241
4242 /* Build a VECTOR_CST from a *constant* array constructor. */
4243 if (TREE_CONSTANT (expr))
4244 {
4245 bool constant_p = true;
4246
4247 /* Iterate through elements and check if all constructor
4248 elements are *_CSTs. */
4249 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4250 if (!CONSTANT_CLASS_P (value))
4251 {
4252 constant_p = false;
4253 break;
4254 }
4255
4256 if (constant_p)
4257 return build_vector_from_ctor (type,
4258 CONSTRUCTOR_ELTS (expr));
4259 }
4260
4261 /* Otherwise, build a regular vector constructor. */
9771b263 4262 vec_alloc (v, len);
7948ae37
OH
4263 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4264 {
f32682ca 4265 constructor_elt elt = {NULL_TREE, value};
9771b263 4266 v->quick_push (elt);
7948ae37
OH
4267 }
4268 expr = copy_node (expr);
4269 TREE_TYPE (expr) = type;
4270 CONSTRUCTOR_ELTS (expr) = v;
4271 return expr;
4272 }
a1ab4c31
AC
4273 break;
4274
4275 case UNCONSTRAINED_ARRAY_REF:
a1c7d797
EB
4276 /* First retrieve the underlying array. */
4277 expr = maybe_unconstrained_array (expr);
4278 etype = TREE_TYPE (expr);
4279 ecode = TREE_CODE (etype);
4280 break;
a1ab4c31
AC
4281
4282 case VIEW_CONVERT_EXPR:
4283 {
4284 /* GCC 4.x is very sensitive to type consistency overall, and view
4285 conversions thus are very frequent. Even though just "convert"ing
4286 the inner operand to the output type is fine in most cases, it
4287 might expose unexpected input/output type mismatches in special
4288 circumstances so we avoid such recursive calls when we can. */
4289 tree op0 = TREE_OPERAND (expr, 0);
4290
4291 /* If we are converting back to the original type, we can just
4292 lift the input conversion. This is a common occurrence with
4293 switches back-and-forth amongst type variants. */
4294 if (type == TREE_TYPE (op0))
4295 return op0;
4296
7948ae37
OH
4297 /* Otherwise, if we're converting between two aggregate or vector
4298 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4299 target type in place or to just convert the inner expression. */
4300 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4301 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
a1ab4c31
AC
4302 {
4303 /* If we are converting between mere variants, we can just
4304 substitute the VIEW_CONVERT_EXPR in place. */
4305 if (gnat_types_compatible_p (type, etype))
4306 return build1 (VIEW_CONVERT_EXPR, type, op0);
4307
4308 /* Otherwise, we may just bypass the input view conversion unless
4309 one of the types is a fat pointer, which is handled by
4310 specialized code below which relies on exact type matching. */
315cff15
EB
4311 else if (!TYPE_IS_FAT_POINTER_P (type)
4312 && !TYPE_IS_FAT_POINTER_P (etype))
a1ab4c31
AC
4313 return convert (type, op0);
4314 }
ad1d36ba
EB
4315
4316 break;
a1ab4c31 4317 }
a1ab4c31 4318
a1ab4c31
AC
4319 default:
4320 break;
4321 }
4322
4323 /* Check for converting to a pointer to an unconstrained array. */
315cff15 4324 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
a1ab4c31
AC
4325 return convert_to_fat_pointer (type, expr);
4326
7948ae37
OH
4327 /* If we are converting between two aggregate or vector types that are mere
4328 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4329 to a vector type from its representative array type. */
4330 else if ((code == ecode
4331 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4332 && gnat_types_compatible_p (type, etype))
4333 || (code == VECTOR_TYPE
4334 && ecode == ARRAY_TYPE
4335 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4336 etype)))
a1ab4c31
AC
4337 return build1 (VIEW_CONVERT_EXPR, type, expr);
4338
76af763d
EB
4339 /* If we are converting between tagged types, try to upcast properly. */
4340 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4341 && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type))
4342 {
4343 tree child_etype = etype;
4344 do {
4345 tree field = TYPE_FIELDS (child_etype);
4346 if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
4347 return build_component_ref (expr, NULL_TREE, field, false);
4348 child_etype = TREE_TYPE (field);
4349 } while (TREE_CODE (child_etype) == RECORD_TYPE);
4350 }
4351
bb1f7929
EB
4352 /* If we are converting from a smaller form of record type back to it, just
4353 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4354 size on both sides. */
4355 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4356 && smaller_form_type_p (etype, type))
4357 {
4358 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4359 false, false, false, true),
4360 expr);
4361 return build1 (VIEW_CONVERT_EXPR, type, expr);
4362 }
4363
a1ab4c31 4364 /* In all other cases of related types, make a NOP_EXPR. */
86060344 4365 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
a1ab4c31
AC
4366 return fold_convert (type, expr);
4367
4368 switch (code)
4369 {
4370 case VOID_TYPE:
4371 return fold_build1 (CONVERT_EXPR, type, expr);
4372
a1ab4c31
AC
4373 case INTEGER_TYPE:
4374 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4375 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4376 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4377 return unchecked_convert (type, expr, false);
4378 else if (TYPE_BIASED_REPRESENTATION_P (type))
4379 return fold_convert (type,
4380 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4381 convert (TREE_TYPE (type), expr),
1eb58520
AC
4382 convert (TREE_TYPE (type),
4383 TYPE_MIN_VALUE (type))));
a1ab4c31
AC
4384
4385 /* ... fall through ... */
4386
4387 case ENUMERAL_TYPE:
01ddebf2 4388 case BOOLEAN_TYPE:
a1ab4c31
AC
4389 /* If we are converting an additive expression to an integer type
4390 with lower precision, be wary of the optimization that can be
4391 applied by convert_to_integer. There are 2 problematic cases:
4392 - if the first operand was originally of a biased type,
4393 because we could be recursively called to convert it
4394 to an intermediate type and thus rematerialize the
4395 additive operator endlessly,
4396 - if the expression contains a placeholder, because an
4397 intermediate conversion that changes the sign could
4398 be inserted and thus introduce an artificial overflow
4399 at compile time when the placeholder is substituted. */
4400 if (code == INTEGER_TYPE
4401 && ecode == INTEGER_TYPE
4402 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4403 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4404 {
4405 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4406
4407 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4408 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4409 || CONTAINS_PLACEHOLDER_P (expr))
4410 return build1 (NOP_EXPR, type, expr);
4411 }
4412
4413 return fold (convert_to_integer (type, expr));
4414
4415 case POINTER_TYPE:
4416 case REFERENCE_TYPE:
0d7de0e1 4417 /* If converting between two thin pointers, adjust if needed to account
2b45154d
EB
4418 for differing offsets from the base pointer, depending on whether
4419 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type. */
315cff15 4420 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
a1ab4c31 4421 {
2b45154d
EB
4422 tree etype_pos
4423 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)) != NULL_TREE
4424 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype))))
4425 : size_zero_node;
4426 tree type_pos
4427 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != NULL_TREE
4428 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type))))
4429 : size_zero_node;
4430 tree byte_diff = size_diffop (type_pos, etype_pos);
0d7de0e1 4431
a1ab4c31 4432 expr = build1 (NOP_EXPR, type, expr);
a1ab4c31
AC
4433 if (integer_zerop (byte_diff))
4434 return expr;
4435
4436 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
0d7de0e1 4437 fold_convert (sizetype, byte_diff));
a1ab4c31
AC
4438 }
4439
0d7de0e1
EB
4440 /* If converting fat pointer to normal or thin pointer, get the pointer
4441 to the array and then convert it. */
4442 if (TYPE_IS_FAT_POINTER_P (etype))
86060344
EB
4443 expr
4444 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
a1ab4c31
AC
4445
4446 return fold (convert_to_pointer (type, expr));
4447
4448 case REAL_TYPE:
4449 return fold (convert_to_real (type, expr));
4450
4451 case RECORD_TYPE:
4452 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
0e228dd9 4453 {
9771b263
DN
4454 vec<constructor_elt, va_gc> *v;
4455 vec_alloc (v, 1);
0e228dd9
NF
4456
4457 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4458 convert (TREE_TYPE (TYPE_FIELDS (type)),
4459 expr));
4460 return gnat_build_constructor (type, v);
4461 }
a1ab4c31
AC
4462
4463 /* ... fall through ... */
4464
4465 case ARRAY_TYPE:
4466 /* In these cases, assume the front-end has validated the conversion.
4467 If the conversion is valid, it will be a bit-wise conversion, so
4468 it can be viewed as an unchecked conversion. */
4469 return unchecked_convert (type, expr, false);
4470
4471 case UNION_TYPE:
4472 /* This is a either a conversion between a tagged type and some
4473 subtype, which we have to mark as a UNION_TYPE because of
4474 overlapping fields or a conversion of an Unchecked_Union. */
4475 return unchecked_convert (type, expr, false);
4476
4477 case UNCONSTRAINED_ARRAY_TYPE:
7948ae37
OH
4478 /* If the input is a VECTOR_TYPE, convert to the representative
4479 array type first. */
4480 if (ecode == VECTOR_TYPE)
4481 {
4482 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
4483 etype = TREE_TYPE (expr);
4484 ecode = TREE_CODE (etype);
4485 }
4486
a1ab4c31
AC
4487 /* If EXPR is a constrained array, take its address, convert it to a
4488 fat pointer, and then dereference it. Likewise if EXPR is a
4489 record containing both a template and a constrained array.
4490 Note that a record representing a justified modular type
4491 always represents a packed constrained array. */
4492 if (ecode == ARRAY_TYPE
4493 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4494 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4495 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4496 return
4497 build_unary_op
4498 (INDIRECT_REF, NULL_TREE,
4499 convert_to_fat_pointer (TREE_TYPE (type),
4500 build_unary_op (ADDR_EXPR,
4501 NULL_TREE, expr)));
4502
4503 /* Do something very similar for converting one unconstrained
4504 array to another. */
4505 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4506 return
4507 build_unary_op (INDIRECT_REF, NULL_TREE,
4508 convert (TREE_TYPE (type),
4509 build_unary_op (ADDR_EXPR,
4510 NULL_TREE, expr)));
4511 else
4512 gcc_unreachable ();
4513
4514 case COMPLEX_TYPE:
4515 return fold (convert_to_complex (type, expr));
4516
4517 default:
4518 gcc_unreachable ();
4519 }
4520}
15bf7d19
EB
4521
4522/* Create an expression whose value is that of EXPR converted to the common
4523 index type, which is sizetype. EXPR is supposed to be in the base type
4524 of the GNAT index type. Calling it is equivalent to doing
4525
4526 convert (sizetype, expr)
4527
4528 but we try to distribute the type conversion with the knowledge that EXPR
4529 cannot overflow in its type. This is a best-effort approach and we fall
4530 back to the above expression as soon as difficulties are encountered.
4531
4532 This is necessary to overcome issues that arise when the GNAT base index
4533 type and the GCC common index type (sizetype) don't have the same size,
4534 which is quite frequent on 64-bit architectures. In this case, and if
4535 the GNAT base index type is signed but the iteration type of the loop has
4536 been forced to unsigned, the loop scalar evolution engine cannot compute
4537 a simple evolution for the general induction variables associated with the
4538 array indices, because it will preserve the wrap-around semantics in the
4539 unsigned type of their "inner" part. As a result, many loop optimizations
4540 are blocked.
4541
4542 The solution is to use a special (basic) induction variable that is at
4543 least as large as sizetype, and to express the aforementioned general
4544 induction variables in terms of this induction variable, eliminating
4545 the problematic intermediate truncation to the GNAT base index type.
4546 This is possible as long as the original expression doesn't overflow
4547 and if the middle-end hasn't introduced artificial overflows in the
4548 course of the various simplification it can make to the expression. */
4549
tree
convert_to_index_type (tree expr)
{
  enum tree_code code = TREE_CODE (expr);
  tree type = TREE_TYPE (expr);

  /* If the type is unsigned, overflow is allowed so we cannot be sure that
     EXPR doesn't overflow.  Keep it simple if optimization is disabled.  */
  if (TYPE_UNSIGNED (type) || !optimize)
    return convert (sizetype, expr);

  switch (code)
    {
    case VAR_DECL:
      /* The main effect of the function: replace a loop parameter with its
	 associated special induction variable.  */
      if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
	expr = DECL_INDUCTION_VAR (expr);
      break;

    CASE_CONVERT:
      {
	tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
	/* Bail out as soon as we suspect some sort of type frobbing.  Note
	   that this "break" exits the whole switch, skipping the fall
	   through below, so we end up doing the plain conversion at the
	   bottom of the function.  */
	if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
	    || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
	  break;
      }

      /* ... fall through ... */

    case NON_LVALUE_EXPR:
      /* Distribute the conversion into the single operand.  */
      return fold_build1 (code, sizetype,
			  convert_to_index_type (TREE_OPERAND (expr, 0)));

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      /* Distribute the conversion into both operands of the arithmetic
	 operation; valid because EXPR is known not to overflow here.  */
      return fold_build2 (code, sizetype,
			  convert_to_index_type (TREE_OPERAND (expr, 0)),
			  convert_to_index_type (TREE_OPERAND (expr, 1)));

    case COMPOUND_EXPR:
      /* Only the value operand (the second one) needs converting.  */
      return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
			  convert_to_index_type (TREE_OPERAND (expr, 1)));

    case COND_EXPR:
      /* Convert both arms; the condition operand is left untouched.  */
      return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
			  convert_to_index_type (TREE_OPERAND (expr, 1)),
			  convert_to_index_type (TREE_OPERAND (expr, 2)));

    default:
      break;
    }

  /* Fallback: the plain conversion documented in the function header.  */
  return convert (sizetype, expr);
}
a1ab4c31
AC
4607\f
4608/* Remove all conversions that are done in EXP. This includes converting
4609 from a padded type or to a justified modular type. If TRUE_ADDRESS
4610 is true, always return the address of the containing object even if
4611 the address is not bit-aligned. */
4612
4613tree
4614remove_conversions (tree exp, bool true_address)
4615{
4616 switch (TREE_CODE (exp))
4617 {
4618 case CONSTRUCTOR:
4619 if (true_address
4620 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4621 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4622 return
2117b9bb 4623 remove_conversions (CONSTRUCTOR_ELT (exp, 0)->value, true);
a1ab4c31
AC
4624 break;
4625
4626 case COMPONENT_REF:
315cff15 4627 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
a1ab4c31
AC
4628 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4629 break;
4630
a1ab4c31 4631 CASE_CONVERT:
722356ce
EB
4632 case VIEW_CONVERT_EXPR:
4633 case NON_LVALUE_EXPR:
a1ab4c31
AC
4634 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4635
4636 default:
4637 break;
4638 }
4639
4640 return exp;
4641}
4642\f
4643/* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
86060344 4644 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
a1ab4c31
AC
4645 likewise return an expression pointing to the underlying array. */
4646
tree
maybe_unconstrained_array (tree exp)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE (type))
    {
    case UNCONSTRAINED_ARRAY_TYPE:
      if (code == UNCONSTRAINED_ARRAY_REF)
	{
	  /* Save the flags of the reference so they can be restored on the
	     dereference we build below.  */
	  const bool read_only = TREE_READONLY (exp);
	  const bool no_trap = TREE_THIS_NOTRAP (exp);

	  /* Operand 0 is the fat pointer; TYPE_FIELDS (type) below is its
	     first field, i.e. the pointer to the array data.  */
	  exp = TREE_OPERAND (exp, 0);
	  type = TREE_TYPE (exp);

	  if (TREE_CODE (exp) == COND_EXPR)
	    {
	      /* Push the dereference into both arms of the COND_EXPR so
		 the result is a conditional on the underlying arrays.  */
	      tree op1
		= build_unary_op (INDIRECT_REF, NULL_TREE,
				  build_component_ref (TREE_OPERAND (exp, 1),
						       NULL_TREE,
						       TYPE_FIELDS (type),
						       false));
	      tree op2
		= build_unary_op (INDIRECT_REF, NULL_TREE,
				  build_component_ref (TREE_OPERAND (exp, 2),
						       NULL_TREE,
						       TYPE_FIELDS (type),
						       false));

	      exp = build3 (COND_EXPR,
			    TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
			    TREE_OPERAND (exp, 0), op1, op2);
	    }
	  else
	    {
	      /* Fetch the pointer-to-array field and dereference it, then
		 carry over the flags saved above.  */
	      exp = build_unary_op (INDIRECT_REF, NULL_TREE,
				    build_component_ref (exp, NULL_TREE,
							 TYPE_FIELDS (type),
							 false));
	      TREE_READONLY (exp) = read_only;
	      TREE_THIS_NOTRAP (exp) = no_trap;
	    }
	}

      else if (code == NULL_EXPR)
	/* Rebuild the NULL_EXPR with the underlying array type, obtained
	   by looking through the fat pointer's first field.  */
	exp = build1 (NULL_EXPR,
		      TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
		      TREE_OPERAND (exp, 0));
      break;

    case RECORD_TYPE:
      /* If this is a padded type and it contains a template, convert to the
	 unpadded type first.  */
      if (TYPE_PADDING_P (type)
	  && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
	  && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
	{
	  exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
	  type = TREE_TYPE (exp);
	}

      if (TYPE_CONTAINS_TEMPLATE_P (type))
	{
	  /* The array is the second field, after the template.  Note that
	     build_simple_component_ref may return NULL_TREE, in which case
	     EXP is returned unchanged below.  */
	  exp = build_simple_component_ref (exp, NULL_TREE,
					    DECL_CHAIN (TYPE_FIELDS (type)),
					    false);

	  /* If the array type is padded, convert to the unpadded type.  */
	  if (exp && TYPE_IS_PADDING_P (TREE_TYPE (exp)))
	    exp = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
	}
      break;

    default:
      break;
    }

  return exp;
}
4729\f
afcea859 4730/* Return true if EXPR is an expression that can be folded as an operand
84fb43a1 4731 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
afcea859
EB
4732
4733static bool
4734can_fold_for_view_convert_p (tree expr)
4735{
4736 tree t1, t2;
4737
4738 /* The folder will fold NOP_EXPRs between integral types with the same
4739 precision (in the middle-end's sense). We cannot allow it if the
4740 types don't have the same precision in the Ada sense as well. */
4741 if (TREE_CODE (expr) != NOP_EXPR)
4742 return true;
4743
4744 t1 = TREE_TYPE (expr);
4745 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
4746
4747 /* Defer to the folder for non-integral conversions. */
4748 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
4749 return true;
4750
4751 /* Only fold conversions that preserve both precisions. */
4752 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
4753 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
4754 return true;
4755
4756 return false;
4757}
4758
a1ab4c31 4759/* Return an expression that does an unchecked conversion of EXPR to TYPE.
afcea859
EB
4760 If NOTRUNC_P is true, truncation operations should be suppressed.
4761
4762 Special care is required with (source or target) integral types whose
4763 precision is not equal to their size, to make sure we fetch or assign
4764 the value bits whose location might depend on the endianness, e.g.
4765
4766 Rmsize : constant := 8;
4767 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
4768
4769 type Bit_Array is array (1 .. Rmsize) of Boolean;
4770 pragma Pack (Bit_Array);
4771
4772 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
4773
4774 Value : Int := 2#1000_0001#;
4775 Vbits : Bit_Array := To_Bit_Array (Value);
4776
4777 we expect the 8 bits at Vbits'Address to always contain Value, while
4778 their original location depends on the endianness, at Value'Address
84fb43a1 4779 on a little-endian architecture but not on a big-endian one. */
a1ab4c31
AC
4780
4781tree
4782unchecked_convert (tree type, tree expr, bool notrunc_p)
4783{
4784 tree etype = TREE_TYPE (expr);
c34f3839
EB
4785 enum tree_code ecode = TREE_CODE (etype);
4786 enum tree_code code = TREE_CODE (type);
e63b36bd 4787 tree tem;
980a0501 4788 int c;
a1ab4c31 4789
c34f3839 4790 /* If the expression is already of the right type, we are done. */
a1ab4c31
AC
4791 if (etype == type)
4792 return expr;
4793
4794 /* If both types types are integral just do a normal conversion.
4795 Likewise for a conversion to an unconstrained array. */
1eb58520 4796 if (((INTEGRAL_TYPE_P (type)
0d7de0e1 4797 || (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
c34f3839 4798 || (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
1eb58520 4799 && (INTEGRAL_TYPE_P (etype)
315cff15 4800 || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
c34f3839
EB
4801 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
4802 || code == UNCONSTRAINED_ARRAY_TYPE)
a1ab4c31 4803 {
c34f3839 4804 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
a1ab4c31
AC
4805 {
4806 tree ntype = copy_type (etype);
a1ab4c31
AC
4807 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
4808 TYPE_MAIN_VARIANT (ntype) = ntype;
4809 expr = build1 (NOP_EXPR, ntype, expr);
4810 }
4811
c34f3839 4812 if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
a1ab4c31 4813 {
afcea859 4814 tree rtype = copy_type (type);
a1ab4c31
AC
4815 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
4816 TYPE_MAIN_VARIANT (rtype) = rtype;
afcea859
EB
4817 expr = convert (rtype, expr);
4818 expr = build1 (NOP_EXPR, type, expr);
a1ab4c31 4819 }
afcea859
EB
4820 else
4821 expr = convert (type, expr);
a1ab4c31
AC
4822 }
4823
afcea859 4824 /* If we are converting to an integral type whose precision is not equal
416de7d5
EB
4825 to its size, first unchecked convert to a record type that contains an
4826 field of the given precision. Then extract the field. */
980a0501
EB
4827 else if (INTEGRAL_TYPE_P (type)
4828 && TYPE_RM_SIZE (type)
a1ab4c31
AC
4829 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4830 GET_MODE_BITSIZE (TYPE_MODE (type))))
4831 {
4832 tree rec_type = make_node (RECORD_TYPE);
416de7d5
EB
4833 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
4834 tree field_type, field;
4835
4836 if (TYPE_UNSIGNED (type))
4837 field_type = make_unsigned_type (prec);
4838 else
4839 field_type = make_signed_type (prec);
4840 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));
4841
4842 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
44e9e3ec 4843 NULL_TREE, bitsize_zero_node, 1, 0);
a1ab4c31 4844
44e9e3ec 4845 finish_record_type (rec_type, field, 1, false);
a1ab4c31
AC
4846
4847 expr = unchecked_convert (rec_type, expr, notrunc_p);
3cd64bab 4848 expr = build_component_ref (expr, NULL_TREE, field, false);
416de7d5 4849 expr = fold_build1 (NOP_EXPR, type, expr);
a1ab4c31
AC
4850 }
4851
416de7d5
EB
4852 /* Similarly if we are converting from an integral type whose precision is
4853 not equal to its size, first copy into a field of the given precision
4854 and unchecked convert the record type. */
980a0501
EB
4855 else if (INTEGRAL_TYPE_P (etype)
4856 && TYPE_RM_SIZE (etype)
4857 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
4858 GET_MODE_BITSIZE (TYPE_MODE (etype))))
a1ab4c31
AC
4859 {
4860 tree rec_type = make_node (RECORD_TYPE);
416de7d5 4861 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
9771b263
DN
4862 vec<constructor_elt, va_gc> *v;
4863 vec_alloc (v, 1);
416de7d5
EB
4864 tree field_type, field;
4865
4866 if (TYPE_UNSIGNED (etype))
4867 field_type = make_unsigned_type (prec);
4868 else
4869 field_type = make_signed_type (prec);
4870 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));
4871
4872 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
44e9e3ec 4873 NULL_TREE, bitsize_zero_node, 1, 0);
a1ab4c31 4874
44e9e3ec 4875 finish_record_type (rec_type, field, 1, false);
a1ab4c31 4876
416de7d5 4877 expr = fold_build1 (NOP_EXPR, field_type, expr);
0e228dd9
NF
4878 CONSTRUCTOR_APPEND_ELT (v, field, expr);
4879 expr = gnat_build_constructor (rec_type, v);
a1ab4c31
AC
4880 expr = unchecked_convert (type, expr, notrunc_p);
4881 }
4882
980a0501
EB
4883 /* If we are converting from a scalar type to a type with a different size,
4884 we need to pad to have the same size on both sides.
4885
4886 ??? We cannot do it unconditionally because unchecked conversions are
4887 used liberally by the front-end to implement polymorphism, e.g. in:
4888
4889 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
4890 return p___size__4 (p__object!(S191s.all));
4891
4892 so we skip all expressions that are references. */
4893 else if (!REFERENCE_CLASS_P (expr)
4894 && !AGGREGATE_TYPE_P (etype)
4895 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
4896 && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
4897 {
4898 if (c < 0)
4899 {
4900 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4901 false, false, false, true),
4902 expr);
4903 expr = unchecked_convert (type, expr, notrunc_p);
4904 }
4905 else
4906 {
4907 tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
4908 false, false, false, true);
4909 expr = unchecked_convert (rec_type, expr, notrunc_p);
4910 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (rec_type),
4911 false);
4912 }
4913 }
4914
7948ae37
OH
4915 /* We have a special case when we are converting between two unconstrained
4916 array types. In that case, take the address, convert the fat pointer
4917 types, and dereference. */
c34f3839 4918 else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
a1ab4c31
AC
4919 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
4920 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
4921 build_unary_op (ADDR_EXPR, NULL_TREE,
4922 expr)));
7948ae37
OH
4923
4924 /* Another special case is when we are converting to a vector type from its
4925 representative array type; this a regular conversion. */
c34f3839
EB
4926 else if (code == VECTOR_TYPE
4927 && ecode == ARRAY_TYPE
7948ae37
OH
4928 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4929 etype))
4930 expr = convert (type, expr);
4931
e63b36bd
EB
4932 /* And, if the array type is not the representative, we try to build an
4933 intermediate vector type of which the array type is the representative
4934 and to do the unchecked conversion between the vector types, in order
4935 to enable further simplifications in the middle-end. */
4936 else if (code == VECTOR_TYPE
4937 && ecode == ARRAY_TYPE
4938 && (tem = build_vector_type_for_array (etype, NULL_TREE)))
4939 {
4940 expr = convert (tem, expr);
4941 return unchecked_convert (type, expr, notrunc_p);
4942 }
4943
44e9e3ec
EB
4944 /* If we are converting a CONSTRUCTOR to a more aligned RECORD_TYPE, bump
4945 the alignment of the CONSTRUCTOR to speed up the copy operation. */
4946 else if (TREE_CODE (expr) == CONSTRUCTOR
4947 && code == RECORD_TYPE
4948 && TYPE_ALIGN (etype) < TYPE_ALIGN (type))
4949 {
4950 expr = convert (maybe_pad_type (etype, NULL_TREE, TYPE_ALIGN (type),
4951 Empty, false, false, false, true),
4952 expr);
4953 return unchecked_convert (type, expr, notrunc_p);
4954 }
4955
4956 /* Otherwise, just build a VIEW_CONVERT_EXPR of the expression. */
a1ab4c31
AC
4957 else
4958 {
4959 expr = maybe_unconstrained_array (expr);
4960 etype = TREE_TYPE (expr);
c34f3839 4961 ecode = TREE_CODE (etype);
afcea859
EB
4962 if (can_fold_for_view_convert_p (expr))
4963 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
4964 else
4965 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
a1ab4c31
AC
4966 }
4967
afcea859
EB
4968 /* If the result is an integral type whose precision is not equal to its
4969 size, sign- or zero-extend the result. We need not do this if the input
4970 is an integral type of the same precision and signedness or if the output
a1ab4c31
AC
4971 is a biased type or if both the input and output are unsigned. */
4972 if (!notrunc_p
4973 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
c34f3839 4974 && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
a1ab4c31
AC
4975 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4976 GET_MODE_BITSIZE (TYPE_MODE (type)))
4977 && !(INTEGRAL_TYPE_P (etype)
4978 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
4979 && operand_equal_p (TYPE_RM_SIZE (type),
4980 (TYPE_RM_SIZE (etype) != 0
4981 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
4982 0))
4983 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
4984 {
c34f3839
EB
4985 tree base_type
4986 = gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
a1ab4c31
AC
4987 tree shift_expr
4988 = convert (base_type,
4989 size_binop (MINUS_EXPR,
4990 bitsize_int
4991 (GET_MODE_BITSIZE (TYPE_MODE (type))),
4992 TYPE_RM_SIZE (type)));
4993 expr
4994 = convert (type,
4995 build_binary_op (RSHIFT_EXPR, base_type,
4996 build_binary_op (LSHIFT_EXPR, base_type,
4997 convert (base_type, expr),
4998 shift_expr),
4999 shift_expr));
5000 }
5001
5002 /* An unchecked conversion should never raise Constraint_Error. The code
5003 below assumes that GCC's conversion routines overflow the same way that
5004 the underlying hardware does. This is probably true. In the rare case
5005 when it is false, we can rely on the fact that such conversions are
5006 erroneous anyway. */
5007 if (TREE_CODE (expr) == INTEGER_CST)
5008 TREE_OVERFLOW (expr) = 0;
5009
5010 /* If the sizes of the types differ and this is an VIEW_CONVERT_EXPR,
5011 show no longer constant. */
5012 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
5013 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
5014 OEP_ONLY_CONST))
5015 TREE_CONSTANT (expr) = 0;
5016
5017 return expr;
5018}
5019\f
feec4372 5020/* Return the appropriate GCC tree code for the specified GNAT_TYPE,
a1ab4c31
AC
5021 the latter being a record type as predicated by Is_Record_Type. */
5022
5023enum tree_code
5024tree_code_for_record_type (Entity_Id gnat_type)
5025{
b1a785fb 5026 Node_Id component_list, component;
a1ab4c31 5027
b1a785fb
EB
5028 /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5029 fields are all in the variant part. Otherwise, return RECORD_TYPE. */
a1ab4c31
AC
5030 if (!Is_Unchecked_Union (gnat_type))
5031 return RECORD_TYPE;
5032
b1a785fb
EB
5033 gnat_type = Implementation_Base_Type (gnat_type);
5034 component_list
5035 = Component_List (Type_Definition (Declaration_Node (gnat_type)));
5036
a1ab4c31
AC
5037 for (component = First_Non_Pragma (Component_Items (component_list));
5038 Present (component);
5039 component = Next_Non_Pragma (component))
5040 if (Ekind (Defining_Entity (component)) == E_Component)
5041 return RECORD_TYPE;
5042
5043 return UNION_TYPE;
5044}
5045
caa9d12a
EB
5046/* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5047 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5048 according to the presence of an alignment clause on the type or, if it
5049 is an array, on the component type. */
5050
5051bool
5052is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
5053{
5054 gnat_type = Underlying_Type (gnat_type);
5055
5056 *align_clause = Present (Alignment_Clause (gnat_type));
5057
5058 if (Is_Array_Type (gnat_type))
5059 {
5060 gnat_type = Underlying_Type (Component_Type (gnat_type));
5061 if (Present (Alignment_Clause (gnat_type)))
5062 *align_clause = true;
5063 }
5064
5065 if (!Is_Floating_Point_Type (gnat_type))
5066 return false;
5067
5068 if (UI_To_Int (Esize (gnat_type)) != 64)
5069 return false;
5070
5071 return true;
5072}
5073
5074/* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5075 size is greater or equal to 64 bits, or an array of such a type. Set
5076 ALIGN_CLAUSE according to the presence of an alignment clause on the
5077 type or, if it is an array, on the component type. */
5078
5079bool
5080is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
5081{
5082 gnat_type = Underlying_Type (gnat_type);
5083
5084 *align_clause = Present (Alignment_Clause (gnat_type));
5085
5086 if (Is_Array_Type (gnat_type))
5087 {
5088 gnat_type = Underlying_Type (Component_Type (gnat_type));
5089 if (Present (Alignment_Clause (gnat_type)))
5090 *align_clause = true;
5091 }
5092
5093 if (!Is_Scalar_Type (gnat_type))
5094 return false;
5095
5096 if (UI_To_Int (Esize (gnat_type)) < 64)
5097 return false;
5098
5099 return true;
5100}
5101
a1ab4c31
AC
5102/* Return true if GNU_TYPE is suitable as the type of a non-aliased
5103 component of an aggregate type. */
5104
5105bool
5106type_for_nonaliased_component_p (tree gnu_type)
5107{
5108 /* If the type is passed by reference, we may have pointers to the
5109 component so it cannot be made non-aliased. */
5110 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
5111 return false;
5112
5113 /* We used to say that any component of aggregate type is aliased
5114 because the front-end may take 'Reference of it. The front-end
5115 has been enhanced in the meantime so as to use a renaming instead
5116 in most cases, but the back-end can probably take the address of
5117 such a component too so we go for the conservative stance.
5118
5119 For instance, we might need the address of any array type, even
5120 if normally passed by copy, to construct a fat pointer if the
5121 component is used as an actual for an unconstrained formal.
5122
5123 Likewise for record types: even if a specific record subtype is
5124 passed by copy, the parent type might be passed by ref (e.g. if
5125 it's of variable size) and we might take the address of a child
5126 component to pass to a parent formal. We have no way to check
5127 for such conditions here. */
5128 if (AGGREGATE_TYPE_P (gnu_type))
5129 return false;
5130
5131 return true;
5132}
5133
bb1f7929
EB
5134/* Return true if TYPE is a smaller form of ORIG_TYPE. */
5135
5136bool
5137smaller_form_type_p (tree type, tree orig_type)
5138{
5139 tree size, osize;
5140
5141 /* We're not interested in variants here. */
5142 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
5143 return false;
5144
5145 /* Like a variant, a packable version keeps the original TYPE_NAME. */
5146 if (TYPE_NAME (type) != TYPE_NAME (orig_type))
5147 return false;
5148
5149 size = TYPE_SIZE (type);
5150 osize = TYPE_SIZE (orig_type);
5151
5152 if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
5153 return false;
5154
5155 return tree_int_cst_lt (size, osize) != 0;
5156}
5157
/* Perform final processing on global declarations.  */

/* Dummy variable used below to register, in the types_used_by_var_decl
   hash table, the types declared as used at the global level.  */
static GTY (()) tree dummy_global;

void
gnat_write_global_declarations (void)
{
  unsigned int i;
  tree iter;

  /* If we have declared types as used at the global level, insert them in
     the global hash table.  We use a dummy variable for this purpose.  */
  if (types_used_by_cur_var_decl && !types_used_by_cur_var_decl->is_empty ())
    {
      struct varpool_node *node;
      char *label;

      ASM_FORMAT_PRIVATE_NAME (label, first_global_object_name, 0);
      dummy_global
	= build_decl (BUILTINS_LOCATION, VAR_DECL, get_identifier (label),
		      void_type_node);
      /* NOTE(review): DECL_HARD_REGISTER + TREE_STATIC presumably keep the
	 dummy alive through the varpool without emitting actual storage for
	 it -- confirm against varpool documentation.  */
      DECL_HARD_REGISTER (dummy_global) = 1;
      TREE_STATIC (dummy_global) = 1;
      node = varpool_node::get_create (dummy_global);
      node->definition = 1;
      node->force_output = 1;

      /* Drain the stack, attaching every used type to the dummy decl.  */
      while (!types_used_by_cur_var_decl->is_empty ())
	{
	  tree t = types_used_by_cur_var_decl->pop ();
	  types_used_by_var_decl_insert (t, dummy_global);
	}
    }

  /* Output debug information for all global type declarations first.  This
     ensures that global types whose compilation hasn't been finalized yet,
     for example pointers to Taft amendment types, have their compilation
     finalized in the right context.  */
  FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
    if (TREE_CODE (iter) == TYPE_DECL && !DECL_IGNORED_P (iter))
      debug_hooks->type_decl (iter, false);

  /* Proceed to optimize and emit assembly.  */
  symtab->finalize_compilation_unit ();

  /* After cgraph has had a chance to emit everything that's going to
     be emitted, output debug information for the rest of globals.  */
  if (!seen_error ())
    {
      timevar_push (TV_SYMOUT);
      FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
	if (TREE_CODE (iter) != TYPE_DECL && !DECL_IGNORED_P (iter))
	  debug_hooks->global_decl (iter);
      timevar_pop (TV_SYMOUT);
    }
}
5214
/* ************************************************************************
 * *                        GCC builtins support                          *
 * ************************************************************************ */

/* The general scheme is fairly simple:

   For each builtin function/type to be declared, gnat_install_builtins calls
   internal facilities which eventually get to gnat_pushdecl, which in turn
   tracks the so declared builtin function decls in the 'builtin_decls' global
   datastructure.  When an Intrinsic subprogram declaration is processed, we
   search this global datastructure to retrieve the associated BUILT_IN DECL
   node.  */
5227
5228/* Search the chain of currently available builtin declarations for a node
5229 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
5230 found, if any, or NULL_TREE otherwise. */
5231tree
5232builtin_decl_for (tree name)
5233{
5234 unsigned i;
5235 tree decl;
5236
9771b263 5237 FOR_EACH_VEC_SAFE_ELT (builtin_decls, i, decl)
a1ab4c31
AC
5238 if (DECL_NAME (decl) == name)
5239 return decl;
5240
5241 return NULL_TREE;
5242}
5243
/* The code below eventually exposes gnat_install_builtins, which declares
   the builtin types and functions we might need, either internally or as
   user accessible facilities.

   ??? This is a first implementation shot, still in rough shape.  It is
   heavily inspired from the "C" family implementation, with chunks copied
   verbatim from there.

   Two obvious TODO candidates are
   o Use a more efficient name/decl mapping scheme
   o Devise a middle-end infrastructure to avoid having to copy
     pieces between front-ends.  */

/* ----------------------------------------------------------------------- *
 *                       BUILTIN ELEMENTARY TYPES                          *
 * ----------------------------------------------------------------------- */
5260
/* Standard data types to be used in builtin argument declarations.  */

enum c_tree_index
{
  CTI_SIGNED_SIZE_TYPE, /* For format checking only.  */
  CTI_STRING_TYPE,
  CTI_CONST_STRING_TYPE,

  CTI_MAX
};

/* Table holding the trees for the indices above.  */
static tree c_global_trees[CTI_MAX];

/* Accessors for the table, mirroring the C front-end's naming so that
   builtin-types.def can be shared verbatim.  */
#define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
#define string_type_node c_global_trees[CTI_STRING_TYPE]
#define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]

/* ??? In addition to some attribute handlers, we currently don't support a
   (small) number of builtin-types, which in turn inhibits support for a
   number of builtin functions.  */
#define wint_type_node    void_type_node
#define intmax_type_node  void_type_node
#define uintmax_type_node void_type_node
5284
5285/* Build the void_list_node (void_type_node having been created). */
5286
5287static tree
5288build_void_list_node (void)
5289{
5290 tree t = build_tree_list (NULL_TREE, void_type_node);
5291 return t;
5292}
5293
5294/* Used to help initialize the builtin-types.def table. When a type of
5295 the correct size doesn't exist, use error_mark_node instead of NULL.
5296 The later results in segfaults even when a decl using the type doesn't
5297 get invoked. */
5298
5299static tree
5300builtin_type_for_size (int size, bool unsignedp)
5301{
ced57283 5302 tree type = gnat_type_for_size (size, unsignedp);
a1ab4c31
AC
5303 return type ? type : error_mark_node;
5304}
5305
5306/* Build/push the elementary type decls that builtin functions/types
5307 will need. */
5308
5309static void
5310install_builtin_elementary_types (void)
5311{
728936bb 5312 signed_size_type_node = gnat_signed_type (size_type_node);
a1ab4c31
AC
5313 pid_type_node = integer_type_node;
5314 void_list_node = build_void_list_node ();
5315
5316 string_type_node = build_pointer_type (char_type_node);
5317 const_string_type_node
5318 = build_pointer_type (build_qualified_type
5319 (char_type_node, TYPE_QUAL_CONST));
5320}
5321
5322/* ----------------------------------------------------------------------- *
5323 * BUILTIN FUNCTION TYPES *
5324 * ----------------------------------------------------------------------- */
5325
/* Now, builtin function types per se.  Each DEF_* macro below expands to
   just the enumerator name, so including builtin-types.def enumerates one
   constant per function type it defines.  */

enum c_builtin_type
{
#define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
#define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
#define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
#define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
#define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
#define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
#define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
#define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6) NAME,
#define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6, ARG7) NAME,
#define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6, ARG7, ARG8) NAME,
#define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
#define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
#define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
#define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
#define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
#define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
  NAME,
#define DEF_FUNCTION_TYPE_VAR_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
				ARG6, ARG7) NAME,
#define DEF_FUNCTION_TYPE_VAR_11(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
				 ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) NAME,
#define DEF_POINTER_TYPE(NAME, TYPE) NAME,
#include "builtin-types.def"
#undef DEF_PRIMITIVE_TYPE
#undef DEF_FUNCTION_TYPE_0
#undef DEF_FUNCTION_TYPE_1
#undef DEF_FUNCTION_TYPE_2
#undef DEF_FUNCTION_TYPE_3
#undef DEF_FUNCTION_TYPE_4
#undef DEF_FUNCTION_TYPE_5
#undef DEF_FUNCTION_TYPE_6
#undef DEF_FUNCTION_TYPE_7
#undef DEF_FUNCTION_TYPE_8
#undef DEF_FUNCTION_TYPE_VAR_0
#undef DEF_FUNCTION_TYPE_VAR_1
#undef DEF_FUNCTION_TYPE_VAR_2
#undef DEF_FUNCTION_TYPE_VAR_3
#undef DEF_FUNCTION_TYPE_VAR_4
#undef DEF_FUNCTION_TYPE_VAR_5
#undef DEF_FUNCTION_TYPE_VAR_7
#undef DEF_FUNCTION_TYPE_VAR_11
#undef DEF_POINTER_TYPE
  BT_LAST
};

typedef enum c_builtin_type builtin_type;

/* A temporary array used in communication with def_fn_type.  */
static GTY(()) tree builtin_types[(int) BT_LAST + 1];
5382
/* A helper function for install_builtin_types.  Build function type
   for DEF with return type RET and N arguments.  If VAR is true, then the
   function should be variadic after those N arguments.

   Takes special care not to ICE if any of the types involved are
   error_mark_node, which indicates that said type is not in fact available
   (see builtin_type_for_size).  In which case the function type as a whole
   should be error_mark_node.  */

static void
def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
{
  tree t;
  tree *args = XALLOCAVEC (tree, n);
  va_list list;
  int i;

  /* Collect the N argument types; bail out (leaving T as the offending
     error_mark_node) as soon as any of them is unavailable.  */
  va_start (list, n);
  for (i = 0; i < n; ++i)
    {
      builtin_type a = (builtin_type) va_arg (list, int);
      t = builtin_types[a];
      if (t == error_mark_node)
	goto egress;
      args[i] = t;
    }

  t = builtin_types[ret];
  if (t == error_mark_node)
    goto egress;
  if (var)
    t = build_varargs_function_type_array (t, n, args);
  else
    t = build_function_type_array (t, n, args);

 egress:
  /* On the error path T is error_mark_node, so the slot records the
     failure as documented above.  */
  builtin_types[def] = t;
  va_end (list);
}
5422
/* Build the builtin function types and install them in the builtin_types
   array for later use in builtin function decls.  Here each DEF_* macro
   expands to a def_fn_type call (or a direct assignment), so including
   builtin-types.def fills the table populated above.  */

static void
install_builtin_function_types (void)
{
  tree va_list_ref_type_node;
  tree va_list_arg_type_node;

  /* An array va_list decays to a pointer when passed, hence the split.  */
  if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
    {
      va_list_arg_type_node = va_list_ref_type_node =
	build_pointer_type (TREE_TYPE (va_list_type_node));
    }
  else
    {
      va_list_arg_type_node = va_list_type_node;
      va_list_ref_type_node = build_reference_type (va_list_type_node);
    }

#define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
  builtin_types[ENUM] = VALUE;
#define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
  def_fn_type (ENUM, RETURN, 0, 0);
#define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
  def_fn_type (ENUM, RETURN, 0, 1, ARG1);
#define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
  def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
#define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
  def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
#define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
  def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
#define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
  def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
#define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6) \
  def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
#define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6, ARG7) \
  def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
#define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6, ARG7, ARG8) \
  def_fn_type (ENUM, RETURN, 0, 8, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
	       ARG7, ARG8);
#define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
  def_fn_type (ENUM, RETURN, 1, 0);
#define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
  def_fn_type (ENUM, RETURN, 1, 1, ARG1);
#define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
  def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
#define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
  def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
#define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
  def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
#define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
  def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
#define DEF_FUNCTION_TYPE_VAR_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
				ARG6, ARG7) \
  def_fn_type (ENUM, RETURN, 1, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
#define DEF_FUNCTION_TYPE_VAR_11(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
				 ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) \
  def_fn_type (ENUM, RETURN, 1, 11, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
	       ARG7, ARG8, ARG9, ARG10, ARG11);
#define DEF_POINTER_TYPE(ENUM, TYPE) \
  builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);

#include "builtin-types.def"

#undef DEF_PRIMITIVE_TYPE
#undef DEF_FUNCTION_TYPE_0
#undef DEF_FUNCTION_TYPE_1
#undef DEF_FUNCTION_TYPE_2
#undef DEF_FUNCTION_TYPE_3
#undef DEF_FUNCTION_TYPE_4
#undef DEF_FUNCTION_TYPE_5
#undef DEF_FUNCTION_TYPE_6
#undef DEF_FUNCTION_TYPE_7
#undef DEF_FUNCTION_TYPE_8
#undef DEF_FUNCTION_TYPE_VAR_0
#undef DEF_FUNCTION_TYPE_VAR_1
#undef DEF_FUNCTION_TYPE_VAR_2
#undef DEF_FUNCTION_TYPE_VAR_3
#undef DEF_FUNCTION_TYPE_VAR_4
#undef DEF_FUNCTION_TYPE_VAR_5
#undef DEF_FUNCTION_TYPE_VAR_7
#undef DEF_FUNCTION_TYPE_VAR_11
#undef DEF_POINTER_TYPE
  builtin_types[(int) BT_LAST] = NULL_TREE;
}
5512
5513/* ----------------------------------------------------------------------- *
5514 * BUILTIN ATTRIBUTES *
5515 * ----------------------------------------------------------------------- */
5516
/* Enumerate the attribute trees defined by builtin-attrs.def; each DEF_*
   macro expands to just the enumerator name here.  */

enum built_in_attribute
{
#define DEF_ATTR_NULL_TREE(ENUM) ENUM,
#define DEF_ATTR_INT(ENUM, VALUE) ENUM,
#define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
#define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_STRING
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
  ATTR_LAST
};

/* Table of attribute trees, filled in by install_builtin_attributes.  */
static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
5534
/* Fill in the built_in_attributes array by re-expanding builtin-attrs.def,
   this time with each DEF_* macro building the corresponding tree.  */

static void
install_builtin_attributes (void)
{
  /* Fill in the built_in_attributes array.  */
#define DEF_ATTR_NULL_TREE(ENUM)				\
  built_in_attributes[(int) ENUM] = NULL_TREE;
#define DEF_ATTR_INT(ENUM, VALUE)				\
  built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
#define DEF_ATTR_STRING(ENUM, VALUE)				\
  built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
#define DEF_ATTR_IDENT(ENUM, STRING)				\
  built_in_attributes[(int) ENUM] = get_identifier (STRING);
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN)	\
  built_in_attributes[(int) ENUM]			\
    = tree_cons (built_in_attributes[(int) PURPOSE],	\
		 built_in_attributes[(int) VALUE],	\
		 built_in_attributes[(int) CHAIN]);
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_STRING
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
}
5559
5560/* Handle a "const" attribute; arguments as in
5561 struct attribute_spec.handler. */
5562
5563static tree
5564handle_const_attribute (tree *node, tree ARG_UNUSED (name),
5565 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5566 bool *no_add_attrs)
5567{
5568 if (TREE_CODE (*node) == FUNCTION_DECL)
5569 TREE_READONLY (*node) = 1;
5570 else
5571 *no_add_attrs = true;
5572
5573 return NULL_TREE;
5574}
5575
5576/* Handle a "nothrow" attribute; arguments as in
5577 struct attribute_spec.handler. */
5578
5579static tree
5580handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
5581 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5582 bool *no_add_attrs)
5583{
5584 if (TREE_CODE (*node) == FUNCTION_DECL)
5585 TREE_NOTHROW (*node) = 1;
5586 else
5587 *no_add_attrs = true;
5588
5589 return NULL_TREE;
5590}
5591
5592/* Handle a "pure" attribute; arguments as in
5593 struct attribute_spec.handler. */
5594
5595static tree
5596handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5597 int ARG_UNUSED (flags), bool *no_add_attrs)
5598{
5599 if (TREE_CODE (*node) == FUNCTION_DECL)
5600 DECL_PURE_P (*node) = 1;
5601 /* ??? TODO: Support types. */
5602 else
5603 {
7948ae37
OH
5604 warning (OPT_Wattributes, "%qs attribute ignored",
5605 IDENTIFIER_POINTER (name));
a1ab4c31
AC
5606 *no_add_attrs = true;
5607 }
5608
5609 return NULL_TREE;
5610}
5611
/* Handle a "no vops" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
			 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
			 bool *ARG_UNUSED (no_add_attrs))
{
  /* The attribute is required to land on a FUNCTION_DECL; anything else
     is treated as an internal error rather than a user diagnostic.  */
  gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
  DECL_IS_NOVOPS (*node) = 1;
  return NULL_TREE;
}
5624
5625/* Helper for nonnull attribute handling; fetch the operand number
5626 from the attribute argument list. */
5627
5628static bool
5629get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
5630{
5631 /* Verify the arg number is a constant. */
807e902e 5632 if (!tree_fits_uhwi_p (arg_num_expr))
a1ab4c31
AC
5633 return false;
5634
5635 *valp = TREE_INT_CST_LOW (arg_num_expr);
5636 return true;
5637}
5638
5639/* Handle the "nonnull" attribute. */
5640static tree
5641handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
5642 tree args, int ARG_UNUSED (flags),
5643 bool *no_add_attrs)
5644{
5645 tree type = *node;
5646 unsigned HOST_WIDE_INT attr_arg_num;
5647
5648 /* If no arguments are specified, all pointer arguments should be
5649 non-null. Verify a full prototype is given so that the arguments
5650 will have the correct types when we actually check them later. */
5651 if (!args)
5652 {
f4da8dce 5653 if (!prototype_p (type))
a1ab4c31
AC
5654 {
5655 error ("nonnull attribute without arguments on a non-prototype");
5656 *no_add_attrs = true;
5657 }
5658 return NULL_TREE;
5659 }
5660
5661 /* Argument list specified. Verify that each argument number references
5662 a pointer argument. */
5663 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
5664 {
a1ab4c31
AC
5665 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
5666
5667 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
5668 {
5669 error ("nonnull argument has invalid operand number (argument %lu)",
5670 (unsigned long) attr_arg_num);
5671 *no_add_attrs = true;
5672 return NULL_TREE;
5673 }
5674
d7d058c5 5675 if (prototype_p (type))
a1ab4c31 5676 {
d7d058c5
NF
5677 function_args_iterator iter;
5678 tree argument;
5679
5680 function_args_iter_init (&iter, type);
5681 for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
a1ab4c31 5682 {
d7d058c5 5683 argument = function_args_iter_cond (&iter);
a1ab4c31
AC
5684 if (!argument || ck_num == arg_num)
5685 break;
a1ab4c31
AC
5686 }
5687
5688 if (!argument
d7d058c5 5689 || TREE_CODE (argument) == VOID_TYPE)
a1ab4c31 5690 {
58c8f770
EB
5691 error ("nonnull argument with out-of-range operand number "
5692 "(argument %lu, operand %lu)",
a1ab4c31
AC
5693 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5694 *no_add_attrs = true;
5695 return NULL_TREE;
5696 }
5697
d7d058c5 5698 if (TREE_CODE (argument) != POINTER_TYPE)
a1ab4c31 5699 {
58c8f770
EB
5700 error ("nonnull argument references non-pointer operand "
5701 "(argument %lu, operand %lu)",
a1ab4c31
AC
5702 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5703 *no_add_attrs = true;
5704 return NULL_TREE;
5705 }
5706 }
5707 }
5708
5709 return NULL_TREE;
5710}
5711
5712/* Handle a "sentinel" attribute. */
5713
5714static tree
5715handle_sentinel_attribute (tree *node, tree name, tree args,
5716 int ARG_UNUSED (flags), bool *no_add_attrs)
5717{
f4da8dce 5718 if (!prototype_p (*node))
a1ab4c31
AC
5719 {
5720 warning (OPT_Wattributes,
7948ae37
OH
5721 "%qs attribute requires prototypes with named arguments",
5722 IDENTIFIER_POINTER (name));
a1ab4c31
AC
5723 *no_add_attrs = true;
5724 }
5725 else
5726 {
dcf0c47e 5727 if (!stdarg_p (*node))
a1ab4c31
AC
5728 {
5729 warning (OPT_Wattributes,
7948ae37
OH
5730 "%qs attribute only applies to variadic functions",
5731 IDENTIFIER_POINTER (name));
a1ab4c31
AC
5732 *no_add_attrs = true;
5733 }
5734 }
5735
5736 if (args)
5737 {
5738 tree position = TREE_VALUE (args);
5739
5740 if (TREE_CODE (position) != INTEGER_CST)
5741 {
5742 warning (0, "requested position is not an integer constant");
5743 *no_add_attrs = true;
5744 }
5745 else
5746 {
5747 if (tree_int_cst_lt (position, integer_zero_node))
5748 {
5749 warning (0, "requested position is less than zero");
5750 *no_add_attrs = true;
5751 }
5752 }
5753 }
5754
5755 return NULL_TREE;
5756}
5757
5758/* Handle a "noreturn" attribute; arguments as in
5759 struct attribute_spec.handler. */
5760
5761static tree
5762handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5763 int ARG_UNUSED (flags), bool *no_add_attrs)
5764{
5765 tree type = TREE_TYPE (*node);
5766
5767 /* See FIXME comment in c_common_attribute_table. */
5768 if (TREE_CODE (*node) == FUNCTION_DECL)
5769 TREE_THIS_VOLATILE (*node) = 1;
5770 else if (TREE_CODE (type) == POINTER_TYPE
5771 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
5772 TREE_TYPE (*node)
5773 = build_pointer_type
5774 (build_type_variant (TREE_TYPE (type),
5775 TYPE_READONLY (TREE_TYPE (type)), 1));
5776 else
5777 {
7948ae37
OH
5778 warning (OPT_Wattributes, "%qs attribute ignored",
5779 IDENTIFIER_POINTER (name));
a1ab4c31
AC
5780 *no_add_attrs = true;
5781 }
5782
5783 return NULL_TREE;
5784}
5785
/* Handle a "leaf" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_leaf_attribute (tree *node, tree name, tree ARG_UNUSED (args),
		       int ARG_UNUSED (flags), bool *no_add_attrs)
{
  /* The two checks below are deliberately independent "if"s (not an
     if/else chain): a non-public non-function decl gets both
     diagnostics.  */
  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (OPT_Wattributes, "%qE attribute ignored", name);
      *no_add_attrs = true;
    }
  if (!TREE_PUBLIC (*node))
    {
      /* NOTE(review): presumably "leaf" is only meaningful for externally
	 visible functions, hence the TREE_PUBLIC requirement -- confirm
	 against the attribute's documentation.  */
      warning (OPT_Wattributes, "%qE attribute has no effect", name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
5806
5807/* Handle a "always_inline" attribute; arguments as in
5808 struct attribute_spec.handler. */
5809
5810static tree
5811handle_always_inline_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5812 int ARG_UNUSED (flags), bool *no_add_attrs)
5813{
5814 if (TREE_CODE (*node) == FUNCTION_DECL)
5815 {
5816 /* Set the attribute and mark it for disregarding inline limits. */
5817 DECL_DISREGARD_INLINE_LIMITS (*node) = 1;
5818 }
5819 else
5820 {
5821 warning (OPT_Wattributes, "%qE attribute ignored", name);
5822 *no_add_attrs = true;
0d6e14fd
JH
5823 }
5824
5825 return NULL_TREE;
5826}
5827
a1ab4c31
AC
5828/* Handle a "malloc" attribute; arguments as in
5829 struct attribute_spec.handler. */
5830
5831static tree
5832handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5833 int ARG_UNUSED (flags), bool *no_add_attrs)
5834{
5835 if (TREE_CODE (*node) == FUNCTION_DECL
5836 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
5837 DECL_IS_MALLOC (*node) = 1;
5838 else
5839 {
7948ae37
OH
5840 warning (OPT_Wattributes, "%qs attribute ignored",
5841 IDENTIFIER_POINTER (name));
a1ab4c31
AC
5842 *no_add_attrs = true;
5843 }
5844
5845 return NULL_TREE;
5846}
5847
5848/* Fake handler for attributes we don't properly support. */
5849
5850tree
5851fake_attribute_handler (tree * ARG_UNUSED (node),
5852 tree ARG_UNUSED (name),
5853 tree ARG_UNUSED (args),
5854 int ARG_UNUSED (flags),
5855 bool * ARG_UNUSED (no_add_attrs))
5856{
5857 return NULL_TREE;
5858}
5859
5860/* Handle a "type_generic" attribute. */
5861
5862static tree
5863handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
5864 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5865 bool * ARG_UNUSED (no_add_attrs))
5866{
a1ab4c31
AC
5867 /* Ensure we have a function type. */
5868 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
b4680ca1 5869
a1ab4c31 5870 /* Ensure we have a variadic function. */
dcf0c47e 5871 gcc_assert (!prototype_p (*node) || stdarg_p (*node));
a1ab4c31
AC
5872
5873 return NULL_TREE;
5874}
5875
2724e58f
OH
5876/* Handle a "vector_size" attribute; arguments as in
5877 struct attribute_spec.handler. */
5878
5879static tree
5880handle_vector_size_attribute (tree *node, tree name, tree args,
e63b36bd 5881 int ARG_UNUSED (flags), bool *no_add_attrs)
2724e58f 5882{
e63b36bd
EB
5883 tree type = *node;
5884 tree vector_type;
2724e58f
OH
5885
5886 *no_add_attrs = true;
5887
2724e58f
OH
5888 /* We need to provide for vector pointers, vector arrays, and
5889 functions returning vectors. For example:
5890
5891 __attribute__((vector_size(16))) short *foo;
5892
5893 In this case, the mode is SI, but the type being modified is
5894 HI, so we need to look further. */
2724e58f
OH
5895 while (POINTER_TYPE_P (type)
5896 || TREE_CODE (type) == FUNCTION_TYPE
132a5459 5897 || TREE_CODE (type) == ARRAY_TYPE)
2724e58f
OH
5898 type = TREE_TYPE (type);
5899
e63b36bd
EB
5900 vector_type = build_vector_type_for_size (type, TREE_VALUE (args), name);
5901 if (!vector_type)
5902 return NULL_TREE;
2724e58f
OH
5903
5904 /* Build back pointers if needed. */
e63b36bd 5905 *node = reconstruct_complex_type (*node, vector_type);
2724e58f
OH
5906
5907 return NULL_TREE;
5908}
5909
7948ae37
OH
5910/* Handle a "vector_type" attribute; arguments as in
5911 struct attribute_spec.handler. */
5912
5913static tree
5914handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
e63b36bd 5915 int ARG_UNUSED (flags), bool *no_add_attrs)
7948ae37 5916{
e63b36bd
EB
5917 tree type = *node;
5918 tree vector_type;
7948ae37
OH
5919
5920 *no_add_attrs = true;
5921
e63b36bd 5922 if (TREE_CODE (type) != ARRAY_TYPE)
7948ae37
OH
5923 {
5924 error ("attribute %qs applies to array types only",
5925 IDENTIFIER_POINTER (name));
5926 return NULL_TREE;
5927 }
5928
e63b36bd
EB
5929 vector_type = build_vector_type_for_array (type, name);
5930 if (!vector_type)
7948ae37
OH
5931 return NULL_TREE;
5932
e63b36bd
EB
5933 TYPE_REPRESENTATIVE_ARRAY (vector_type) = type;
5934 *node = vector_type;
7948ae37
OH
5935
5936 return NULL_TREE;
5937}
5938
a1ab4c31
AC
5939/* ----------------------------------------------------------------------- *
5940 * BUILTIN FUNCTIONS *
5941 * ----------------------------------------------------------------------- */
5942
5943/* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
5944 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
5945 if nonansi_p and flag_no_nonansi_builtin. */
5946
5947static void
5948def_builtin_1 (enum built_in_function fncode,
5949 const char *name,
5950 enum built_in_class fnclass,
5951 tree fntype, tree libtype,
5952 bool both_p, bool fallback_p,
5953 bool nonansi_p ATTRIBUTE_UNUSED,
5954 tree fnattrs, bool implicit_p)
5955{
5956 tree decl;
5957 const char *libname;
5958
5959 /* Preserve an already installed decl. It most likely was setup in advance
5960 (e.g. as part of the internal builtins) for specific reasons. */
e79983f4 5961 if (builtin_decl_explicit (fncode) != NULL_TREE)
a1ab4c31
AC
5962 return;
5963
5964 gcc_assert ((!both_p && !fallback_p)
5965 || !strncmp (name, "__builtin_",
5966 strlen ("__builtin_")));
5967
5968 libname = name + strlen ("__builtin_");
5969 decl = add_builtin_function (name, fntype, fncode, fnclass,
5970 (fallback_p ? libname : NULL),
5971 fnattrs);
5972 if (both_p)
5973 /* ??? This is normally further controlled by command-line options
5974 like -fno-builtin, but we don't have them for Ada. */
5975 add_builtin_function (libname, libtype, fncode, fnclass,
5976 NULL, fnattrs);
5977
e79983f4 5978 set_builtin_decl (fncode, decl, implicit_p);
a1ab4c31
AC
5979}
5980
/* ISO C dialect flags, kept at 0 since these C dialect levels do not
   apply when compiling Ada; presumably referenced by the COND
   expressions expanded from builtins.def below — verify there.  */
static int flag_isoc94 = 0;
static int flag_isoc99 = 0;
static int flag_isoc11 = 0;
a1ab4c31
AC
5984
/* Install what the common builtins.def offers.  Each DEF_BUILTIN entry
   whose NAME is non-null and whose condition COND holds is turned into
   a def_builtin_1 call; the macro is undefined again afterwards so the
   header can be re-expanded elsewhere with a different definition.  */

static void
install_builtin_functions (void)
{
#define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
		    NONANSI_P, ATTRS, IMPLICIT, COND)			\
  if (NAME && COND)							\
    def_builtin_1 (ENUM, NAME, CLASS,					\
		   builtin_types[(int) TYPE],				\
		   builtin_types[(int) LIBTYPE],			\
		   BOTH_P, FALLBACK_P, NONANSI_P,			\
		   built_in_attributes[(int) ATTRS], IMPLICIT);
#include "builtins.def"
#undef DEF_BUILTIN
}
6001
6002/* ----------------------------------------------------------------------- *
6003 * BUILTIN FUNCTIONS *
6004 * ----------------------------------------------------------------------- */
6005
/* Install the builtin functions we might need.  The calls below are
   order-dependent: types and attributes are set up first so the
   function declarations that follow can refer to them.  */

void
gnat_install_builtins (void)
{
  /* Set up the elementary types, function types and attribute tables
     used by the builtin declarations below.  */
  install_builtin_elementary_types ();
  install_builtin_function_types ();
  install_builtin_attributes ();

  /* Install builtins used by generic middle-end pieces first.  Some of these
     know about internal specificities and control attributes accordingly, for
     instance __builtin_alloca vs no-throw and -fstack-check.  We will ignore
     the generic definition from builtins.def.  */
  build_common_builtin_nodes ();

  /* Now, install the target specific builtins, such as the AltiVec family on
     ppc, and the common set as exposed by builtins.def.  */
  targetm.init_builtins ();
  install_builtin_functions ();
}
6026
6027#include "gt-ada-utils.h"
6028#include "gtype-ada.h"
This page took 3.140955 seconds and 5 git commands to generate.