]> gcc.gnu.org Git - gcc.git/blame - gcc/ada/gcc-interface/utils.c
re PR ada/48835 (porting GNAT to m68k-linux)
[gcc.git] / gcc / ada / gcc-interface / utils.c
CommitLineData
a1ab4c31
AC
1/****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
396a2ee2 9 * Copyright (C) 1992-2016, Free Software Foundation, Inc. *
a1ab4c31
AC
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
25
a1ab4c31
AC
26#include "config.h"
27#include "system.h"
28#include "coretypes.h"
2adfab87
AM
29#include "target.h"
30#include "function.h"
a1ab4c31 31#include "tree.h"
d8a2d370 32#include "stringpool.h"
2adfab87
AM
33#include "cgraph.h"
34#include "diagnostic.h"
35#include "alias.h"
36#include "fold-const.h"
d8a2d370
DN
37#include "stor-layout.h"
38#include "attribs.h"
39#include "varasm.h"
a1ab4c31
AC
40#include "toplev.h"
41#include "output.h"
a1ab4c31
AC
42#include "debug.h"
43#include "convert.h"
677f3fa8 44#include "common/common-target.h"
8713b7e4 45#include "langhooks.h"
8713b7e4 46#include "tree-dump.h"
a1ab4c31 47#include "tree-inline.h"
a1ab4c31
AC
48
49#include "ada.h"
50#include "types.h"
51#include "atree.h"
a1ab4c31 52#include "nlists.h"
a1ab4c31
AC
53#include "uintp.h"
54#include "fe.h"
55#include "sinfo.h"
56#include "einfo.h"
57#include "ada-tree.h"
58#include "gigi.h"
59
a1ab4c31
AC
60/* If nonzero, pretend we are allocating at global level. */
61int force_global;
62
caa9d12a
EB
63/* The default alignment of "double" floating-point types, i.e. floating
64 point types whose size is equal to 64 bits, or 0 if this alignment is
65 not specifically capped. */
66int double_float_alignment;
67
68/* The default alignment of "double" or larger scalar types, i.e. scalar
69 types whose size is greater or equal to 64 bits, or 0 if this alignment
70 is not specifically capped. */
71int double_scalar_alignment;
72
24228312
AC
73/* True if floating-point arithmetics may use wider intermediate results. */
74bool fp_arith_may_widen = true;
75
a1ab4c31
AC
76/* Tree nodes for the various types and decls we create. */
77tree gnat_std_decls[(int) ADT_LAST];
78
79/* Functions to call for each of the possible raise reasons. */
80tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
81
ca8e13e8 82/* Likewise, but with extra info for each of the possible raise reasons. */
437f8c1e
AC
83tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];
84
a1ab4c31
AC
85/* Forward declarations for handlers of attributes. */
86static tree handle_const_attribute (tree *, tree, tree, int, bool *);
87static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
88static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
89static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
90static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
91static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
92static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
1f0b1322
EB
93static tree handle_noinline_attribute (tree *, tree, tree, int, bool *);
94static tree handle_noclone_attribute (tree *, tree, tree, int, bool *);
0d6e14fd 95static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
f087ea44 96static tree handle_always_inline_attribute (tree *, tree, tree, int, bool *);
a1ab4c31
AC
97static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
98static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
2724e58f 99static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
7948ae37 100static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);
a1ab4c31
AC
101
102/* Fake handler for attributes we don't properly support, typically because
103 they'd require dragging a lot of the common-c front-end circuitry. */
104static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
105
106/* Table of machine-independent internal attributes for Ada. We support
107 this minimal set of attributes to accommodate the needs of builtins. */
108const struct attribute_spec gnat_internal_attribute_table[] =
109{
62d784f7
KT
110 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
111 affects_type_identity } */
112 { "const", 0, 0, true, false, false, handle_const_attribute,
113 false },
114 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute,
115 false },
116 { "pure", 0, 0, true, false, false, handle_pure_attribute,
117 false },
118 { "no vops", 0, 0, true, false, false, handle_novops_attribute,
119 false },
120 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute,
121 false },
122 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute,
123 false },
124 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute,
125 false },
1f0b1322
EB
126 { "noinline", 0, 0, true, false, false, handle_noinline_attribute,
127 false },
128 { "noclone", 0, 0, true, false, false, handle_noclone_attribute,
129 false },
62d784f7
KT
130 { "leaf", 0, 0, true, false, false, handle_leaf_attribute,
131 false },
f087ea44
AC
132 { "always_inline",0, 0, true, false, false, handle_always_inline_attribute,
133 false },
62d784f7
KT
134 { "malloc", 0, 0, true, false, false, handle_malloc_attribute,
135 false },
136 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute,
137 false },
138
139 { "vector_size", 1, 1, false, true, false, handle_vector_size_attribute,
140 false },
141 { "vector_type", 0, 0, false, true, false, handle_vector_type_attribute,
142 false },
143 { "may_alias", 0, 0, false, true, false, NULL, false },
a1ab4c31
AC
144
145 /* ??? format and format_arg are heavy and not supported, which actually
146 prevents support for stdio builtins, which we however declare as part
147 of the common builtins.def contents. */
62d784f7
KT
148 { "format", 3, 3, false, true, true, fake_attribute_handler, false },
149 { "format_arg", 1, 1, false, true, true, fake_attribute_handler, false },
a1ab4c31 150
62d784f7 151 { NULL, 0, 0, false, false, false, NULL, false }
a1ab4c31
AC
152};
153
154/* Associates a GNAT tree node to a GCC tree node. It is used in
155 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
156 of `save_gnu_tree' for more info. */
157static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
158
159#define GET_GNU_TREE(GNAT_ENTITY) \
160 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
161
162#define SET_GNU_TREE(GNAT_ENTITY,VAL) \
163 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
164
165#define PRESENT_GNU_TREE(GNAT_ENTITY) \
166 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
167
168/* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
169static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
170
171#define GET_DUMMY_NODE(GNAT_ENTITY) \
172 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
173
174#define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
175 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
176
177#define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
178 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
179
180/* This variable keeps a table for types for each precision so that we only
181 allocate each of them once. Signed and unsigned types are kept separate.
182
183 Note that these types are only used when fold-const requests something
184 special. Perhaps we should NOT share these types; we'll see how it
185 goes later. */
186static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
187
188/* Likewise for float types, but record these by mode. */
189static GTY(()) tree float_types[NUM_MACHINE_MODES];
190
191/* For each binding contour we allocate a binding_level structure to indicate
192 the binding depth. */
193
d1b38208 194struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
a1ab4c31
AC
195 /* The binding level containing this one (the enclosing binding level). */
196 struct gnat_binding_level *chain;
197 /* The BLOCK node for this level. */
198 tree block;
199 /* If nonzero, the setjmp buffer that needs to be updated for any
200 variable-sized definition within this context. */
201 tree jmpbuf_decl;
202};
203
204/* The binding level currently in effect. */
205static GTY(()) struct gnat_binding_level *current_binding_level;
206
207/* A chain of gnat_binding_level structures awaiting reuse. */
208static GTY((deletable)) struct gnat_binding_level *free_binding_level;
209
228ee426
EB
210/* The context to be used for global declarations. */
211static GTY(()) tree global_context;
212
a22b794d
EB
213/* An array of global declarations. */
214static GTY(()) vec<tree, va_gc> *global_decls;
a1ab4c31
AC
215
216/* An array of builtin function declarations. */
9771b263 217static GTY(()) vec<tree, va_gc> *builtin_decls;
a1ab4c31 218
a1ab4c31
AC
219/* A chain of unused BLOCK nodes. */
220static GTY((deletable)) tree free_block_chain;
221
842d4ee2
EB
222/* A hash table of padded types. It is modelled on the generic type
223 hash table in tree.c, which must thus be used as a reference. */
d242408f
TS
224
225struct GTY((for_user)) pad_type_hash {
842d4ee2
EB
226 unsigned long hash;
227 tree type;
228};
229
6c907cff 230struct pad_type_hasher : ggc_cache_ptr_hash<pad_type_hash>
d242408f
TS
231{
232 static inline hashval_t hash (pad_type_hash *t) { return t->hash; }
233 static bool equal (pad_type_hash *a, pad_type_hash *b);
08ec2754 234 static int keep_cache_entry (pad_type_hash *&);
d242408f
TS
235};
236
237static GTY ((cache))
238 hash_table<pad_type_hasher> *pad_type_hash_table;
842d4ee2 239
a1ab4c31
AC
240static tree merge_sizes (tree, tree, tree, bool, bool);
241static tree compute_related_constant (tree, tree);
242static tree split_plus (tree, tree *);
ef4bddc2 243static tree float_type_for_precision (int, machine_mode);
a1ab4c31 244static tree convert_to_fat_pointer (tree, tree);
5c475ba9 245static unsigned int scale_by_factor_of (tree, unsigned int);
a1ab4c31 246static bool potential_alignment_gap (tree, tree, tree);
9a30c7c4 247
afc737f0
EB
248/* Linked list used as a queue to defer the initialization of the DECL_CONTEXT
249 of ..._DECL nodes and of the TYPE_CONTEXT of ..._TYPE nodes. */
9a30c7c4
AC
250struct deferred_decl_context_node
251{
afc737f0
EB
252 /* The ..._DECL node to work on. */
253 tree decl;
254
255 /* The corresponding entity's Scope. */
256 Entity_Id gnat_scope;
257
258 /* The value of force_global when DECL was pushed. */
259 int force_global;
260
261 /* The list of ..._TYPE nodes to propagate the context to. */
262 vec<tree> types;
263
264 /* The next queue item. */
265 struct deferred_decl_context_node *next;
9a30c7c4
AC
266};
267
268static struct deferred_decl_context_node *deferred_decl_context_queue = NULL;
269
270/* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
271 feed it with the elaboration of GNAT_SCOPE. */
272static struct deferred_decl_context_node *
273add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global);
274
275/* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
276 feed it with the DECL_CONTEXT computed as part of N as soon as it is
277 computed. */
278static void add_deferred_type_context (struct deferred_decl_context_node *n,
279 tree type);
a1ab4c31 280\f
842d4ee2 281/* Initialize data structures of the utils.c module. */
a1ab4c31
AC
282
283void
842d4ee2 284init_gnat_utils (void)
a1ab4c31 285{
842d4ee2 286 /* Initialize the association of GNAT nodes to GCC trees. */
766090c2 287 associate_gnat_to_gnu = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);
842d4ee2
EB
288
289 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
766090c2 290 dummy_node_table = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);
842d4ee2
EB
291
292 /* Initialize the hash table of padded types. */
d242408f 293 pad_type_hash_table = hash_table<pad_type_hasher>::create_ggc (512);
a1ab4c31
AC
294}
295
842d4ee2 296/* Destroy data structures of the utils.c module. */
f04b8d69
EB
297
298void
842d4ee2 299destroy_gnat_utils (void)
f04b8d69 300{
842d4ee2 301 /* Destroy the association of GNAT nodes to GCC trees. */
f04b8d69
EB
302 ggc_free (associate_gnat_to_gnu);
303 associate_gnat_to_gnu = NULL;
f04b8d69 304
842d4ee2
EB
305 /* Destroy the association of GNAT nodes to GCC trees as dummies. */
306 ggc_free (dummy_node_table);
307 dummy_node_table = NULL;
308
309 /* Destroy the hash table of padded types. */
d242408f 310 pad_type_hash_table->empty ();
842d4ee2 311 pad_type_hash_table = NULL;
842d4ee2
EB
312}
313\f
a1d8cc63
EB
314/* GNAT_ENTITY is a GNAT tree node for an entity. Associate GNU_DECL, a GCC
315 tree node, with GNAT_ENTITY. If GNU_DECL is not a ..._DECL node, abort.
316 If NO_CHECK is true, the latter check is suppressed.
a1ab4c31 317
a1d8cc63 318 If GNU_DECL is zero, reset a previous association. */
a1ab4c31
AC
319
320void
321save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
322{
323 /* Check that GNAT_ENTITY is not already defined and that it is being set
a1d8cc63 324 to something which is a decl. If that is not the case, this usually
a1ab4c31
AC
325 means GNAT_ENTITY is defined twice, but occasionally is due to some
326 Gigi problem. */
327 gcc_assert (!(gnu_decl
328 && (PRESENT_GNU_TREE (gnat_entity)
329 || (!no_check && !DECL_P (gnu_decl)))));
330
331 SET_GNU_TREE (gnat_entity, gnu_decl);
332}
333
a1d8cc63
EB
334/* GNAT_ENTITY is a GNAT tree node for an entity. Return the GCC tree node
335 that was associated with it. If there is no such tree node, abort.
a1ab4c31
AC
336
337 In some cases, such as delayed elaboration or expressions that need to
338 be elaborated only once, GNAT_ENTITY is really not an entity. */
339
340tree
341get_gnu_tree (Entity_Id gnat_entity)
342{
343 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
344 return GET_GNU_TREE (gnat_entity);
345}
346
347/* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
348
349bool
350present_gnu_tree (Entity_Id gnat_entity)
351{
352 return PRESENT_GNU_TREE (gnat_entity);
353}
354\f
a1ab4c31
AC
355/* Make a dummy type corresponding to GNAT_TYPE. */
356
357tree
358make_dummy_type (Entity_Id gnat_type)
359{
bf0b0e5e 360 Entity_Id gnat_equiv = Gigi_Equivalent_Type (Underlying_Type (gnat_type));
a1ab4c31
AC
361 tree gnu_type;
362
a1ab4c31
AC
363 /* If there was no equivalent type (can only happen when just annotating
364 types) or underlying type, go back to the original type. */
bf0b0e5e
AC
365 if (No (gnat_equiv))
366 gnat_equiv = gnat_type;
a1ab4c31
AC
367
368 /* If it there already a dummy type, use that one. Else make one. */
bf0b0e5e
AC
369 if (PRESENT_DUMMY_NODE (gnat_equiv))
370 return GET_DUMMY_NODE (gnat_equiv);
a1ab4c31
AC
371
372 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
373 an ENUMERAL_TYPE. */
bf0b0e5e
AC
374 gnu_type = make_node (Is_Record_Type (gnat_equiv)
375 ? tree_code_for_record_type (gnat_equiv)
a1ab4c31
AC
376 : ENUMERAL_TYPE);
377 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
378 TYPE_DUMMY_P (gnu_type) = 1;
10069d53
EB
379 TYPE_STUB_DECL (gnu_type)
380 = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
bf0b0e5e 381 if (Is_By_Reference_Type (gnat_equiv))
a0b8b1b7 382 TYPE_BY_REFERENCE_P (gnu_type) = 1;
a1ab4c31 383
bf0b0e5e 384 SET_DUMMY_NODE (gnat_equiv, gnu_type);
a1ab4c31
AC
385
386 return gnu_type;
387}
e3edbd56
EB
388
389/* Return the dummy type that was made for GNAT_TYPE, if any. */
390
391tree
392get_dummy_type (Entity_Id gnat_type)
393{
394 return GET_DUMMY_NODE (gnat_type);
395}
396
397/* Build dummy fat and thin pointer types whose designated type is specified
398 by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter. */
399
400void
401build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
402{
403 tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
404 tree gnu_fat_type, fields, gnu_object_type;
405
406 gnu_template_type = make_node (RECORD_TYPE);
407 TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
408 TYPE_DUMMY_P (gnu_template_type) = 1;
409 gnu_ptr_template = build_pointer_type (gnu_template_type);
410
411 gnu_array_type = make_node (ENUMERAL_TYPE);
412 TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
413 TYPE_DUMMY_P (gnu_array_type) = 1;
414 gnu_ptr_array = build_pointer_type (gnu_array_type);
415
416 gnu_fat_type = make_node (RECORD_TYPE);
417 /* Build a stub DECL to trigger the special processing for fat pointer types
418 in gnat_pushdecl. */
419 TYPE_NAME (gnu_fat_type)
420 = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
421 gnu_fat_type);
422 fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
423 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
424 DECL_CHAIN (fields)
425 = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
426 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
427 finish_fat_pointer_type (gnu_fat_type, fields);
428 SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
429 /* Suppress debug info until after the type is completed. */
430 TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;
431
432 gnu_object_type = make_node (RECORD_TYPE);
433 TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
434 TYPE_DUMMY_P (gnu_object_type) = 1;
435
436 TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
1e55d29a 437 TYPE_REFERENCE_TO (gnu_desig_type) = gnu_fat_type;
e3edbd56
EB
438 TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
439}
a1ab4c31 440\f
c99c0026 441/* Return true if we are in the global binding level. */
a1ab4c31 442
c99c0026 443bool
a1ab4c31
AC
444global_bindings_p (void)
445{
7c775aca 446 return force_global || !current_function_decl;
a1ab4c31
AC
447}
448
a09d56d8 449/* Enter a new binding level. */
a1ab4c31
AC
450
451void
c6bd4220 452gnat_pushlevel (void)
a1ab4c31
AC
453{
454 struct gnat_binding_level *newlevel = NULL;
455
456 /* Reuse a struct for this binding level, if there is one. */
457 if (free_binding_level)
458 {
459 newlevel = free_binding_level;
460 free_binding_level = free_binding_level->chain;
461 }
462 else
766090c2 463 newlevel = ggc_alloc<gnat_binding_level> ();
a1ab4c31
AC
464
465 /* Use a free BLOCK, if any; otherwise, allocate one. */
466 if (free_block_chain)
467 {
468 newlevel->block = free_block_chain;
469 free_block_chain = BLOCK_CHAIN (free_block_chain);
470 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
471 }
472 else
473 newlevel->block = make_node (BLOCK);
474
475 /* Point the BLOCK we just made to its parent. */
476 if (current_binding_level)
477 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
478
a09d56d8
EB
479 BLOCK_VARS (newlevel->block) = NULL_TREE;
480 BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
a1ab4c31
AC
481 TREE_USED (newlevel->block) = 1;
482
a09d56d8 483 /* Add this level to the front of the chain (stack) of active levels. */
a1ab4c31
AC
484 newlevel->chain = current_binding_level;
485 newlevel->jmpbuf_decl = NULL_TREE;
486 current_binding_level = newlevel;
487}
488
489/* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
490 and point FNDECL to this BLOCK. */
491
492void
493set_current_block_context (tree fndecl)
494{
495 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
496 DECL_INITIAL (fndecl) = current_binding_level->block;
a09d56d8 497 set_block_for_group (current_binding_level->block);
a1ab4c31
AC
498}
499
500/* Set the jmpbuf_decl for the current binding level to DECL. */
501
502void
503set_block_jmpbuf_decl (tree decl)
504{
505 current_binding_level->jmpbuf_decl = decl;
506}
507
508/* Get the jmpbuf_decl, if any, for the current binding level. */
509
510tree
c6bd4220 511get_block_jmpbuf_decl (void)
a1ab4c31
AC
512{
513 return current_binding_level->jmpbuf_decl;
514}
515
a09d56d8 516/* Exit a binding level. Set any BLOCK into the current code group. */
a1ab4c31
AC
517
518void
c6bd4220 519gnat_poplevel (void)
a1ab4c31
AC
520{
521 struct gnat_binding_level *level = current_binding_level;
522 tree block = level->block;
523
524 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
72ac05b0 525 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
a1ab4c31
AC
526
527 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
528 are no variables free the block and merge its subblocks into those of its
a09d56d8 529 parent block. Otherwise, add it to the list of its parent. */
a1ab4c31
AC
530 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
531 ;
7c775aca 532 else if (!BLOCK_VARS (block))
a1ab4c31
AC
533 {
534 BLOCK_SUBBLOCKS (level->chain->block)
61e46a7d
NF
535 = block_chainon (BLOCK_SUBBLOCKS (block),
536 BLOCK_SUBBLOCKS (level->chain->block));
a1ab4c31
AC
537 BLOCK_CHAIN (block) = free_block_chain;
538 free_block_chain = block;
539 }
540 else
541 {
542 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
543 BLOCK_SUBBLOCKS (level->chain->block) = block;
544 TREE_USED (block) = 1;
545 set_block_for_group (block);
546 }
547
548 /* Free this binding structure. */
549 current_binding_level = level->chain;
550 level->chain = free_binding_level;
551 free_binding_level = level;
552}
553
2231f17f
EB
554/* Exit a binding level and discard the associated BLOCK. */
555
556void
557gnat_zaplevel (void)
558{
559 struct gnat_binding_level *level = current_binding_level;
560 tree block = level->block;
561
562 BLOCK_CHAIN (block) = free_block_chain;
563 free_block_chain = block;
564
565 /* Free this binding structure. */
566 current_binding_level = level->chain;
567 level->chain = free_binding_level;
568 free_binding_level = level;
569}
a1ab4c31 570\f
4708440c
EB
571/* Set the context of TYPE and its parallel types (if any) to CONTEXT. */
572
573static void
574gnat_set_type_context (tree type, tree context)
575{
576 tree decl = TYPE_STUB_DECL (type);
577
578 TYPE_CONTEXT (type) = context;
579
580 while (decl && DECL_PARALLEL_TYPE (decl))
581 {
24d4b3d5
AC
582 tree parallel_type = DECL_PARALLEL_TYPE (decl);
583
584 /* Give a context to the parallel types and their stub decl, if any.
585 Some parallel types seems to be present in multiple parallel type
586 chains, so don't mess with their context if they already have one. */
7c775aca 587 if (!TYPE_CONTEXT (parallel_type))
24d4b3d5 588 {
7c775aca 589 if (TYPE_STUB_DECL (parallel_type))
24d4b3d5
AC
590 DECL_CONTEXT (TYPE_STUB_DECL (parallel_type)) = context;
591 TYPE_CONTEXT (parallel_type) = context;
592 }
593
4708440c
EB
594 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
595 }
596}
597
9a30c7c4
AC
598/* Return the innermost scope, starting at GNAT_NODE, we are be interested in
599 the debug info, or Empty if there is no such scope. If not NULL, set
600 IS_SUBPROGRAM to whether the returned entity is a subprogram. */
601
1d4b96e0 602Entity_Id
9a30c7c4
AC
603get_debug_scope (Node_Id gnat_node, bool *is_subprogram)
604{
605 Entity_Id gnat_entity;
606
607 if (is_subprogram)
608 *is_subprogram = false;
609
1d4b96e0
AC
610 if (Nkind (gnat_node) == N_Defining_Identifier
611 || Nkind (gnat_node) == N_Defining_Operator_Symbol)
9a30c7c4
AC
612 gnat_entity = Scope (gnat_node);
613 else
614 return Empty;
615
616 while (Present (gnat_entity))
617 {
618 switch (Ekind (gnat_entity))
619 {
620 case E_Function:
621 case E_Procedure:
622 if (Present (Protected_Body_Subprogram (gnat_entity)))
623 gnat_entity = Protected_Body_Subprogram (gnat_entity);
624
625 /* If the scope is a subprogram, then just rely on
626 current_function_decl, so that we don't have to defer
627 anything. This is needed because other places rely on the
628 validity of the DECL_CONTEXT attribute of FUNCTION_DECL nodes. */
629 if (is_subprogram)
630 *is_subprogram = true;
631 return gnat_entity;
632
633 case E_Record_Type:
634 case E_Record_Subtype:
635 return gnat_entity;
636
637 default:
638 /* By default, we are not interested in this particular scope: go to
639 the outer one. */
640 break;
641 }
7c775aca 642
9a30c7c4
AC
643 gnat_entity = Scope (gnat_entity);
644 }
7c775aca 645
9a30c7c4
AC
646 return Empty;
647}
648
7c775aca
EB
649/* If N is NULL, set TYPE's context to CONTEXT. Defer this to the processing
650 of N otherwise. */
9a30c7c4
AC
651
652static void
7c775aca 653defer_or_set_type_context (tree type, tree context,
9a30c7c4
AC
654 struct deferred_decl_context_node *n)
655{
656 if (n)
657 add_deferred_type_context (n, type);
658 else
659 gnat_set_type_context (type, context);
660}
661
7c775aca 662/* Return global_context, but create it first if need be. */
9a30c7c4
AC
663
664static tree
665get_global_context (void)
666{
667 if (!global_context)
881a5e60
PMR
668 {
669 global_context = build_translation_unit_decl (NULL_TREE);
670 debug_hooks->register_main_translation_unit (global_context);
671 }
7c775aca 672
9a30c7c4
AC
673 return global_context;
674}
675
228ee426
EB
676/* Record DECL as belonging to the current lexical scope and use GNAT_NODE
677 for location information and flag propagation. */
a1ab4c31
AC
678
679void
680gnat_pushdecl (tree decl, Node_Id gnat_node)
681{
9a30c7c4
AC
682 tree context = NULL_TREE;
683 struct deferred_decl_context_node *deferred_decl_context = NULL;
684
685 /* If explicitely asked to make DECL global or if it's an imported nested
686 object, short-circuit the regular Scope-based context computation. */
687 if (!((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || force_global == 1))
a1ab4c31 688 {
9a30c7c4
AC
689 /* Rely on the GNAT scope, or fallback to the current_function_decl if
690 the GNAT scope reached the global scope, if it reached a subprogram
691 or the declaration is a subprogram or a variable (for them we skip
692 intermediate context types because the subprogram body elaboration
693 machinery and the inliner both expect a subprogram context).
694
695 Falling back to current_function_decl is necessary for implicit
696 subprograms created by gigi, such as the elaboration subprograms. */
697 bool context_is_subprogram = false;
698 const Entity_Id gnat_scope
699 = get_debug_scope (gnat_node, &context_is_subprogram);
700
701 if (Present (gnat_scope)
702 && !context_is_subprogram
703 && TREE_CODE (decl) != FUNCTION_DECL
704 && TREE_CODE (decl) != VAR_DECL)
705 /* Always assume the scope has not been elaborated, thus defer the
706 context propagation to the time its elaboration will be
707 available. */
708 deferred_decl_context
709 = add_deferred_decl_context (decl, gnat_scope, force_global);
710
711 /* External declarations (when force_global > 0) may not be in a
712 local context. */
7c775aca 713 else if (current_function_decl && force_global == 0)
9a30c7c4 714 context = current_function_decl;
a1ab4c31
AC
715 }
716
9a30c7c4 717 /* If either we are forced to be in global mode or if both the GNAT scope and
7c775aca 718 the current_function_decl did not help in determining the context, use the
9a30c7c4 719 global scope. */
7c775aca 720 if (!deferred_decl_context && !context)
9a30c7c4
AC
721 context = get_global_context ();
722
723 /* Functions imported in another function are not really nested.
724 For really nested functions mark them initially as needing
725 a static chain for uses of that flag before unnesting;
726 lower_nested_functions will then recompute it. */
727 if (TREE_CODE (decl) == FUNCTION_DECL
728 && !TREE_PUBLIC (decl)
7c775aca 729 && context
9a30c7c4 730 && (TREE_CODE (context) == FUNCTION_DECL
7c775aca 731 || decl_function_context (context)))
9a30c7c4
AC
732 DECL_STATIC_CHAIN (decl) = 1;
733
734 if (!deferred_decl_context)
735 DECL_CONTEXT (decl) = context;
736
228ee426 737 TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));
a1ab4c31
AC
738
739 /* Set the location of DECL and emit a declaration for it. */
e8fa3dcd 740 if (Present (gnat_node) && !renaming_from_generic_instantiation_p (gnat_node))
a1ab4c31 741 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
228ee426 742
a1ab4c31
AC
743 add_decl_expr (decl, gnat_node);
744
745 /* Put the declaration on the list. The list of declarations is in reverse
2231f17f
EB
746 order. The list will be reversed later. Put global declarations in the
747 globals list and local ones in the current block. But skip TYPE_DECLs
748 for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
749 with the debugger and aren't needed anyway. */
750 if (!(TREE_CODE (decl) == TYPE_DECL
751 && TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
a1ab4c31 752 {
9083aacd 753 if (DECL_EXTERNAL (decl))
a1ab4c31 754 {
a1ab4c31 755 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
9771b263 756 vec_safe_push (builtin_decls, decl);
a1ab4c31 757 }
9083aacd 758 else if (global_bindings_p ())
a22b794d 759 vec_safe_push (global_decls, decl);
9083aacd 760 else
a1ab4c31 761 {
a963da4d
EB
762 DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
763 BLOCK_VARS (current_binding_level->block) = decl;
a1ab4c31
AC
764 }
765 }
766
aef308d0 767 /* For the declaration of a type, set its name either if it isn't already
10069d53 768 set or if the previous type name was not derived from a source name.
a1ab4c31 769 We'd rather have the type named with a real name and all the pointer
aef308d0
PMR
770 types to the same object have the same node, except when the names are
771 both derived from source names. */
a1ab4c31
AC
772 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
773 {
774 tree t = TREE_TYPE (decl);
775
79714815
EB
776 /* Array and pointer types aren't tagged types in the C sense so we need
777 to generate a typedef in DWARF for them and make sure it is preserved,
778 unless the type is artificial. */
aef308d0 779 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
79714815
EB
780 && ((TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != POINTER_TYPE)
781 || DECL_ARTIFICIAL (decl)))
782 ;
783 /* For array and pointer types, create the DECL_ORIGINAL_TYPE that will
784 generate the typedef in DWARF. Also do that for fat pointer types
785 because, even though they are tagged types in the C sense, they are
786 still XUP types attached to the base array type at this point. */
aef308d0 787 else if (!DECL_ARTIFICIAL (decl)
79714815
EB
788 && (TREE_CODE (t) == ARRAY_TYPE
789 || TREE_CODE (t) == POINTER_TYPE
790 || TYPE_IS_FAT_POINTER_P (t)))
a1ab4c31 791 {
d42b7559 792 tree tt = build_variant_type_copy (t);
a1ab4c31 793 TYPE_NAME (tt) = decl;
9a30c7c4
AC
794 defer_or_set_type_context (tt,
795 DECL_CONTEXT (decl),
796 deferred_decl_context);
a1ab4c31 797 TREE_TYPE (decl) = tt;
79714815 798 if (TYPE_NAME (t)
aef308d0
PMR
799 && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
800 && DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
40c88b94
EB
801 DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
802 else
803 DECL_ORIGINAL_TYPE (decl) = t;
79714815
EB
804 /* Array types need to have a name so that they can be related to
805 their GNAT encodings. */
806 if (TREE_CODE (t) == ARRAY_TYPE && !TYPE_NAME (t))
807 TYPE_NAME (t) = DECL_NAME (decl);
e3edbd56 808 t = NULL_TREE;
a1ab4c31 809 }
79714815 810 else if (TYPE_NAME (t)
aef308d0
PMR
811 && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
812 && DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
a1ab4c31
AC
813 ;
814 else
815 t = NULL_TREE;
816
79714815
EB
817 /* Propagate the name to all the variants, this is needed for the type
818 qualifiers machinery to work properly (see check_qualified_type).
819 Also propagate the context to them. Note that it will be propagated
820 to all parallel types too thanks to gnat_set_type_context. */
a1ab4c31
AC
821 if (t)
822 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
79714815
EB
823 /* ??? Because of the previous kludge, we can have variants of fat
824 pointer types with different names. */
825 if (!(TYPE_IS_FAT_POINTER_P (t)
826 && TYPE_NAME (t)
827 && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
d4d05b52
EB
828 {
829 TYPE_NAME (t) = decl;
9a30c7c4
AC
830 defer_or_set_type_context (t,
831 DECL_CONTEXT (decl),
832 deferred_decl_context);
d4d05b52 833 }
a1ab4c31
AC
834 }
835}
836\f
842d4ee2
EB
837/* Create a record type that contains a SIZE bytes long field of TYPE with a
838 starting bit position so that it is aligned to ALIGN bits, and leaving at
839 least ROOM bytes free before the field. BASE_ALIGN is the alignment the
0746af5e
EB
840 record is guaranteed to get. GNAT_NODE is used for the position of the
841 associated TYPE_DECL. */
842d4ee2
EB
842
843tree
844make_aligning_type (tree type, unsigned int align, tree size,
0746af5e 845 unsigned int base_align, int room, Node_Id gnat_node)
842d4ee2
EB
846{
847 /* We will be crafting a record type with one field at a position set to be
848 the next multiple of ALIGN past record'address + room bytes. We use a
849 record placeholder to express record'address. */
850 tree record_type = make_node (RECORD_TYPE);
851 tree record = build0 (PLACEHOLDER_EXPR, record_type);
852
853 tree record_addr_st
854 = convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));
855
856 /* The diagram below summarizes the shape of what we manipulate:
857
858 <--------- pos ---------->
859 { +------------+-------------+-----------------+
860 record =>{ |############| ... | field (type) |
861 { +------------+-------------+-----------------+
862 |<-- room -->|<- voffset ->|<---- size ----->|
863 o o
864 | |
865 record_addr vblock_addr
866
867 Every length is in sizetype bytes there, except "pos" which has to be
868 set as a bit position in the GCC tree for the record. */
869 tree room_st = size_int (room);
870 tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
871 tree voffset_st, pos, field;
872
9dba4b55 873 tree name = TYPE_IDENTIFIER (type);
842d4ee2 874
842d4ee2
EB
875 name = concat_name (name, "ALIGN");
876 TYPE_NAME (record_type) = name;
877
878 /* Compute VOFFSET and then POS. The next byte position multiple of some
879 alignment after some address is obtained by "and"ing the alignment minus
880 1 with the two's complement of the address. */
881 voffset_st = size_binop (BIT_AND_EXPR,
882 fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
883 size_int ((align / BITS_PER_UNIT) - 1));
884
885 /* POS = (ROOM + VOFFSET) * BIT_PER_UNIT, in bitsizetype. */
886 pos = size_binop (MULT_EXPR,
887 convert (bitsizetype,
888 size_binop (PLUS_EXPR, room_st, voffset_st)),
889 bitsize_unit_node);
890
891 /* Craft the GCC record representation. We exceptionally do everything
892 manually here because 1) our generic circuitry is not quite ready to
893 handle the complex position/size expressions we are setting up, 2) we
894 have a strong simplifying factor at hand: we know the maximum possible
895 value of voffset, and 3) we have to set/reset at least the sizes in
896 accordance with this maximum value anyway, as we need them to convey
897 what should be "alloc"ated for this type.
898
899 Use -1 as the 'addressable' indication for the field to prevent the
900 creation of a bitfield. We don't need one, it would have damaging
901 consequences on the alignment computation, and create_field_decl would
902 make one without this special argument, for instance because of the
903 complex position expression. */
904 field = create_field_decl (get_identifier ("F"), type, record_type, size,
905 pos, 1, -1);
906 TYPE_FIELDS (record_type) = field;
907
fe37c7af 908 SET_TYPE_ALIGN (record_type, base_align);
842d4ee2
EB
909 TYPE_USER_ALIGN (record_type) = 1;
910
911 TYPE_SIZE (record_type)
912 = size_binop (PLUS_EXPR,
913 size_binop (MULT_EXPR, convert (bitsizetype, size),
914 bitsize_unit_node),
915 bitsize_int (align + room * BITS_PER_UNIT));
916 TYPE_SIZE_UNIT (record_type)
917 = size_binop (PLUS_EXPR, size,
918 size_int (room + align / BITS_PER_UNIT));
919
920 SET_TYPE_MODE (record_type, BLKmode);
921 relate_alias_sets (record_type, type, ALIAS_SET_COPY);
922
923 /* Declare it now since it will never be declared otherwise. This is
924 necessary to ensure that its subtrees are properly marked. */
74746d49 925 create_type_decl (name, record_type, true, false, gnat_node);
842d4ee2
EB
926
927 return record_type;
928}
929
930/* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
931 as the field type of a packed record if IN_RECORD is true, or as the
932 component type of a packed array if IN_RECORD is false. See if we can
b3f75672
EB
933 rewrite it either as a type that has non-BLKmode, which we can pack
934 tighter in the packed record case, or as a smaller type with at most
935 MAX_ALIGN alignment if the value is non-zero. If so, return the new
936 type; if not, return the original type. */
842d4ee2
EB
937
938tree
b3f75672 939make_packable_type (tree type, bool in_record, unsigned int max_align)
842d4ee2 940{
ae7e9ddd 941 unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type));
842d4ee2 942 unsigned HOST_WIDE_INT new_size;
b3f75672
EB
943 unsigned int align = TYPE_ALIGN (type);
944 unsigned int new_align;
842d4ee2
EB
945
946 /* No point in doing anything if the size is zero. */
947 if (size == 0)
948 return type;
949
b3f75672 950 tree new_type = make_node (TREE_CODE (type));
842d4ee2
EB
951
952 /* Copy the name and flags from the old type to that of the new.
953 Note that we rely on the pointer equality created here for
954 TYPE_NAME to look through conversions in various places. */
955 TYPE_NAME (new_type) = TYPE_NAME (type);
956 TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
957 TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
ee45a32d 958 TYPE_REVERSE_STORAGE_ORDER (new_type) = TYPE_REVERSE_STORAGE_ORDER (type);
842d4ee2
EB
959 if (TREE_CODE (type) == RECORD_TYPE)
960 TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);
961
962 /* If we are in a record and have a small size, set the alignment to
963 try for an integral mode. Otherwise set it to try for a smaller
964 type with BLKmode. */
965 if (in_record && size <= MAX_FIXED_MODE_SIZE)
966 {
b3f75672
EB
967 new_size = ceil_pow2 (size);
968 new_align = MIN (new_size, BIGGEST_ALIGNMENT);
969 SET_TYPE_ALIGN (new_type, new_align);
842d4ee2
EB
970 }
971 else
972 {
842d4ee2
EB
973 /* Do not try to shrink the size if the RM size is not constant. */
974 if (TYPE_CONTAINS_TEMPLATE_P (type)
cc269bb6 975 || !tree_fits_uhwi_p (TYPE_ADA_SIZE (type)))
842d4ee2
EB
976 return type;
977
978 /* Round the RM size up to a unit boundary to get the minimal size
b3f75672
EB
979 for a BLKmode record. Give up if it's already the size and we
980 don't need to lower the alignment. */
eb1ce453 981 new_size = tree_to_uhwi (TYPE_ADA_SIZE (type));
842d4ee2 982 new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
b3f75672 983 if (new_size == size && (max_align == 0 || align <= max_align))
842d4ee2
EB
984 return type;
985
b3f75672
EB
986 new_align = MIN (new_size & -new_size, BIGGEST_ALIGNMENT);
987 if (max_align > 0 && new_align > max_align)
988 new_align = max_align;
989 SET_TYPE_ALIGN (new_type, MIN (align, new_align));
842d4ee2
EB
990 }
991
992 TYPE_USER_ALIGN (new_type) = 1;
993
994 /* Now copy the fields, keeping the position and size as we don't want
995 to change the layout by propagating the packedness downwards. */
b3f75672
EB
996 tree new_field_list = NULL_TREE;
997 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
842d4ee2 998 {
b3f75672 999 tree new_field_type = TREE_TYPE (field);
842d4ee2
EB
1000 tree new_field, new_size;
1001
1002 if (RECORD_OR_UNION_TYPE_P (new_field_type)
1003 && !TYPE_FAT_POINTER_P (new_field_type)
cc269bb6 1004 && tree_fits_uhwi_p (TYPE_SIZE (new_field_type)))
b3f75672 1005 new_field_type = make_packable_type (new_field_type, true, max_align);
842d4ee2
EB
1006
1007 /* However, for the last field in a not already packed record type
1008 that is of an aggregate type, we need to use the RM size in the
1009 packable version of the record type, see finish_record_type. */
b3f75672 1010 if (!DECL_CHAIN (field)
842d4ee2
EB
1011 && !TYPE_PACKED (type)
1012 && RECORD_OR_UNION_TYPE_P (new_field_type)
1013 && !TYPE_FAT_POINTER_P (new_field_type)
1014 && !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
1015 && TYPE_ADA_SIZE (new_field_type))
1016 new_size = TYPE_ADA_SIZE (new_field_type);
1017 else
b3f75672 1018 new_size = DECL_SIZE (field);
842d4ee2
EB
1019
1020 new_field
b3f75672
EB
1021 = create_field_decl (DECL_NAME (field), new_field_type, new_type,
1022 new_size, bit_position (field),
842d4ee2 1023 TYPE_PACKED (type),
b3f75672 1024 !DECL_NONADDRESSABLE_P (field));
842d4ee2 1025
b3f75672
EB
1026 DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (field);
1027 SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, field);
842d4ee2 1028 if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
b3f75672 1029 DECL_QUALIFIER (new_field) = DECL_QUALIFIER (field);
842d4ee2 1030
b3f75672
EB
1031 DECL_CHAIN (new_field) = new_field_list;
1032 new_field_list = new_field;
842d4ee2
EB
1033 }
1034
b3f75672 1035 finish_record_type (new_type, nreverse (new_field_list), 2, false);
842d4ee2 1036 relate_alias_sets (new_type, type, ALIAS_SET_COPY);
44e9e3ec
EB
1037 if (TYPE_STUB_DECL (type))
1038 SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
1039 DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));
842d4ee2
EB
1040
1041 /* If this is a padding record, we never want to make the size smaller
1042 than what was specified. For QUAL_UNION_TYPE, also copy the size. */
1043 if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
1044 {
1045 TYPE_SIZE (new_type) = TYPE_SIZE (type);
1046 TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
1047 new_size = size;
1048 }
1049 else
1050 {
1051 TYPE_SIZE (new_type) = bitsize_int (new_size);
b3f75672 1052 TYPE_SIZE_UNIT (new_type) = size_int (new_size / BITS_PER_UNIT);
842d4ee2
EB
1053 }
1054
1055 if (!TYPE_CONTAINS_TEMPLATE_P (type))
1056 SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));
1057
1058 compute_record_mode (new_type);
1059
1060 /* Try harder to get a packable type if necessary, for example
1061 in case the record itself contains a BLKmode field. */
1062 if (in_record && TYPE_MODE (new_type) == BLKmode)
1063 SET_TYPE_MODE (new_type,
1064 mode_for_size_tree (TYPE_SIZE (new_type), MODE_INT, 1));
1065
b3f75672
EB
1066 /* If neither mode nor size nor alignment shrunk, return the old type. */
1067 if (TYPE_MODE (new_type) == BLKmode && new_size >= size && max_align == 0)
842d4ee2
EB
1068 return type;
1069
1070 return new_type;
1071}
1072
1073/* Given a type TYPE, return a new type whose size is appropriate for SIZE.
1074 If TYPE is the best type, return it. Otherwise, make a new type. We
1075 only support new integral and pointer types. FOR_BIASED is true if
1076 we are making a biased type. */
1077
1078tree
1079make_type_from_size (tree type, tree size_tree, bool for_biased)
1080{
1081 unsigned HOST_WIDE_INT size;
1082 bool biased_p;
1083 tree new_type;
1084
1085 /* If size indicates an error, just return TYPE to avoid propagating
1086 the error. Likewise if it's too large to represent. */
cc269bb6 1087 if (!size_tree || !tree_fits_uhwi_p (size_tree))
842d4ee2
EB
1088 return type;
1089
ae7e9ddd 1090 size = tree_to_uhwi (size_tree);
842d4ee2
EB
1091
1092 switch (TREE_CODE (type))
1093 {
1094 case INTEGER_TYPE:
1095 case ENUMERAL_TYPE:
1096 case BOOLEAN_TYPE:
1097 biased_p = (TREE_CODE (type) == INTEGER_TYPE
1098 && TYPE_BIASED_REPRESENTATION_P (type));
1099
1100 /* Integer types with precision 0 are forbidden. */
1101 if (size == 0)
1102 size = 1;
1103
1104 /* Only do something if the type isn't a packed array type and doesn't
1105 already have the proper size and the size isn't too large. */
1106 if (TYPE_IS_PACKED_ARRAY_TYPE_P (type)
1107 || (TYPE_PRECISION (type) == size && biased_p == for_biased)
1108 || size > LONG_LONG_TYPE_SIZE)
1109 break;
1110
1111 biased_p |= for_biased;
55c8849f
EB
1112
1113 /* The type should be an unsigned type if the original type is unsigned
1114 or if the lower bound is constant and non-negative or if the type is
1115 biased, see E_Signed_Integer_Subtype case of gnat_to_gnu_entity. */
1116 if (TYPE_UNSIGNED (type)
1117 || (TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST
1118 && tree_int_cst_sgn (TYPE_MIN_VALUE (type)) >= 0)
1119 || biased_p)
842d4ee2
EB
1120 new_type = make_unsigned_type (size);
1121 else
1122 new_type = make_signed_type (size);
1123 TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
1eb58520
AC
1124 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_MIN_VALUE (type));
1125 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_MAX_VALUE (type));
842d4ee2
EB
1126 /* Copy the name to show that it's essentially the same type and
1127 not a subrange type. */
1128 TYPE_NAME (new_type) = TYPE_NAME (type);
1129 TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
1130 SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
1131 return new_type;
1132
1133 case RECORD_TYPE:
1134 /* Do something if this is a fat pointer, in which case we
1135 may need to return the thin pointer. */
1136 if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
1137 {
ef4bddc2 1138 machine_mode p_mode = mode_for_size (size, MODE_INT, 0);
842d4ee2
EB
1139 if (!targetm.valid_pointer_mode (p_mode))
1140 p_mode = ptr_mode;
1141 return
1142 build_pointer_type_for_mode
1143 (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
1144 p_mode, 0);
1145 }
1146 break;
1147
1148 case POINTER_TYPE:
1149 /* Only do something if this is a thin pointer, in which case we
1150 may need to return the fat pointer. */
1151 if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
1152 return
1153 build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
1154 break;
1155
1156 default:
1157 break;
1158 }
1159
1160 return type;
1161}
1162
1163/* See if the data pointed to by the hash table slot is marked. */
1164
08ec2754
RS
1165int
1166pad_type_hasher::keep_cache_entry (pad_type_hash *&t)
842d4ee2 1167{
08ec2754 1168 return ggc_marked_p (t->type);
842d4ee2
EB
1169}
1170
d242408f 1171/* Return true iff the padded types are equivalent. */
842d4ee2 1172
d242408f
TS
1173bool
1174pad_type_hasher::equal (pad_type_hash *t1, pad_type_hash *t2)
842d4ee2 1175{
842d4ee2
EB
1176 tree type1, type2;
1177
1178 if (t1->hash != t2->hash)
1179 return 0;
1180
1181 type1 = t1->type;
1182 type2 = t2->type;
1183
ee45a32d
EB
1184 /* We consider that the padded types are equivalent if they pad the same type
1185 and have the same size, alignment, RM size and storage order. Taking the
1186 mode into account is redundant since it is determined by the others. */
842d4ee2
EB
1187 return
1188 TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
1189 && TYPE_SIZE (type1) == TYPE_SIZE (type2)
1190 && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
ee45a32d
EB
1191 && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2)
1192 && TYPE_REVERSE_STORAGE_ORDER (type1) == TYPE_REVERSE_STORAGE_ORDER (type2);
842d4ee2
EB
1193}
1194
5cb7516d
EB
1195/* Look up the padded TYPE in the hash table and return its canonical version
1196 if it exists; otherwise, insert it into the hash table. */
1197
1198static tree
1199lookup_and_insert_pad_type (tree type)
1200{
1201 hashval_t hashcode;
1202 struct pad_type_hash in, *h;
5cb7516d
EB
1203
1204 hashcode
1205 = iterative_hash_object (TYPE_HASH (TREE_TYPE (TYPE_FIELDS (type))), 0);
1206 hashcode = iterative_hash_expr (TYPE_SIZE (type), hashcode);
1207 hashcode = iterative_hash_hashval_t (TYPE_ALIGN (type), hashcode);
1208 hashcode = iterative_hash_expr (TYPE_ADA_SIZE (type), hashcode);
1209
1210 in.hash = hashcode;
1211 in.type = type;
d242408f 1212 h = pad_type_hash_table->find_with_hash (&in, hashcode);
5cb7516d
EB
1213 if (h)
1214 return h->type;
1215
1216 h = ggc_alloc<pad_type_hash> ();
1217 h->hash = hashcode;
1218 h->type = type;
d242408f 1219 *pad_type_hash_table->find_slot_with_hash (h, hashcode, INSERT) = h;
5cb7516d
EB
1220 return NULL_TREE;
1221}
1222
842d4ee2 1223/* Ensure that TYPE has SIZE and ALIGN. Make and return a new padded type
5cb7516d 1224 if needed. We have already verified that SIZE and ALIGN are large enough.
842d4ee2
EB
1225 GNAT_ENTITY is used to name the resulting record and to issue a warning.
1226 IS_COMPONENT_TYPE is true if this is being done for the component type of
1227 an array. IS_USER_TYPE is true if the original type needs to be completed.
1228 DEFINITION is true if this type is being defined. SET_RM_SIZE is true if
afc737f0
EB
1229 the RM size of the resulting type is to be set to SIZE too; in this case,
1230 the padded type is canonicalized before being returned. */
842d4ee2
EB
1231
1232tree
1233maybe_pad_type (tree type, tree size, unsigned int align,
1234 Entity_Id gnat_entity, bool is_component_type,
1235 bool is_user_type, bool definition, bool set_rm_size)
1236{
1237 tree orig_size = TYPE_SIZE (type);
44e9e3ec 1238 unsigned int orig_align = TYPE_ALIGN (type);
842d4ee2
EB
1239 tree record, field;
1240
1241 /* If TYPE is a padded type, see if it agrees with any size and alignment
1242 we were given. If so, return the original type. Otherwise, strip
1243 off the padding, since we will either be returning the inner type
1244 or repadding it. If no size or alignment is specified, use that of
1245 the original padded type. */
1246 if (TYPE_IS_PADDING_P (type))
1247 {
1248 if ((!size
44e9e3ec
EB
1249 || operand_equal_p (round_up (size, orig_align), orig_size, 0))
1250 && (align == 0 || align == orig_align))
842d4ee2
EB
1251 return type;
1252
1253 if (!size)
44e9e3ec 1254 size = orig_size;
842d4ee2 1255 if (align == 0)
44e9e3ec 1256 align = orig_align;
842d4ee2
EB
1257
1258 type = TREE_TYPE (TYPE_FIELDS (type));
1259 orig_size = TYPE_SIZE (type);
44e9e3ec 1260 orig_align = TYPE_ALIGN (type);
842d4ee2
EB
1261 }
1262
1263 /* If the size is either not being changed or is being made smaller (which
1264 is not done here and is only valid for bitfields anyway), show the size
1265 isn't changing. Likewise, clear the alignment if it isn't being
1266 changed. Then return if we aren't doing anything. */
1267 if (size
1268 && (operand_equal_p (size, orig_size, 0)
1269 || (TREE_CODE (orig_size) == INTEGER_CST
1270 && tree_int_cst_lt (size, orig_size))))
1271 size = NULL_TREE;
1272
44e9e3ec 1273 if (align == orig_align)
842d4ee2
EB
1274 align = 0;
1275
1276 if (align == 0 && !size)
1277 return type;
1278
1279 /* If requested, complete the original type and give it a name. */
1280 if (is_user_type)
1281 create_type_decl (get_entity_name (gnat_entity), type,
74746d49 1282 !Comes_From_Source (gnat_entity),
842d4ee2
EB
1283 !(TYPE_NAME (type)
1284 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1285 && DECL_IGNORED_P (TYPE_NAME (type))),
1286 gnat_entity);
1287
1288 /* We used to modify the record in place in some cases, but that could
1289 generate incorrect debugging information. So make a new record
1290 type and name. */
1291 record = make_node (RECORD_TYPE);
1292 TYPE_PADDING_P (record) = 1;
1293
7c775aca 1294 /* ??? Padding types around packed array implementation types will be
2d595887
PMR
1295 considered as root types in the array descriptor language hook (see
1296 gnat_get_array_descr_info). Give them the original packed array type
1297 name so that the one coming from sources appears in the debugging
1298 information. */
7c775aca
EB
1299 if (TYPE_IMPL_PACKED_ARRAY_P (type)
1300 && TYPE_ORIGINAL_PACKED_ARRAY (type)
1301 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
1302 TYPE_NAME (record) = TYPE_NAME (TYPE_ORIGINAL_PACKED_ARRAY (type));
2d595887 1303 else if (Present (gnat_entity))
842d4ee2
EB
1304 TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD");
1305
fe37c7af 1306 SET_TYPE_ALIGN (record, align ? align : orig_align);
842d4ee2
EB
1307 TYPE_SIZE (record) = size ? size : orig_size;
1308 TYPE_SIZE_UNIT (record)
1309 = convert (sizetype,
1310 size_binop (CEIL_DIV_EXPR, TYPE_SIZE (record),
1311 bitsize_unit_node));
1312
1313 /* If we are changing the alignment and the input type is a record with
1314 BLKmode and a small constant size, try to make a form that has an
1315 integral mode. This might allow the padding record to also have an
1316 integral mode, which will be much more efficient. There is no point
1317 in doing so if a size is specified unless it is also a small constant
1318 size and it is incorrect to do so if we cannot guarantee that the mode
1319 will be naturally aligned since the field must always be addressable.
1320
1321 ??? This might not always be a win when done for a stand-alone object:
1322 since the nominal and the effective type of the object will now have
1323 different modes, a VIEW_CONVERT_EXPR will be required for converting
1324 between them and it might be hard to overcome afterwards, including
1325 at the RTL level when the stand-alone object is accessed as a whole. */
1326 if (align != 0
1327 && RECORD_OR_UNION_TYPE_P (type)
1328 && TYPE_MODE (type) == BLKmode
1329 && !TYPE_BY_REFERENCE_P (type)
1330 && TREE_CODE (orig_size) == INTEGER_CST
1331 && !TREE_OVERFLOW (orig_size)
1332 && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
1333 && (!size
1334 || (TREE_CODE (size) == INTEGER_CST
1335 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
1336 {
1337 tree packable_type = make_packable_type (type, true);
1338 if (TYPE_MODE (packable_type) != BLKmode
1339 && align >= TYPE_ALIGN (packable_type))
1340 type = packable_type;
1341 }
1342
1343 /* Now create the field with the original size. */
1366ba41
EB
1344 field = create_field_decl (get_identifier ("F"), type, record, orig_size,
1345 bitsize_zero_node, 0, 1);
842d4ee2
EB
1346 DECL_INTERNAL_P (field) = 1;
1347
afc737f0 1348 /* We will output additional debug info manually below. */
842d4ee2
EB
1349 finish_record_type (record, field, 1, false);
1350
afc737f0
EB
1351 if (gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
1352 SET_TYPE_DEBUG_TYPE (record, type);
1353
842d4ee2
EB
1354 /* Set the RM size if requested. */
1355 if (set_rm_size)
1356 {
5cb7516d
EB
1357 tree canonical_pad_type;
1358
842d4ee2
EB
1359 SET_TYPE_ADA_SIZE (record, size ? size : orig_size);
1360
1361 /* If the padded type is complete and has constant size, we canonicalize
1362 it by means of the hash table. This is consistent with the language
1363 semantics and ensures that gigi and the middle-end have a common view
1364 of these padded types. */
5cb7516d
EB
1365 if (TREE_CONSTANT (TYPE_SIZE (record))
1366 && (canonical_pad_type = lookup_and_insert_pad_type (record)))
842d4ee2 1367 {
5cb7516d
EB
1368 record = canonical_pad_type;
1369 goto built;
842d4ee2
EB
1370 }
1371 }
1372
1373 /* Unless debugging information isn't being written for the input type,
1374 write a record that shows what we are a subtype of and also make a
eb59e428
PMR
1375 variable that indicates our size, if still variable. */
1376 if (TREE_CODE (orig_size) != INTEGER_CST
842d4ee2
EB
1377 && TYPE_NAME (record)
1378 && TYPE_NAME (type)
1379 && !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1380 && DECL_IGNORED_P (TYPE_NAME (type))))
1381 {
9dba4b55 1382 tree name = TYPE_IDENTIFIER (record);
c1a569ef
EB
1383 tree size_unit = TYPE_SIZE_UNIT (record);
1384
1385 /* A variable that holds the size is required even with no encoding since
1386 it will be referenced by debugging information attributes. At global
1387 level, we need a single variable across all translation units. */
1388 if (size
1389 && TREE_CODE (size) != INTEGER_CST
1390 && (definition || global_bindings_p ()))
1391 {
eb59e428
PMR
1392 /* Whether or not gnat_entity comes from source, this XVZ variable is
1393 is a compilation artifact. */
c1a569ef
EB
1394 size_unit
1395 = create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
1396 size_unit, true, global_bindings_p (),
1397 !definition && global_bindings_p (), false,
2056c5ed 1398 false, true, true, NULL, gnat_entity);
c1a569ef
EB
1399 TYPE_SIZE_UNIT (record) = size_unit;
1400 }
1401
eb59e428
PMR
1402 /* There is no need to show what we are a subtype of when outputting as
1403 few encodings as possible: regular debugging infomation makes this
1404 redundant. */
1405 if (gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
1406 {
1407 tree marker = make_node (RECORD_TYPE);
1408 tree orig_name = TYPE_IDENTIFIER (type);
1409
1410 TYPE_NAME (marker) = concat_name (name, "XVS");
1411 finish_record_type (marker,
1412 create_field_decl (orig_name,
1413 build_reference_type (type),
1414 marker, NULL_TREE, NULL_TREE,
1415 0, 0),
1416 0, true);
1417 TYPE_SIZE_UNIT (marker) = size_unit;
1418
1419 add_parallel_type (record, marker);
1420 }
842d4ee2
EB
1421 }
1422
842d4ee2 1423built:
80746f5d 1424 /* If a simple size was explicitly given, maybe issue a warning. */
842d4ee2
EB
1425 if (!size
1426 || TREE_CODE (size) == COND_EXPR
1427 || TREE_CODE (size) == MAX_EXPR
80746f5d 1428 || No (gnat_entity))
842d4ee2
EB
1429 return record;
1430
80746f5d
EB
1431 /* But don't do it if we are just annotating types and the type is tagged or
1432 concurrent, since these types aren't fully laid out in this mode. */
1433 if (type_annotate_only)
1434 {
1435 Entity_Id gnat_type
1436 = is_component_type
1437 ? Component_Type (gnat_entity) : Etype (gnat_entity);
1438
1439 if (Is_Tagged_Type (gnat_type) || Is_Concurrent_Type (gnat_type))
1440 return record;
1441 }
1442
1443 /* Take the original size as the maximum size of the input if there was an
1444 unconstrained record involved and round it up to the specified alignment,
1445 if one was specified, but only for aggregate types. */
842d4ee2
EB
1446 if (CONTAINS_PLACEHOLDER_P (orig_size))
1447 orig_size = max_size (orig_size, true);
1448
f42dd37f 1449 if (align && AGGREGATE_TYPE_P (type))
842d4ee2
EB
1450 orig_size = round_up (orig_size, align);
1451
1452 if (!operand_equal_p (size, orig_size, 0)
1453 && !(TREE_CODE (size) == INTEGER_CST
1454 && TREE_CODE (orig_size) == INTEGER_CST
1455 && (TREE_OVERFLOW (size)
1456 || TREE_OVERFLOW (orig_size)
1457 || tree_int_cst_lt (size, orig_size))))
1458 {
1459 Node_Id gnat_error_node = Empty;
1460
1a4cb227
AC
1461 /* For a packed array, post the message on the original array type. */
1462 if (Is_Packed_Array_Impl_Type (gnat_entity))
842d4ee2
EB
1463 gnat_entity = Original_Array_Type (gnat_entity);
1464
1465 if ((Ekind (gnat_entity) == E_Component
1466 || Ekind (gnat_entity) == E_Discriminant)
1467 && Present (Component_Clause (gnat_entity)))
1468 gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
1469 else if (Present (Size_Clause (gnat_entity)))
1470 gnat_error_node = Expression (Size_Clause (gnat_entity));
1471
1472 /* Generate message only for entities that come from source, since
1473 if we have an entity created by expansion, the message will be
1474 generated for some other corresponding source entity. */
1475 if (Comes_From_Source (gnat_entity))
1476 {
1477 if (Present (gnat_error_node))
1478 post_error_ne_tree ("{^ }bits of & unused?",
1479 gnat_error_node, gnat_entity,
1480 size_diffop (size, orig_size));
1481 else if (is_component_type)
1482 post_error_ne_tree ("component of& padded{ by ^ bits}?",
1483 gnat_entity, gnat_entity,
1484 size_diffop (size, orig_size));
1485 }
1486 }
1487
1488 return record;
1489}
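/* For instance (illustration only, hypothetical source): given a record type
   T whose components only need 40 bits but which carries a Size clause of
   64 bits, the check above posts a warning on the size clause rendered
   roughly as: 24 bits of "T" unused.  */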
ee45a32d
EB
1490
1491/* Return a copy of the padded TYPE but with reverse storage order. */
1492
1493tree
1494set_reverse_storage_order_on_pad_type (tree type)
1495{
1496 tree field, canonical_pad_type;
1497
4232ebbb
ML
1498 if (flag_checking)
1499 {
1500 /* If the inner type is not scalar then the function does nothing. */
1501 tree inner_type = TREE_TYPE (TYPE_FIELDS (type));
1502 gcc_assert (!AGGREGATE_TYPE_P (inner_type)
1503 && !VECTOR_TYPE_P (inner_type));
1504 }
ee45a32d
EB
1505
1506 /* This is required for the canonicalization. */
1507 gcc_assert (TREE_CONSTANT (TYPE_SIZE (type)));
1508
1509 field = copy_node (TYPE_FIELDS (type));
1510 type = copy_type (type);
1511 DECL_CONTEXT (field) = type;
1512 TYPE_FIELDS (type) = field;
1513 TYPE_REVERSE_STORAGE_ORDER (type) = 1;
1514 canonical_pad_type = lookup_and_insert_pad_type (type);
1515 return canonical_pad_type ? canonical_pad_type : type;
1516}
842d4ee2
EB
1517\f
1518/* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
1519 If this is a multi-dimensional array type, do this recursively.
1520
1521 OP may be
1522 - ALIAS_SET_COPY: the new set is made a copy of the old one.
1523 - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
1524 - ALIAS_SET_SUBSET: the new set is made a subset of the old one. */
1525
1526void
1527relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
1528{
1529 /* Remove any padding from GNU_OLD_TYPE. It doesn't matter in the case
1530 of a one-dimensional array, since the padding has the same alias set
1531 as the field type, but if it's a multi-dimensional array, we need to
1532 see the inner types. */
1533 while (TREE_CODE (gnu_old_type) == RECORD_TYPE
1534 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
1535 || TYPE_PADDING_P (gnu_old_type)))
1536 gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));
1537
1538 /* Unconstrained array types are deemed incomplete and would thus be given
1539 alias set 0. Retrieve the underlying array type. */
1540 if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
1541 gnu_old_type
1542 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
1543 if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
1544 gnu_new_type
1545 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));
1546
1547 if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
1548 && TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
1549 && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
1550 relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);
1551
1552 switch (op)
1553 {
1554 case ALIAS_SET_COPY:
1555 /* The alias set shouldn't be copied between array types with different
1556 aliasing settings because this can break the aliasing relationship
1557 between the array type and its element type. */
9abe8b74 1558 if (flag_checking || flag_strict_aliasing)
842d4ee2
EB
1559 gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
1560 && TREE_CODE (gnu_old_type) == ARRAY_TYPE
1561 && TYPE_NONALIASED_COMPONENT (gnu_new_type)
1562 != TYPE_NONALIASED_COMPONENT (gnu_old_type)));
1563
1564 TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
1565 break;
1566
1567 case ALIAS_SET_SUBSET:
1568 case ALIAS_SET_SUPERSET:
1569 {
1570 alias_set_type old_set = get_alias_set (gnu_old_type);
1571 alias_set_type new_set = get_alias_set (gnu_new_type);
1572
1573 /* Do nothing if the alias sets conflict. This ensures that we
1574 never call record_alias_subset several times for the same pair
1575 or at all for alias set 0. */
1576 if (!alias_sets_conflict_p (old_set, new_set))
1577 {
1578 if (op == ALIAS_SET_SUBSET)
1579 record_alias_subset (old_set, new_set);
1580 else
1581 record_alias_subset (new_set, old_set);
1582 }
1583 }
1584 break;
1585
1586 default:
1587 gcc_unreachable ();
1588 }
1589
1590 record_component_aliases (gnu_new_type);
1591}
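/* Illustrative sketch (hypothetical types, for exposition only): a caller
   that builds GNU_VIEW_TYPE as another view of GNU_BASE_TYPE would share
   the alias set outright with ALIAS_SET_COPY, or use ALIAS_SET_SUBSET or
   ALIAS_SET_SUPERSET to only record an inclusion relationship.  */
#if 0
  relate_alias_sets (gnu_view_type, gnu_base_type, ALIAS_SET_COPY);
#endif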
1592\f
1aeb40dd 1593/* Record TYPE as a builtin type for Ada. NAME is the name of the type.
c1a569ef 1594 ARTIFICIAL_P is true if the type was generated by the compiler. */
a1ab4c31
AC
1595
1596void
1aeb40dd 1597record_builtin_type (const char *name, tree type, bool artificial_p)
a1ab4c31 1598{
c172df28
AH
1599 tree type_decl = build_decl (input_location,
1600 TYPE_DECL, get_identifier (name), type);
1aeb40dd 1601 DECL_ARTIFICIAL (type_decl) = artificial_p;
bc712852 1602 TYPE_ARTIFICIAL (type) = artificial_p;
10069d53 1603 gnat_pushdecl (type_decl, Empty);
a1ab4c31 1604
10069d53
EB
1605 if (debug_hooks->type_decl)
1606 debug_hooks->type_decl (type_decl, false);
a1ab4c31
AC
1607}
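/* Illustrative sketch (for exposition only; the actual names registered by
   gigi may differ): registering a basic GCC type under a language-level
   name and emitting debug info for it.  */
#if 0
  record_builtin_type ("integer", integer_type_node, false);
#endif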
1608\f
825da0d2
EB
1609/* Finish constructing the character type CHAR_TYPE.
1610
1611 In Ada character types are enumeration types and, as a consequence, are
1612 represented in the front-end by integral types holding the positions of
1613 the enumeration values as defined by the language, which means that the
1614 integral types are unsigned.
1615
1616 Unfortunately the signedness of 'char' in C is implementation-defined
1617 and GCC even has the option -fsigned-char to toggle it at run time.
1618 Since GNAT's philosophy is to be compatible with C by default, to wit
1619 Interfaces.C.char is defined as a mere copy of Character, we may need
1620 to declare character types as signed types in GENERIC and generate the
1621 necessary adjustments to make them behave as unsigned types.
1622
1623 The overall strategy is as follows: if 'char' is unsigned, do nothing;
1624 if 'char' is signed, translate character types of CHAR_TYPE_SIZE and
1625 character subtypes with RM_Size = Esize = CHAR_TYPE_SIZE into signed
1626 types. The idea is to ensure that the bit pattern contained in the
1627 Esize'd objects is not changed, even though the numerical value will
1628 be interpreted differently depending on the signedness.
1629
1630 For character types, the bounds are implicit and, therefore, need to
1631 be adjusted. Moreover, the debug info needs the unsigned version. */
1632
1633void
1634finish_character_type (tree char_type)
1635{
1636 if (TYPE_UNSIGNED (char_type))
1637 return;
1638
7005800c
EB
1639 /* Make a copy of a generic unsigned version since we'll modify it. */
1640 tree unsigned_char_type
1641 = (char_type == char_type_node
1642 ? unsigned_char_type_node
1643 : copy_type (gnat_unsigned_type_for (char_type)));
825da0d2
EB
1644
1645 TYPE_NAME (unsigned_char_type) = TYPE_NAME (char_type);
1646 TYPE_STRING_FLAG (unsigned_char_type) = TYPE_STRING_FLAG (char_type);
1647 TYPE_ARTIFICIAL (unsigned_char_type) = TYPE_ARTIFICIAL (char_type);
1648
1649 SET_TYPE_DEBUG_TYPE (char_type, unsigned_char_type);
1650 SET_TYPE_RM_MIN_VALUE (char_type, TYPE_MIN_VALUE (unsigned_char_type));
1651 SET_TYPE_RM_MAX_VALUE (char_type, TYPE_MAX_VALUE (unsigned_char_type));
1652}
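/* Illustrative sketch (for exposition only; the variable is hypothetical):
   with a signed 'char', a character type built as a signed type of
   CHAR_TYPE_SIZE bits is passed through finish_character_type so that its
   debug type and RM bounds come from the unsigned variant.  */
#if 0
  tree gnu_char_type = make_signed_type (CHAR_TYPE_SIZE);
  finish_character_type (gnu_char_type);
#endif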
1653
e3edbd56
EB
1654/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1655 finish constructing the record type as a fat pointer type. */
1656
1657void
1658finish_fat_pointer_type (tree record_type, tree field_list)
1659{
1660 /* Make sure we can put it into a register. */
5da8c011 1661 if (STRICT_ALIGNMENT)
fe37c7af 1662 SET_TYPE_ALIGN (record_type, MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE));
e3edbd56
EB
1663
1664 /* Show what it really is. */
1665 TYPE_FAT_POINTER_P (record_type) = 1;
1666
1667 /* Do not emit debug info for it since the types of its fields may still be
1668 incomplete at this point. */
1669 finish_record_type (record_type, field_list, 0, false);
1670
1671 /* Force type_contains_placeholder_p to return true on it. Although the
1672 PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
1673 type but the representation of the unconstrained array. */
1674 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
1675}
1676
032d1b71 1677/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
a1ab4c31
AC
1678 finish constructing the record or union type. If REP_LEVEL is zero, this
1679 record has no representation clause and so will be entirely laid out here.
1680 If REP_LEVEL is one, this record has a representation clause and has been
1681 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
1682 this record is derived from a parent record and thus inherits its layout;
032d1b71 1683 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
afc737f0 1684 additional debug info needs to be output for this type. */
a1ab4c31
AC
1685
1686void
032d1b71
EB
1687finish_record_type (tree record_type, tree field_list, int rep_level,
1688 bool debug_info_p)
a1ab4c31
AC
1689{
1690 enum tree_code code = TREE_CODE (record_type);
9dba4b55 1691 tree name = TYPE_IDENTIFIER (record_type);
a1ab4c31
AC
1692 tree ada_size = bitsize_zero_node;
1693 tree size = bitsize_zero_node;
1694 bool had_size = TYPE_SIZE (record_type) != 0;
1695 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
1696 bool had_align = TYPE_ALIGN (record_type) != 0;
1697 tree field;
1698
032d1b71 1699 TYPE_FIELDS (record_type) = field_list;
a1ab4c31 1700
10069d53
EB
1701 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
1702 generate debug info and have a parallel type. */
10069d53 1703 TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);
a1ab4c31
AC
1704
1705 /* Globally initialize the record first. If this is a rep'ed record,
1706 that just means some initializations; otherwise, layout the record. */
1707 if (rep_level > 0)
1708 {
fe37c7af
MM
1709 SET_TYPE_ALIGN (record_type, MAX (BITS_PER_UNIT,
1710 TYPE_ALIGN (record_type)));
a1ab4c31
AC
1711
1712 if (!had_size_unit)
1713 TYPE_SIZE_UNIT (record_type) = size_zero_node;
b1fa9126 1714
a1ab4c31
AC
1715 if (!had_size)
1716 TYPE_SIZE (record_type) = bitsize_zero_node;
1717
1718 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
1719 out just like a UNION_TYPE, since the size will be fixed. */
1720 else if (code == QUAL_UNION_TYPE)
1721 code = UNION_TYPE;
1722 }
1723 else
1724 {
1725 /* Ensure there isn't a size already set. There can be in an error
1726 case where there is a rep clause but all fields have errors and
1727 no longer have a position. */
1728 TYPE_SIZE (record_type) = 0;
bb358f1c
EB
1729
1730 /* Ensure we use the traditional GCC layout for bitfields when we need
1731 to pack the record type or have a representation clause. The other
1732 possible layout (Microsoft C compiler), if available, would prevent
1733 efficient packing in almost all cases. */
1734#ifdef TARGET_MS_BITFIELD_LAYOUT
1735 if (TARGET_MS_BITFIELD_LAYOUT && TYPE_PACKED (record_type))
1736 decl_attributes (&record_type,
1737 tree_cons (get_identifier ("gcc_struct"),
1738 NULL_TREE, NULL_TREE),
1739 ATTR_FLAG_TYPE_IN_PLACE);
1740#endif
1741
a1ab4c31
AC
1742 layout_type (record_type);
1743 }
1744
1745 /* At this point, the position and size of each field is known. It was
1746 either set before entry by a rep clause, or by laying out the type above.
1747
1748 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
1749 to compute the Ada size; the GCC size and alignment (for rep'ed records
1750 that are not padding types); and the mode (for rep'ed records). We also
1751 clear the DECL_BIT_FIELD indication for the cases we know have not been
1752 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
1753
1754 if (code == QUAL_UNION_TYPE)
032d1b71 1755 field_list = nreverse (field_list);
a1ab4c31 1756
910ad8de 1757 for (field = field_list; field; field = DECL_CHAIN (field))
a1ab4c31
AC
1758 {
1759 tree type = TREE_TYPE (field);
1760 tree pos = bit_position (field);
1761 tree this_size = DECL_SIZE (field);
1762 tree this_ada_size;
1763
e1e5852c 1764 if (RECORD_OR_UNION_TYPE_P (type)
315cff15 1765 && !TYPE_FAT_POINTER_P (type)
a1ab4c31
AC
1766 && !TYPE_CONTAINS_TEMPLATE_P (type)
1767 && TYPE_ADA_SIZE (type))
1768 this_ada_size = TYPE_ADA_SIZE (type);
1769 else
1770 this_ada_size = this_size;
1771
1772 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
1773 if (DECL_BIT_FIELD (field)
1774 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
1775 {
1776 unsigned int align = TYPE_ALIGN (type);
1777
1778 /* In the general case, type alignment is required. */
1779 if (value_factor_p (pos, align))
1780 {
1781 /* The enclosing record type must be sufficiently aligned.
1782 Otherwise, if no alignment was specified for it and it
1783 has been laid out already, bump its alignment to the
14ecca2e
EB
1784 desired one if this is compatible with its size and
1785 maximum alignment, if any. */
a1ab4c31
AC
1786 if (TYPE_ALIGN (record_type) >= align)
1787 {
fe37c7af 1788 SET_DECL_ALIGN (field, MAX (DECL_ALIGN (field), align));
a1ab4c31
AC
1789 DECL_BIT_FIELD (field) = 0;
1790 }
1791 else if (!had_align
1792 && rep_level == 0
14ecca2e
EB
1793 && value_factor_p (TYPE_SIZE (record_type), align)
1794 && (!TYPE_MAX_ALIGN (record_type)
1795 || TYPE_MAX_ALIGN (record_type) >= align))
a1ab4c31 1796 {
fe37c7af
MM
1797 SET_TYPE_ALIGN (record_type, align);
1798 SET_DECL_ALIGN (field, MAX (DECL_ALIGN (field), align));
a1ab4c31
AC
1799 DECL_BIT_FIELD (field) = 0;
1800 }
1801 }
1802
1803 /* In the non-strict alignment case, only byte alignment is required. */
1804 if (!STRICT_ALIGNMENT
1805 && DECL_BIT_FIELD (field)
1806 && value_factor_p (pos, BITS_PER_UNIT))
1807 DECL_BIT_FIELD (field) = 0;
1808 }
1809
c1abd261
EB
1810 /* If we still have DECL_BIT_FIELD set at this point, we know that the
1811 field is technically not addressable. Except that it can actually
1812 be addressed if it is BLKmode and happens to be properly aligned. */
1813 if (DECL_BIT_FIELD (field)
1814 && !(DECL_MODE (field) == BLKmode
1815 && value_factor_p (pos, BITS_PER_UNIT)))
1816 DECL_NONADDRESSABLE_P (field) = 1;
a1ab4c31
AC
1817
1818 /* A type must be as aligned as its most aligned field that is not
1819 a bit-field. But this is already enforced by layout_type. */
1820 if (rep_level > 0 && !DECL_BIT_FIELD (field))
fe37c7af
MM
1821 SET_TYPE_ALIGN (record_type,
1822 MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field)));
a1ab4c31
AC
1823
1824 switch (code)
1825 {
1826 case UNION_TYPE:
1827 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
1828 size = size_binop (MAX_EXPR, size, this_size);
1829 break;
1830
1831 case QUAL_UNION_TYPE:
1832 ada_size
1833 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1834 this_ada_size, ada_size);
1835 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1836 this_size, size);
1837 break;
1838
1839 case RECORD_TYPE:
1840 /* Since we know here that all fields are sorted in order of
1841 increasing bit position, the size of the record is one
1842 higher than the ending bit of the last field processed
1843 unless we have a rep clause, since in that case we might
1844 have a field outside a QUAL_UNION_TYPE that has a higher ending
1845 position. So use a MAX in that case. Also, if this field is a
1846 QUAL_UNION_TYPE, we need to take into account the previous size in
1847 the case of empty variants. */
1848 ada_size
1849 = merge_sizes (ada_size, pos, this_ada_size,
1850 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1851 size
1852 = merge_sizes (size, pos, this_size,
1853 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1854 break;
1855
1856 default:
1857 gcc_unreachable ();
1858 }
1859 }
1860
1861 if (code == QUAL_UNION_TYPE)
032d1b71 1862 nreverse (field_list);
a1ab4c31
AC
1863
1864 if (rep_level < 2)
1865 {
1866 /* If this is a padding record, we never want to make the size smaller
1867 than what was specified in it, if any. */
315cff15 1868 if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
a1ab4c31
AC
1869 size = TYPE_SIZE (record_type);
1870
1871 /* Now set any of the values we've just computed that apply. */
315cff15 1872 if (!TYPE_FAT_POINTER_P (record_type)
a1ab4c31
AC
1873 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
1874 SET_TYPE_ADA_SIZE (record_type, ada_size);
1875
1876 if (rep_level > 0)
1877 {
1878 tree size_unit = had_size_unit
1879 ? TYPE_SIZE_UNIT (record_type)
1880 : convert (sizetype,
1881 size_binop (CEIL_DIV_EXPR, size,
1882 bitsize_unit_node));
1883 unsigned int align = TYPE_ALIGN (record_type);
1884
1885 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
1886 TYPE_SIZE_UNIT (record_type)
1887 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
1888
1889 compute_record_mode (record_type);
1890 }
1891 }
1892
14ecca2e
EB
1893 /* Reset the TYPE_MAX_ALIGN field since it's private to gigi. */
1894 TYPE_MAX_ALIGN (record_type) = 0;
1895
032d1b71 1896 if (debug_info_p)
a1ab4c31
AC
1897 rest_of_record_type_compilation (record_type);
1898}
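/* Illustrative sketch (for exposition only; all names are hypothetical):
   building a two-field record without a rep clause (REP_LEVEL 0) and with
   debug info, the way the rest of gigi typically does it.  */
#if 0
  tree gnu_rec = make_node (RECORD_TYPE);
  tree gnu_f1, gnu_f2;
  TYPE_NAME (gnu_rec) = get_identifier ("example_rec");
  gnu_f1 = create_field_decl (get_identifier ("F1"), integer_type_node,
                              gnu_rec, NULL_TREE, NULL_TREE, 0, 0);
  gnu_f2 = create_field_decl (get_identifier ("F2"), integer_type_node,
                              gnu_rec, NULL_TREE, NULL_TREE, 0, 0);
  DECL_CHAIN (gnu_f1) = gnu_f2;
  finish_record_type (gnu_rec, gnu_f1, 0, true);
#endif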
1899
24d4b3d5
AC
1900/* Append PARALLEL_TYPE on the chain of parallel types of TYPE. If
1901 PARALLEL_TYPE has no context and its computation is not deferred yet, also
1902 propagate TYPE's context to PARALLEL_TYPE's or defer its propagation until
1903 the moment TYPE gets a context. */
a5695aa2
EB
1904
1905void
1906add_parallel_type (tree type, tree parallel_type)
1907{
1908 tree decl = TYPE_STUB_DECL (type);
1909
1910 while (DECL_PARALLEL_TYPE (decl))
1911 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
1912
1913 SET_DECL_PARALLEL_TYPE (decl, parallel_type);
24d4b3d5
AC
1914
1915 /* If PARALLEL_TYPE already has a context, we are done. */
7c775aca 1916 if (TYPE_CONTEXT (parallel_type))
24d4b3d5
AC
1917 return;
1918
7c775aca
EB
1919 /* Otherwise, try to get one from TYPE's context. If so, simply propagate
1920 it to PARALLEL_TYPE. */
1921 if (TYPE_CONTEXT (type))
24d4b3d5
AC
1922 gnat_set_type_context (parallel_type, TYPE_CONTEXT (type));
1923
7c775aca
EB
1924 /* Otherwise TYPE has no context yet. We know it will have one thanks to
1925 gnat_pushdecl and then its context will be propagated to PARALLEL_TYPE,
1926 so we have nothing to do in this case. */
a5695aa2
EB
1927}
1928
1929/* Return true if TYPE has a parallel type. */
1930
1931static bool
1932has_parallel_type (tree type)
1933{
1934 tree decl = TYPE_STUB_DECL (type);
1935
1936 return DECL_PARALLEL_TYPE (decl) != NULL_TREE;
1937}
1938
afc737f0
EB
1939/* Wrap up compilation of RECORD_TYPE, i.e. output additional debug info
1940 associated with it. It need not be invoked directly in most cases as
1941 finish_record_type takes care of doing so. */
a1ab4c31
AC
1942
1943void
1944rest_of_record_type_compilation (tree record_type)
1945{
a1ab4c31 1946 bool var_size = false;
fb88e1dd 1947 tree field;
a1ab4c31 1948
fb88e1dd
EB
1949 /* If this is a padded type, the bulk of the debug info has already been
1950 generated for the field's type. */
1951 if (TYPE_IS_PADDING_P (record_type))
1952 return;
1953
a5695aa2
EB
1954 /* If the type already has a parallel type (XVS type), then we're done. */
1955 if (has_parallel_type (record_type))
1956 return;
1957
fb88e1dd 1958 for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
a1ab4c31
AC
1959 {
1960 /* We need to make an XVE/XVU record if any field has variable size,
1961 whether or not the record does. For example, if we have a union,
1962 it may be that all fields, rounded up to the alignment, have the
1963 same size, in which case we'll use that size. But the debug
1964 output routines (except Dwarf2) won't be able to output the fields,
1965 so we need to make the special record. */
1966 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
1967 /* If a field has a non-constant qualifier, the record will have
1968 variable size too. */
fb88e1dd 1969 || (TREE_CODE (record_type) == QUAL_UNION_TYPE
a1ab4c31
AC
1970 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
1971 {
1972 var_size = true;
1973 break;
1974 }
1975 }
1976
fb88e1dd
EB
1977 /* If this record type is of variable size, make a parallel record type that
1978 will tell the debugger how the former is laid out (see exp_dbug.ads). */
986ccd21 1979 if (var_size && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
a1ab4c31
AC
1980 {
1981 tree new_record_type
1982 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
1983 ? UNION_TYPE : TREE_CODE (record_type));
9dba4b55 1984 tree orig_name = TYPE_IDENTIFIER (record_type), new_name;
a1ab4c31 1985 tree last_pos = bitsize_zero_node;
0fb2335d 1986 tree old_field, prev_old_field = NULL_TREE;
a1ab4c31 1987
0fb2335d
EB
1988 new_name
1989 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
1990 ? "XVU" : "XVE");
1991 TYPE_NAME (new_record_type) = new_name;
fe37c7af 1992 SET_TYPE_ALIGN (new_record_type, BIGGEST_ALIGNMENT);
a1ab4c31 1993 TYPE_STUB_DECL (new_record_type)
0fb2335d 1994 = create_type_stub_decl (new_name, new_record_type);
a1ab4c31
AC
1995 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
1996 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
396a2ee2 1997 gnat_pushdecl (TYPE_STUB_DECL (new_record_type), Empty);
a1ab4c31
AC
1998 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
1999 TYPE_SIZE_UNIT (new_record_type)
2000 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
2001
5c475ba9
EB
2002 /* Now scan all the fields, replacing each field with a new field
2003 corresponding to the new encoding. */
a1ab4c31 2004 for (old_field = TYPE_FIELDS (record_type); old_field;
910ad8de 2005 old_field = DECL_CHAIN (old_field))
a1ab4c31
AC
2006 {
2007 tree field_type = TREE_TYPE (old_field);
2008 tree field_name = DECL_NAME (old_field);
a1ab4c31 2009 tree curpos = bit_position (old_field);
5c475ba9 2010 tree pos, new_field;
a1ab4c31
AC
2011 bool var = false;
2012 unsigned int align = 0;
a1ab4c31 2013
5c475ba9
EB
2014 /* We're going to do some pattern matching below so remove as many
2015 conversions as possible. */
2016 curpos = remove_conversions (curpos, true);
a1ab4c31 2017
5c475ba9 2018 /* See how the position was modified from the last position.
a1ab4c31 2019
5c475ba9
EB
2020 There are two basic cases we support: a value was added
2021 to the last position or the last position was rounded to
2022 a boundary and then something was added. Check for the
2023 first case first. If not, see if there is any evidence
2024 of rounding. If so, round the last position and retry.
a1ab4c31 2025
5c475ba9 2026 If this is a union, the position can be taken as zero. */
a1ab4c31 2027 if (TREE_CODE (new_record_type) == UNION_TYPE)
5c475ba9 2028 pos = bitsize_zero_node;
a1ab4c31
AC
2029 else
2030 pos = compute_related_constant (curpos, last_pos);
2031
5c475ba9
EB
2032 if (!pos
2033 && TREE_CODE (curpos) == MULT_EXPR
cc269bb6 2034 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1)))
a1ab4c31
AC
2035 {
2036 tree offset = TREE_OPERAND (curpos, 0);
ae7e9ddd 2037 align = tree_to_uhwi (TREE_OPERAND (curpos, 1));
5c475ba9
EB
2038 align = scale_by_factor_of (offset, align);
2039 last_pos = round_up (last_pos, align);
2040 pos = compute_related_constant (curpos, last_pos);
a1ab4c31 2041 }
5c475ba9
EB
2042 else if (!pos
2043 && TREE_CODE (curpos) == PLUS_EXPR
cc269bb6 2044 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1))
a1ab4c31 2045 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
5a36c51b
RS
2046 && tree_fits_uhwi_p
2047 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1)))
a1ab4c31 2048 {
5c475ba9
EB
2049 tree offset = TREE_OPERAND (TREE_OPERAND (curpos, 0), 0);
2050 unsigned HOST_WIDE_INT addend
ae7e9ddd 2051 = tree_to_uhwi (TREE_OPERAND (curpos, 1));
a1ab4c31 2052 align
ae7e9ddd 2053 = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1));
5c475ba9
EB
2054 align = scale_by_factor_of (offset, align);
2055 align = MIN (align, addend & -addend);
2056 last_pos = round_up (last_pos, align);
2057 pos = compute_related_constant (curpos, last_pos);
a1ab4c31 2058 }
5c475ba9 2059 else if (potential_alignment_gap (prev_old_field, old_field, pos))
a1ab4c31
AC
2060 {
2061 align = TYPE_ALIGN (field_type);
5c475ba9
EB
2062 last_pos = round_up (last_pos, align);
2063 pos = compute_related_constant (curpos, last_pos);
a1ab4c31
AC
2064 }
2065
2066 /* If we can't compute a position, set it to zero.
2067
5c475ba9
EB
2068 ??? We really should abort here, but it's too much work
2069 to get this correct for all cases. */
a1ab4c31
AC
2070 if (!pos)
2071 pos = bitsize_zero_node;
2072
2073 /* See if this type is variable-sized and make a pointer type
2074 and indicate the indirection if so. Beware that the debug
2075 back-end may adjust the position computed above according
2076 to the alignment of the field type, i.e. the pointer type
2077 in this case, if we don't preventively counter that. */
2078 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
2079 {
2080 field_type = build_pointer_type (field_type);
2081 if (align != 0 && TYPE_ALIGN (field_type) > align)
2082 {
afc737f0 2083 field_type = copy_type (field_type);
fe37c7af 2084 SET_TYPE_ALIGN (field_type, align);
a1ab4c31
AC
2085 }
2086 var = true;
2087 }
2088
2089 /* Make a new field name, if necessary. */
2090 if (var || align != 0)
2091 {
2092 char suffix[16];
2093
2094 if (align != 0)
2095 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
2096 align / BITS_PER_UNIT);
2097 else
2098 strcpy (suffix, "XVL");
2099
0fb2335d 2100 field_name = concat_name (field_name, suffix);
a1ab4c31
AC
2101 }
2102
da01bfee
EB
2103 new_field
2104 = create_field_decl (field_name, field_type, new_record_type,
2105 DECL_SIZE (old_field), pos, 0, 0);
910ad8de 2106 DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
a1ab4c31
AC
2107 TYPE_FIELDS (new_record_type) = new_field;
2108
2109 /* If old_field is a QUAL_UNION_TYPE, take its size as being
2110 zero. The only time it's not the last field of the record
2111 is when there are other components at fixed positions after
2112 it (meaning there was a rep clause for every field) and we
2113 want to be able to encode them. */
2114 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
2115 (TREE_CODE (TREE_TYPE (old_field))
2116 == QUAL_UNION_TYPE)
2117 ? bitsize_zero_node
2118 : DECL_SIZE (old_field));
2119 prev_old_field = old_field;
2120 }
2121
fb88e1dd 2122 TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));
a1ab4c31 2123
a5695aa2 2124 add_parallel_type (record_type, new_record_type);
a1ab4c31 2125 }
a1ab4c31
AC
2126}
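/* For instance (exposition only), a variable-sized record R gets a parallel
   record named R___XVE (or R___XVU for a variant part) in which a
   variable-sized field F is described by a pointer-valued field F___XVL,
   following the encodings documented in exp_dbug.ads.  */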
2127
a1ab4c31 2128/* Utility function of above to merge LAST_SIZE, the previous size of a record
1e17ef87
EB
2129 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
2130 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
2131 replace a value of zero with the old size. If HAS_REP is true, we take the
2132 MAX of the end position of this field with LAST_SIZE. In all other cases,
2133 we use FIRST_BIT plus SIZE. Return an expression for the size. */
a1ab4c31
AC
2134
2135static tree
2136merge_sizes (tree last_size, tree first_bit, tree size, bool special,
2137 bool has_rep)
2138{
2139 tree type = TREE_TYPE (last_size);
c6bd4220 2140 tree new_size;
a1ab4c31
AC
2141
2142 if (!special || TREE_CODE (size) != COND_EXPR)
2143 {
c6bd4220 2144 new_size = size_binop (PLUS_EXPR, first_bit, size);
a1ab4c31 2145 if (has_rep)
c6bd4220 2146 new_size = size_binop (MAX_EXPR, last_size, new_size);
a1ab4c31
AC
2147 }
2148
2149 else
c6bd4220
EB
2150 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
2151 integer_zerop (TREE_OPERAND (size, 1))
2152 ? last_size : merge_sizes (last_size, first_bit,
2153 TREE_OPERAND (size, 1),
2154 1, has_rep),
2155 integer_zerop (TREE_OPERAND (size, 2))
2156 ? last_size : merge_sizes (last_size, first_bit,
2157 TREE_OPERAND (size, 2),
2158 1, has_rep));
a1ab4c31
AC
2159
2160 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
2161 when fed through substitute_in_expr) into thinking that a constant
2162 size is not constant. */
c6bd4220
EB
2163 while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
2164 new_size = TREE_OPERAND (new_size, 0);
a1ab4c31 2165
c6bd4220 2166 return new_size;
a1ab4c31
AC
2167}
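/* Worked example (exposition only): merging a field that starts at bit 32
   and is 8 bits wide into a running size of 32 bits.  */
#if 0
  tree last = bitsize_int (32);
  tree no_rep = merge_sizes (last, bitsize_int (32), bitsize_int (8),
                             false, false);  /* 32 + 8 = 40 bits.  */
  tree with_rep = merge_sizes (last, bitsize_int (32), bitsize_int (8),
                               false, true); /* MAX (32, 40) = 40 bits.  */
#endif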
2168
2169/* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
2170 related by the addition of a constant. Return that constant if so. */
2171
2172static tree
2173compute_related_constant (tree op0, tree op1)
2174{
2175 tree op0_var, op1_var;
2176 tree op0_con = split_plus (op0, &op0_var);
2177 tree op1_con = split_plus (op1, &op1_var);
2178 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
2179
2180 if (operand_equal_p (op0_var, op1_var, 0))
2181 return result;
2182 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
2183 return result;
2184 else
2185 return 0;
2186}
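/* Worked example (exposition only): OP0 and OP1 differing by a constant.  */
#if 0
  tree op1 = bitsize_int (32);
  tree op0 = size_binop (PLUS_EXPR, op1, bitsize_int (16));
  /* Yields bitsize_int (16); it would be null if the variable parts of
     OP0 and OP1 were not the same.  */
  tree diff = compute_related_constant (op0, op1);
#endif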
2187
2188/* Utility function of above to split a tree OP which may be a sum, into a
2189 constant part, which is returned, and a variable part, which is stored
2190 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
2191 bitsizetype. */
2192
2193static tree
2194split_plus (tree in, tree *pvar)
2195{
722356ce
EB
2196 /* Strip conversions in order to ease the tree traversal and maximize the
2197 potential for constant or plus/minus discovery. We need to be careful
a1ab4c31
AC
2198 to always return and set *pvar to bitsizetype trees, but it's worth
2199 the effort. */
722356ce 2200 in = remove_conversions (in, false);
a1ab4c31
AC
2201
2202 *pvar = convert (bitsizetype, in);
2203
2204 if (TREE_CODE (in) == INTEGER_CST)
2205 {
2206 *pvar = bitsize_zero_node;
2207 return convert (bitsizetype, in);
2208 }
2209 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
2210 {
2211 tree lhs_var, rhs_var;
2212 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
2213 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
2214
2215 if (lhs_var == TREE_OPERAND (in, 0)
2216 && rhs_var == TREE_OPERAND (in, 1))
2217 return bitsize_zero_node;
2218
2219 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
2220 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
2221 }
2222 else
2223 return bitsize_zero_node;
2224}
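/* Worked example (exposition only; GNU_SIZE is a hypothetical non-constant
   expression in bitsizetype): splitting a sum into its two parts.  */
#if 0
  tree var_part, const_part;
  tree in = size_binop (PLUS_EXPR, gnu_size, bitsize_int (7));
  /* const_part is bitsize_int (7) and var_part is GNU_SIZE.  */
  const_part = split_plus (in, &var_part);
#endif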
2225\f
a1ab4c31
AC
2226/* Return a copy of TYPE but safe to modify in any way. */
2227
2228tree
2229copy_type (tree type)
2230{
c6bd4220 2231 tree new_type = copy_node (type);
a1ab4c31 2232
90dcfecb
EB
2233 /* Unshare the language-specific data. */
2234 if (TYPE_LANG_SPECIFIC (type))
2235 {
2236 TYPE_LANG_SPECIFIC (new_type) = NULL;
2237 SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
2238 }
2239
2240 /* And the contents of the language-specific slot if needed. */
2241 if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
2242 && TYPE_RM_VALUES (type))
2243 {
2244 TYPE_RM_VALUES (new_type) = NULL_TREE;
2245 SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
2246 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
2247 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
2248 }
2249
a1ab4c31
AC
2250 /* copy_node clears this field instead of copying it, because it is
2251 aliased with TREE_CHAIN. */
c6bd4220 2252 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
a1ab4c31 2253
afc737f0
EB
2254 TYPE_POINTER_TO (new_type) = NULL_TREE;
2255 TYPE_REFERENCE_TO (new_type) = NULL_TREE;
c6bd4220 2256 TYPE_MAIN_VARIANT (new_type) = new_type;
afc737f0 2257 TYPE_NEXT_VARIANT (new_type) = NULL_TREE;
4b7bd260 2258 TYPE_CANONICAL (new_type) = new_type;
a1ab4c31 2259
c6bd4220 2260 return new_type;
a1ab4c31
AC
2261}
2262\f
c1abd261
EB
2263/* Return a subtype of sizetype with range MIN to MAX and whose
2264 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2265 of the associated TYPE_DECL. */
a1ab4c31
AC
2266
2267tree
2268create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
2269{
2270 /* First build a type for the desired range. */
523e82a7 2271 tree type = build_nonshared_range_type (sizetype, min, max);
a1ab4c31 2272
523e82a7 2273 /* Then set the index type. */
a1ab4c31 2274 SET_TYPE_INDEX_TYPE (type, index);
74746d49 2275 create_type_decl (NULL_TREE, type, true, false, gnat_node);
c1abd261 2276
a1ab4c31
AC
2277 return type;
2278}
84fb43a1
EB
2279
2280/* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2281 sizetype is used. */
2282
2283tree
2284create_range_type (tree type, tree min, tree max)
2285{
2286 tree range_type;
2287
7c775aca 2288 if (!type)
84fb43a1
EB
2289 type = sizetype;
2290
2291 /* First build a type with the base range. */
523e82a7
EB
2292 range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
2293 TYPE_MAX_VALUE (type));
84fb43a1
EB
2294
2295 /* Then set the actual range. */
1eb58520
AC
2296 SET_TYPE_RM_MIN_VALUE (range_type, min);
2297 SET_TYPE_RM_MAX_VALUE (range_type, max);
84fb43a1
EB
2298
2299 return range_type;
2300}
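/* Illustrative sketch (for exposition only): a sizetype-based range with RM
   bounds 1 .. 10.  create_index_type above would additionally record the
   Ada index type by means of SET_TYPE_INDEX_TYPE.  */
#if 0
  tree gnu_range = create_range_type (NULL_TREE, size_int (1), size_int (10));
#endif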
a1ab4c31 2301\f
6249559b
EB
2302/* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of TYPE.
2303 NAME gives the name of the type to be used in the declaration. */
10069d53
EB
2304
2305tree
6249559b 2306create_type_stub_decl (tree name, tree type)
10069d53 2307{
6249559b 2308 tree type_decl = build_decl (input_location, TYPE_DECL, name, type);
10069d53 2309 DECL_ARTIFICIAL (type_decl) = 1;
bc712852 2310 TYPE_ARTIFICIAL (type) = 1;
10069d53
EB
2311 return type_decl;
2312}
2313
6249559b
EB
2314/* Return a TYPE_DECL node for TYPE. NAME gives the name of the type to be
2315 used in the declaration. ARTIFICIAL_P is true if the declaration was
2316 generated by the compiler. DEBUG_INFO_P is true if we need to write
2317 debug information about this type. GNAT_NODE is used for the position
2318 of the decl. */
a1ab4c31
AC
2319
2320tree
6249559b
EB
2321create_type_decl (tree name, tree type, bool artificial_p, bool debug_info_p,
2322 Node_Id gnat_node)
a1ab4c31 2323{
a1ab4c31 2324 enum tree_code code = TREE_CODE (type);
6249559b
EB
2325 bool is_named
2326 = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
10069d53 2327 tree type_decl;
a1ab4c31 2328
10069d53
EB
2329 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
2330 gcc_assert (!TYPE_IS_DUMMY_P (type));
a1ab4c31 2331
10069d53
EB
2332 /* If the type hasn't been named yet, we're naming it; preserve an existing
2333 TYPE_STUB_DECL that has been attached to it for some purpose. */
6249559b 2334 if (!is_named && TYPE_STUB_DECL (type))
10069d53
EB
2335 {
2336 type_decl = TYPE_STUB_DECL (type);
6249559b 2337 DECL_NAME (type_decl) = name;
10069d53
EB
2338 }
2339 else
6249559b 2340 type_decl = build_decl (input_location, TYPE_DECL, name, type);
a1ab4c31 2341
10069d53 2342 DECL_ARTIFICIAL (type_decl) = artificial_p;
bc712852 2343 TYPE_ARTIFICIAL (type) = artificial_p;
58c8f770
EB
2344
2345 /* Add this decl to the current binding level. */
10069d53 2346 gnat_pushdecl (type_decl, gnat_node);
58c8f770 2347
aef308d0
PMR
2348 /* If we're naming the type, equate the TYPE_STUB_DECL to the name. This
2349 causes the name to be also viewed as a "tag" by the debug back-end, with
2350 the advantage that no DW_TAG_typedef is emitted for artificial "tagged"
2351 types in DWARF.
2352
2353 Note that if "type" is used as a DECL_ORIGINAL_TYPE, it may be referenced
2354 from multiple contexts, and "type_decl" references a copy of it: in such a
2355 case, do not mess TYPE_STUB_DECL: we do not want to re-use the TYPE_DECL
2356 with the mechanism above. */
6249559b 2357 if (!is_named && type != DECL_ORIGINAL_TYPE (type_decl))
10069d53
EB
2358 TYPE_STUB_DECL (type) = type_decl;
2359
50741117
EB
2360 /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
2361 back-end doesn't support, and for others if we don't need to. */
a1ab4c31
AC
2362 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
2363 DECL_IGNORED_P (type_decl) = 1;
a1ab4c31
AC
2364
2365 return type_decl;
2366}
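/* Illustrative sketch (for exposition only; GNU_TYPE and GNAT_ENTITY are
   hypothetical): declaring a compiler-generated type with debug info.  */
#if 0
  tree gnu_decl
    = create_type_decl (get_identifier ("example__t"), gnu_type, true, true,
                        gnat_entity);
#endif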
10069d53 2367\f
a1ab4c31
AC
2368/* Return a VAR_DECL or CONST_DECL node.
2369
6249559b
EB
2370 NAME gives the name of the variable. ASM_NAME is its assembler name
2371 (if provided). TYPE is its data type (a GCC ..._TYPE node). INIT is
a1ab4c31
AC
2372 the GCC tree for an optional initial expression; NULL_TREE if none.
2373
2374 CONST_FLAG is true if this variable is constant, in which case we might
2375 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
2376
2377 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
2378 definition to be made visible outside of the current compilation unit, for
2379 instance variable definitions in a package specification.
2380
1e17ef87 2381 EXTERN_FLAG is true when processing an external variable declaration (as
a1ab4c31
AC
2382 opposed to a definition: no storage is to be allocated for the variable).
2383
2056c5ed
EB
2384 STATIC_FLAG is only relevant when not at top level and indicates whether
2385 to always allocate storage to the variable.
2386
2387 VOLATILE_FLAG is true if this variable is declared as volatile.
a1ab4c31 2388
c1a569ef
EB
2389 ARTIFICIAL_P is true if the variable was generated by the compiler.
2390
2391 DEBUG_INFO_P is true if we need to write debug information for it.
2392
2ade427a
EB
2393 ATTR_LIST is the list of attributes to be attached to the variable.
2394
a1ab4c31
AC
2395 GNAT_NODE is used for the position of the decl. */
2396
2397tree
6249559b
EB
2398create_var_decl (tree name, tree asm_name, tree type, tree init,
2399 bool const_flag, bool public_flag, bool extern_flag,
2056c5ed
EB
2400 bool static_flag, bool volatile_flag, bool artificial_p,
2401 bool debug_info_p, struct attrib *attr_list,
2402 Node_Id gnat_node, bool const_decl_allowed_p)
a1ab4c31 2403{
5fe48b3d
EB
2404 /* Whether the object has static storage duration, either explicitly or by
2405 virtue of being declared at the global level. */
2406 const bool static_storage = static_flag || global_bindings_p ();
2407
2408 /* Whether the initializer is constant: for an external object or an object
2409 with static storage duration, we check that the initializer is a valid
2410 constant expression for initializing a static variable; otherwise, we
2411 only check that it is constant. */
2412 const bool init_const
6249559b
EB
2413 = (init
2414 && gnat_types_compatible_p (type, TREE_TYPE (init))
5fe48b3d 2415 && (extern_flag || static_storage
6249559b 2416 ? initializer_constant_valid_p (init, TREE_TYPE (init))
5fe48b3d 2417 != NULL_TREE
6249559b 2418 : TREE_CONSTANT (init)));
a1ab4c31
AC
2419
2420 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
5fe48b3d 2421 case the initializer may be used in lieu of the DECL node (as done in
a1ab4c31 2422 Identifier_to_gnu). This is useful to prevent the need of elaboration
5fe48b3d
EB
2423 code when an identifier for which such a DECL is made is in turn used
2424 as an initializer. We used to rely on CONST_DECL vs VAR_DECL for this,
2425 but extra constraints apply to this choice (see below) and they are not
2426 relevant to the distinction we wish to make. */
2427 const bool constant_p = const_flag && init_const;
a1ab4c31
AC
2428
2429 /* The actual DECL node. CONST_DECL was initially intended for enumerals
2430 and may be used for scalars in general but not for aggregates. */
2431 tree var_decl
c172df28
AH
2432 = build_decl (input_location,
2433 (constant_p && const_decl_allowed_p
a1ab4c31 2434 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
6249559b 2435 name, type);
a1ab4c31 2436
93e708f9
EB
2437 /* Detect constants created by the front-end to hold 'reference to function
2438 calls for stabilization purposes. This is needed for renaming. */
2439 if (const_flag && init && POINTER_TYPE_P (type))
2440 {
2441 tree inner = init;
2442 if (TREE_CODE (inner) == COMPOUND_EXPR)
2443 inner = TREE_OPERAND (inner, 1);
2444 inner = remove_conversions (inner, true);
2445 if (TREE_CODE (inner) == ADDR_EXPR
2446 && ((TREE_CODE (TREE_OPERAND (inner, 0)) == CALL_EXPR
2447 && !call_is_atomic_load (TREE_OPERAND (inner, 0)))
2448 || (TREE_CODE (TREE_OPERAND (inner, 0)) == VAR_DECL
2449 && DECL_RETURN_VALUE_P (TREE_OPERAND (inner, 0)))))
2450 DECL_RETURN_VALUE_P (var_decl) = 1;
2451 }
2452
a1ab4c31
AC
2453 /* If this is external, throw away any initializations (they will be done
2454 elsewhere) unless this is a constant for which we would like to remain
2455 able to get the initializer. If we are defining a global here, leave a
2456 constant initialization and save any variable elaborations for the
2457 elaboration routine. If we are just annotating types, throw away the
2458 initialization if it isn't a constant. */
2459 if ((extern_flag && !constant_p)
6249559b
EB
2460 || (type_annotate_only && init && !TREE_CONSTANT (init)))
2461 init = NULL_TREE;
a1ab4c31 2462
5fe48b3d
EB
2463 /* At the global level, a non-constant initializer generates elaboration
2464 statements. Check that such statements are allowed, that is to say,
2465 not violating a No_Elaboration_Code restriction. */
6249559b 2466 if (init && !init_const && global_bindings_p ())
a1ab4c31 2467 Check_Elaboration_Code_Allowed (gnat_node);
3b9e8343 2468
c1a569ef 2469 /* Attach the initializer, if any. */
6249559b 2470 DECL_INITIAL (var_decl) = init;
c1a569ef
EB
2471
2472 /* Directly set some flags. */
2473 DECL_ARTIFICIAL (var_decl) = artificial_p;
8b7b0c36 2474 DECL_EXTERNAL (var_decl) = extern_flag;
a1ab4c31 2475
ffe9a0a7
EB
2476 TREE_CONSTANT (var_decl) = constant_p;
2477 TREE_READONLY (var_decl) = const_flag;
2478
2479 /* The object is public if it is external or if it is declared public
2480 and has static storage duration. */
2481 TREE_PUBLIC (var_decl) = extern_flag || (public_flag && static_storage);
2482
2483 /* We need to allocate static storage for an object with static storage
2484 duration if it isn't external. */
2485 TREE_STATIC (var_decl) = !extern_flag && static_storage;
2486
2487 TREE_SIDE_EFFECTS (var_decl)
2488 = TREE_THIS_VOLATILE (var_decl)
2489 = TYPE_VOLATILE (type) | volatile_flag;
2490
2491 if (TREE_SIDE_EFFECTS (var_decl))
2492 TREE_ADDRESSABLE (var_decl) = 1;
2493
a1ab4c31
AC
2494 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
2495 try to fiddle with DECL_COMMON. However, on platforms that don't
2496 support global BSS sections, uninitialized global variables would
2497 go in DATA instead, thus increasing the size of the executable. */
2498 if (!flag_no_common
2499 && TREE_CODE (var_decl) == VAR_DECL
3b9e8343 2500 && TREE_PUBLIC (var_decl)
a1ab4c31
AC
2501 && !have_global_bss_p ())
2502 DECL_COMMON (var_decl) = 1;
a1ab4c31 2503
c1a569ef
EB
2504 /* Do not emit debug info for a CONST_DECL if optimization isn't enabled,
2505 since we will create an associated variable. Likewise for an external
2506 constant whose initializer is not absolute, because this would mean a
2507 global relocation in a read-only section which runs afoul of the PE-COFF
2508 run-time relocation mechanism. */
2509 if (!debug_info_p
2510 || (TREE_CODE (var_decl) == CONST_DECL && !optimize)
2511 || (extern_flag
2512 && constant_p
6249559b
EB
2513 && init
2514 && initializer_constant_valid_p (init, TREE_TYPE (init))
c1a569ef 2515 != null_pointer_node))
5225a138
EB
2516 DECL_IGNORED_P (var_decl) = 1;
2517
74746d49
EB
2518 /* ??? Some attributes cannot be applied to CONST_DECLs. */
2519 if (TREE_CODE (var_decl) == VAR_DECL)
2520 process_attributes (&var_decl, &attr_list, true, gnat_node);
2521
2522 /* Add this decl to the current binding level. */
2523 gnat_pushdecl (var_decl, gnat_node);
2524
a22b794d 2525 if (TREE_CODE (var_decl) == VAR_DECL && asm_name)
a1ab4c31 2526 {
a22b794d
EB
2527 /* Let the target mangle the name if this isn't a verbatim asm. */
2528 if (*IDENTIFIER_POINTER (asm_name) != '*')
2529 asm_name = targetm.mangle_decl_assembler_name (var_decl, asm_name);
74746d49 2530
a22b794d 2531 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
a1ab4c31 2532 }
a1ab4c31
AC
2533
2534 return var_decl;
2535}
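/* Illustrative sketch (for exposition only; GNU_TYPE, GNU_INIT and
   GNAT_ENTITY are hypothetical): a public variable with static storage
   duration, an initializer and debug info, and no attributes.  */
#if 0
  tree gnu_var
    = create_var_decl (get_identifier ("example_var"), NULL_TREE, gnu_type,
                       gnu_init, false, true, false, true, false, false,
                       true, NULL, gnat_entity);
#endif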
2536\f
2537/* Return true if TYPE, an aggregate type, contains (or is) an array. */
2538
2539static bool
2540aggregate_type_contains_array_p (tree type)
2541{
2542 switch (TREE_CODE (type))
2543 {
2544 case RECORD_TYPE:
2545 case UNION_TYPE:
2546 case QUAL_UNION_TYPE:
2547 {
2548 tree field;
910ad8de 2549 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
a1ab4c31
AC
2550 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
2551 && aggregate_type_contains_array_p (TREE_TYPE (field)))
2552 return true;
2553 return false;
2554 }
2555
2556 case ARRAY_TYPE:
2557 return true;
2558
2559 default:
2560 gcc_unreachable ();
2561 }
2562}
2563
6249559b
EB
2564/* Return a FIELD_DECL node. NAME is the field's name, TYPE is its type and
2565 RECORD_TYPE is the type of the enclosing record. If SIZE is nonzero, it
2566 is the specified size of the field. If POS is nonzero, it is the bit
2567 position. PACKED is 1 if the enclosing record is packed, -1 if it has
2568 Component_Alignment of Storage_Unit. If ADDRESSABLE is nonzero, it
62f9f3ce
EB
2569 means we are allowed to take the address of the field; if it is negative,
2570 we should not make a bitfield, which is used by make_aligning_type. */
a1ab4c31
AC
2571
2572tree
6249559b
EB
2573create_field_decl (tree name, tree type, tree record_type, tree size, tree pos,
2574 int packed, int addressable)
a1ab4c31 2575{
6249559b 2576 tree field_decl = build_decl (input_location, FIELD_DECL, name, type);
a1ab4c31
AC
2577
2578 DECL_CONTEXT (field_decl) = record_type;
6249559b 2579 TREE_READONLY (field_decl) = TYPE_READONLY (type);
a1ab4c31
AC
2580
2581 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
2582 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
2583 Likewise for an aggregate without specified position that contains an
2584 array, because in this case slices of variable length of this array
2585 must be handled by GCC and variable-sized objects need to be aligned
2586 to at least a byte boundary. */
6249559b 2587 if (packed && (TYPE_MODE (type) == BLKmode
a1ab4c31 2588 || (!pos
6249559b
EB
2589 && AGGREGATE_TYPE_P (type)
2590 && aggregate_type_contains_array_p (type))))
fe37c7af 2591 SET_DECL_ALIGN (field_decl, BITS_PER_UNIT);
a1ab4c31
AC
2592
2593 /* If a size is specified, use it. Otherwise, if the record type is packed
2594 compute a size to use, which may differ from the object's natural size.
2595 We always set a size in this case to trigger the checks for bitfield
2596 creation below, which is typically required when no position has been
2597 specified. */
2598 if (size)
2599 size = convert (bitsizetype, size);
2600 else if (packed == 1)
2601 {
6249559b
EB
2602 size = rm_size (type);
2603 if (TYPE_MODE (type) == BLKmode)
62f9f3ce 2604 size = round_up (size, BITS_PER_UNIT);
a1ab4c31
AC
2605 }
2606
2607 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
2608 specified for two reasons: first, if the size differs from the natural
2609 size; second, if the alignment is insufficient. There are a number of
2610 ways the latter can be true.
2611
2612 We never make a bitfield if the type of the field has a nonconstant size,
2613 because no such entity requiring bitfield operations should reach here.
2614
2615 We do *preventively* make a bitfield when there might be the need for it
2616 but we don't have all the necessary information to decide, as is the case
2617 of a field with no specified position in a packed record.
2618
2619 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
2620 in layout_decl or finish_record_type to clear the bit_field indication if
2621 it is in fact not needed. */
2622 if (addressable >= 0
2623 && size
2624 && TREE_CODE (size) == INTEGER_CST
6249559b
EB
2625 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2626 && (!tree_int_cst_equal (size, TYPE_SIZE (type))
2627 || (pos && !value_factor_p (pos, TYPE_ALIGN (type)))
a1ab4c31
AC
2628 || packed
2629 || (TYPE_ALIGN (record_type) != 0
6249559b 2630 && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))))
a1ab4c31
AC
2631 {
2632 DECL_BIT_FIELD (field_decl) = 1;
2633 DECL_SIZE (field_decl) = size;
2634 if (!packed && !pos)
feec4372
EB
2635 {
2636 if (TYPE_ALIGN (record_type) != 0
6249559b 2637 && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))
fe37c7af 2638 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (record_type));
feec4372 2639 else
fe37c7af 2640 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
feec4372 2641 }
a1ab4c31
AC
2642 }
2643
2644 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
2645
2646 /* Bump the alignment if need be, either for bitfield/packing purposes or
2647 to satisfy the type requirements if no such consideration applies. When
2648 we get the alignment from the type, indicate if this is from an explicit
2649 user request, which prevents stor-layout from lowering it later on. */
2650 {
d9223014 2651 unsigned int bit_align
a1ab4c31 2652 = (DECL_BIT_FIELD (field_decl) ? 1
6249559b 2653 : packed && TYPE_MODE (type) != BLKmode ? BITS_PER_UNIT : 0);
a1ab4c31
AC
2654
2655 if (bit_align > DECL_ALIGN (field_decl))
fe37c7af 2656 SET_DECL_ALIGN (field_decl, bit_align);
6249559b 2657 else if (!bit_align && TYPE_ALIGN (type) > DECL_ALIGN (field_decl))
a1ab4c31 2658 {
fe37c7af 2659 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
6249559b 2660 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (type);
a1ab4c31
AC
2661 }
2662 }
2663
2664 if (pos)
2665 {
2666 /* We need to pass in the alignment the DECL is known to have.
2667 This is the lowest-order bit set in POS, but no more than
2668 the alignment of the record, if one is specified. Note
2669 that an alignment of 0 is taken as infinite. */
2670 unsigned int known_align;
2671
cc269bb6 2672 if (tree_fits_uhwi_p (pos))
ae7e9ddd 2673 known_align = tree_to_uhwi (pos) & - tree_to_uhwi (pos);
a1ab4c31
AC
2674 else
2675 known_align = BITS_PER_UNIT;
2676
2677 if (TYPE_ALIGN (record_type)
2678 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
2679 known_align = TYPE_ALIGN (record_type);
2680
2681 layout_decl (field_decl, known_align);
2682 SET_DECL_OFFSET_ALIGN (field_decl,
cc269bb6 2683 tree_fits_uhwi_p (pos) ? BIGGEST_ALIGNMENT
a1ab4c31
AC
2684 : BITS_PER_UNIT);
2685 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
2686 &DECL_FIELD_BIT_OFFSET (field_decl),
2687 DECL_OFFSET_ALIGN (field_decl), pos);
a1ab4c31
AC
2688 }
2689
2690 /* In addition to what our caller says, claim the field is addressable if we
2691 know that its type is not suitable.
2692
2693 The field may also be "technically" nonaddressable, meaning that even if
2694 we attempt to take the field's address we will actually get the address
2695 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
2696 value we have at this point is not accurate enough, so we don't account
2697 for this here and let finish_record_type decide. */
6249559b 2698 if (!addressable && !type_for_nonaliased_component_p (type))
a1ab4c31
AC
2699 addressable = 1;
2700
2701 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
2702
2703 return field_decl;
2704}
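/* Illustrative sketch (for exposition only; GNU_REC is hypothetical): a
   3-bit component placed at bit 8 of a packed record; the checks above
   will mark it DECL_BIT_FIELD since its size differs from that of its
   type.  */
#if 0
  tree gnu_field
    = create_field_decl (get_identifier ("B"), unsigned_type_node, gnu_rec,
                         bitsize_int (3), bitsize_int (8), 1, 0);
#endif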
2705\f
1e55d29a 2706/* Return a PARM_DECL node with NAME and TYPE. */
a1ab4c31
AC
2707
2708tree
1e55d29a 2709create_param_decl (tree name, tree type)
a1ab4c31 2710{
6249559b 2711 tree param_decl = build_decl (input_location, PARM_DECL, name, type);
a1ab4c31 2712
a8e05f92
EB
2713 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
2714 can lead to various ABI violations. */
2715 if (targetm.calls.promote_prototypes (NULL_TREE)
6249559b
EB
2716 && INTEGRAL_TYPE_P (type)
2717 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node))
a1ab4c31
AC
2718 {
2719 /* We have to be careful about biased types here. Make a subtype
2720 of integer_type_node with the proper biasing. */
6249559b
EB
2721 if (TREE_CODE (type) == INTEGER_TYPE
2722 && TYPE_BIASED_REPRESENTATION_P (type))
a1ab4c31 2723 {
84fb43a1
EB
2724 tree subtype
2725 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
c1abd261
EB
2726 TREE_TYPE (subtype) = integer_type_node;
2727 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
6249559b
EB
2728 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (type));
2729 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (type));
2730 type = subtype;
a1ab4c31
AC
2731 }
2732 else
6249559b 2733 type = integer_type_node;
a1ab4c31
AC
2734 }
2735
6249559b 2736 DECL_ARG_TYPE (param_decl) = type;
a1ab4c31
AC
2737 return param_decl;
2738}
2739\f
74746d49
EB
2740/* Process the attributes in ATTR_LIST for NODE, which is either a DECL or
2741 a TYPE. If IN_PLACE is true, the tree pointed to by NODE should not be
2742 changed. GNAT_NODE is used for the position of error messages. */
a1ab4c31 2743
74746d49
EB
2744void
2745process_attributes (tree *node, struct attrib **attr_list, bool in_place,
2746 Node_Id gnat_node)
a1ab4c31 2747{
74746d49
EB
2748 struct attrib *attr;
2749
2750 for (attr = *attr_list; attr; attr = attr->next)
2751 switch (attr->type)
a1ab4c31
AC
2752 {
2753 case ATTR_MACHINE_ATTRIBUTE:
74746d49
EB
2754 Sloc_to_locus (Sloc (gnat_node), &input_location);
2755 decl_attributes (node, tree_cons (attr->name, attr->args, NULL_TREE),
2756 in_place ? ATTR_FLAG_TYPE_IN_PLACE : 0);
a1ab4c31
AC
2757 break;
2758
2759 case ATTR_LINK_ALIAS:
74746d49 2760 if (!DECL_EXTERNAL (*node))
a1ab4c31 2761 {
74746d49
EB
2762 TREE_STATIC (*node) = 1;
2763 assemble_alias (*node, attr->name);
a1ab4c31
AC
2764 }
2765 break;
2766
2767 case ATTR_WEAK_EXTERNAL:
2768 if (SUPPORTS_WEAK)
74746d49 2769 declare_weak (*node);
a1ab4c31
AC
2770 else
2771 post_error ("?weak declarations not supported on this target",
74746d49 2772 attr->error_point);
a1ab4c31
AC
2773 break;
2774
2775 case ATTR_LINK_SECTION:
677f3fa8 2776 if (targetm_common.have_named_sections)
a1ab4c31 2777 {
0ab75824 2778 set_decl_section_name (*node, IDENTIFIER_POINTER (attr->name));
74746d49 2779 DECL_COMMON (*node) = 0;
a1ab4c31
AC
2780 }
2781 else
2782 post_error ("?section attributes are not supported for this target",
74746d49 2783 attr->error_point);
a1ab4c31
AC
2784 break;
2785
2786 case ATTR_LINK_CONSTRUCTOR:
74746d49
EB
2787 DECL_STATIC_CONSTRUCTOR (*node) = 1;
2788 TREE_USED (*node) = 1;
a1ab4c31
AC
2789 break;
2790
2791 case ATTR_LINK_DESTRUCTOR:
74746d49
EB
2792 DECL_STATIC_DESTRUCTOR (*node) = 1;
2793 TREE_USED (*node) = 1;
a1ab4c31 2794 break;
40a14772
TG
2795
2796 case ATTR_THREAD_LOCAL_STORAGE:
56363ffd 2797 set_decl_tls_model (*node, decl_default_tls_model (*node));
74746d49 2798 DECL_COMMON (*node) = 0;
40a14772 2799 break;
a1ab4c31 2800 }
74746d49
EB
2801
2802 *attr_list = NULL;
a1ab4c31 2803}
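/* Usage sketch (illustrative addition, not part of the original source):
   callers hand over a chain of struct attrib nodes, which is consumed and
   cleared.  The field names are those used above; the variables are
   hypothetical.

     struct attrib a;
     a.next = NULL;
     a.type = ATTR_MACHINE_ATTRIBUTE;
     a.name = get_identifier ("noreturn");
     a.args = NULL_TREE;
     a.error_point = Empty;
     struct attrib *list = &a;
     process_attributes (&gnu_decl, &list, false, gnat_node);

   On return, list is NULL and the machine attribute has been applied to
   gnu_decl through decl_attributes.  */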
a1ab4c31
AC
2804
2805/* Return true if VALUE is known to be a multiple of FACTOR, which must be
2806 a power of 2. */
2807
2808bool
2809value_factor_p (tree value, HOST_WIDE_INT factor)
2810{
cc269bb6 2811 if (tree_fits_uhwi_p (value))
ae7e9ddd 2812 return tree_to_uhwi (value) % factor == 0;
a1ab4c31
AC
2813
2814 if (TREE_CODE (value) == MULT_EXPR)
2815 return (value_factor_p (TREE_OPERAND (value, 0), factor)
2816 || value_factor_p (TREE_OPERAND (value, 1), factor));
2817
2818 return false;
2819}
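/* Worked example (illustrative addition, not part of the original source):

     value_factor_p (bitsize_int (24), 8)  => true,  since 24 % 8 == 0
     value_factor_p (bitsize_int (20), 8)  => false, since 20 % 8 != 0

   and for a symbolic size of the form N * 16, the MULT_EXPR case accepts
   any power-of-2 factor dividing either operand, e.g. 8 divides 16.  */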
2820
e8fa3dcd
PMR
2821/* Return whether GNAT_NODE is a defining identifier for a renaming that comes
2822 from the parameter association for the instantiation of a generic. We do
2823   not want to emit source locations for them: the code generated for their
2824 initialization is likely to disturb debugging. */
2825
2826bool
2827renaming_from_generic_instantiation_p (Node_Id gnat_node)
2828{
2829 if (Nkind (gnat_node) != N_Defining_Identifier
2830 || !IN (Ekind (gnat_node), Object_Kind)
2831 || Comes_From_Source (gnat_node)
2832 || !Present (Renamed_Object (gnat_node)))
2833 return false;
2834
2835 /* Get the object declaration of the renamed object, if any and if the
2836 renamed object is a mere identifier. */
2837 gnat_node = Renamed_Object (gnat_node);
2838 if (Nkind (gnat_node) != N_Identifier)
2839 return false;
2840
2841 gnat_node = Entity (gnat_node);
2842 if (!Present (Parent (gnat_node)))
2843 return false;
2844
2845 gnat_node = Parent (gnat_node);
2846 return
2847 (Present (gnat_node)
2848 && Nkind (gnat_node) == N_Object_Declaration
2849 && Present (Corresponding_Generic_Association (gnat_node)));
2850}
2851
9a30c7c4
AC
2852/* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling it
2853   to be filled in once GNAT_SCOPE has been elaborated.  */
2854
2855static struct deferred_decl_context_node *
2856add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global)
2857{
2858 struct deferred_decl_context_node *new_node;
2859
2860 new_node
2861     = (struct deferred_decl_context_node *) xmalloc (sizeof (*new_node));
2862 new_node->decl = decl;
2863 new_node->gnat_scope = gnat_scope;
2864 new_node->force_global = force_global;
2865 new_node->types.create (1);
2866 new_node->next = deferred_decl_context_queue;
2867 deferred_decl_context_queue = new_node;
2868 return new_node;
2869}
2870
2871/* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling it
2872   to be set to the DECL_CONTEXT computed as part of N as soon as that
2873   context is available.  */
2874
2875static void
2876add_deferred_type_context (struct deferred_decl_context_node *n, tree type)
2877{
2878 n->types.safe_push (type);
2879}
2880
2881/* Get the GENERIC node corresponding to GNAT_SCOPE, if available. Return
2882 NULL_TREE if it is not available. */
2883
2884static tree
2885compute_deferred_decl_context (Entity_Id gnat_scope)
2886{
2887 tree context;
2888
2889 if (present_gnu_tree (gnat_scope))
2890 context = get_gnu_tree (gnat_scope);
2891 else
2892 return NULL_TREE;
2893
2894 if (TREE_CODE (context) == TYPE_DECL)
2895 {
2896 const tree context_type = TREE_TYPE (context);
2897
2898 /* Skip dummy types: only the final ones can appear in the context
2899 chain. */
2900 if (TYPE_DUMMY_P (context_type))
2901 return NULL_TREE;
2902
2903 /* ..._TYPE nodes are more useful than TYPE_DECL nodes in the context
2904 chain. */
2905 else
2906 context = context_type;
2907 }
2908
2909 return context;
2910}
2911
2912/* Try to process all deferred nodes in the queue. Keep in the queue the ones
2913   that cannot be processed yet and remove the others.  If FORCE is true,
2914   force the processing of all nodes and use the global context for nodes
2915   that don't have a GNU translation.  */
2916
2917void
2918process_deferred_decl_context (bool force)
2919{
2920 struct deferred_decl_context_node **it = &deferred_decl_context_queue;
2921 struct deferred_decl_context_node *node;
2922
2923 while (*it != NULL)
2924 {
2925 bool processed = false;
2926 tree context = NULL_TREE;
2927 Entity_Id gnat_scope;
2928
2929 node = *it;
2930
2931 /* If FORCE, get the innermost elaborated scope. Otherwise, just try to
2932 get the first scope. */
2933 gnat_scope = node->gnat_scope;
2934 while (Present (gnat_scope))
2935 {
2936 context = compute_deferred_decl_context (gnat_scope);
7c775aca 2937 if (!force || context)
9a30c7c4
AC
2938 break;
2939 gnat_scope = get_debug_scope (gnat_scope, NULL);
2940 }
2941
2942 /* Imported declarations must not be in a local context (i.e. not inside
2943 a function). */
7c775aca 2944 if (context && node->force_global > 0)
9a30c7c4
AC
2945 {
2946 tree ctx = context;
2947
7c775aca 2948 while (ctx)
9a30c7c4
AC
2949 {
2950 gcc_assert (TREE_CODE (ctx) != FUNCTION_DECL);
7c775aca 2951 ctx = DECL_P (ctx) ? DECL_CONTEXT (ctx) : TYPE_CONTEXT (ctx);
9a30c7c4
AC
2952 }
2953 }
2954
2955 /* If FORCE, we want to get rid of all nodes in the queue: in case there
2956 was no elaborated scope, use the global context. */
7c775aca 2957 if (force && !context)
9a30c7c4
AC
2958 context = get_global_context ();
2959
7c775aca 2960 if (context)
9a30c7c4
AC
2961 {
2962 tree t;
2963 int i;
2964
2965 DECL_CONTEXT (node->decl) = context;
2966
2967 /* Propagate it to the TYPE_CONTEXT attributes of the requested
2968 ..._TYPE nodes. */
2969 FOR_EACH_VEC_ELT (node->types, i, t)
2970 {
24d4b3d5 2971 gnat_set_type_context (t, context);
9a30c7c4
AC
2972 }
2973 processed = true;
2974 }
2975
2976       /* If this node has been successfully processed, remove it from the
2977 queue. Then move to the next node. */
2978 if (processed)
2979 {
2980 *it = node->next;
2981 node->types.release ();
2982 free (node);
2983 }
2984 else
2985 it = &node->next;
2986 }
2987}
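/* Usage sketch (illustrative addition, not part of the original source):
   the deferral machinery is meant for declarations translated before their
   scope.  A hypothetical flow:

     struct deferred_decl_context_node *n
       = add_deferred_decl_context (gnu_decl, gnat_scope, force_global);
     add_deferred_type_context (n, gnu_type);
     ...
     process_deferred_decl_context (true);

   The final call drains the queue, falling back to the global context for
   scopes that never received a GNU translation.  */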
2988
2989
5c475ba9
EB
2990/* Return VALUE scaled by the biggest power-of-2 factor of EXPR. */
2991
2992static unsigned int
2993scale_by_factor_of (tree expr, unsigned int value)
2994{
3b5d86ec
PMR
2995 unsigned HOST_WIDE_INT addend = 0;
2996 unsigned HOST_WIDE_INT factor = 1;
2997
2998 /* Peel conversions around EXPR and try to extract bodies from function
2999 calls: it is possible to get the scale factor from size functions. */
5c475ba9 3000 expr = remove_conversions (expr, true);
3b5d86ec
PMR
3001 if (TREE_CODE (expr) == CALL_EXPR)
3002 expr = maybe_inline_call_in_expr (expr);
3003
3004 /* Sometimes we get PLUS_EXPR (BIT_AND_EXPR (..., X), Y), where Y is a
3005 multiple of the scale factor we are looking for. */
3006 if (TREE_CODE (expr) == PLUS_EXPR
3007 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
3008 && tree_fits_uhwi_p (TREE_OPERAND (expr, 1)))
3009 {
3010 addend = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
3011 expr = TREE_OPERAND (expr, 0);
3012 }
5c475ba9
EB
3013
3014 /* An expression which is a bitwise AND with a mask has a power-of-2 factor
3015 corresponding to the number of trailing zeros of the mask. */
3016 if (TREE_CODE (expr) == BIT_AND_EXPR
3017 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST)
3018 {
3019 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
3020 unsigned int i = 0;
3021
3022 while ((mask & 1) == 0 && i < HOST_BITS_PER_WIDE_INT)
3023 {
3024 mask >>= 1;
3b5d86ec 3025 factor *= 2;
5c475ba9
EB
3026 i++;
3027 }
3028 }
3029
3b5d86ec
PMR
3030 /* If the addend is not a multiple of the factor we found, give up. In
3031 theory we could find a smaller common factor but it's useless for our
3032 needs. This situation arises when dealing with a field F1 with no
3033     alignment requirement but that follows a field F2 with such a
3034     requirement.  As long as we have F2's offset, we don't need alignment
3035 information to compute F1's. */
3036 if (addend % factor != 0)
3037 factor = 1;
3038
3039 return factor * value;
5c475ba9
EB
3040}
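/* Worked example (illustrative addition, not part of the original source):
   for an offset expression of the form (X + 4) & -8, the mask -8 has three
   trailing zero bits, so the factor is 8 and 8 * VALUE is returned; for
   (X & -8) + 4, the addend 4 is not a multiple of the factor 8, so the
   factor is reset to 1 and VALUE is returned unchanged.  */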
3041
7d7fcb08 3042/* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
a1ab4c31
AC
3043 unless we can prove these 2 fields are laid out in such a way that no gap
3044   exists between the end of PREV_FIELD and the beginning of CURR_FIELD.  OFFSET
3045 is the distance in bits between the end of PREV_FIELD and the starting
3046 position of CURR_FIELD. It is ignored if null. */
3047
3048static bool
3049potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
3050{
3051  /* If this is the first field of the record, there cannot be any gap.  */
3052 if (!prev_field)
3053 return false;
3054
78df6221 3055 /* If the previous field is a union type, then return false: The only
a1ab4c31
AC
3056 time when such a field is not the last field of the record is when
3057 there are other components at fixed positions after it (meaning there
3058 was a rep clause for every field), in which case we don't want the
3059 alignment constraint to override them. */
3060 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
3061 return false;
3062
3063 /* If the distance between the end of prev_field and the beginning of
3064 curr_field is constant, then there is a gap if the value of this
3065     constant is not zero.  */
cc269bb6 3066 if (offset && tree_fits_uhwi_p (offset))
a1ab4c31
AC
3067 return !integer_zerop (offset);
3068
3069 /* If the size and position of the previous field are constant,
3070 then check the sum of this size and position. There will be a gap
3071     iff it is not a multiple of the current field alignment.  */
cc269bb6
RS
3072 if (tree_fits_uhwi_p (DECL_SIZE (prev_field))
3073 && tree_fits_uhwi_p (bit_position (prev_field)))
ae7e9ddd
RS
3074 return ((tree_to_uhwi (bit_position (prev_field))
3075 + tree_to_uhwi (DECL_SIZE (prev_field)))
a1ab4c31
AC
3076 % DECL_ALIGN (curr_field) != 0);
3077
3078 /* If both the position and size of the previous field are multiples
3079 of the current field alignment, there cannot be any gap. */
3080 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
3081 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
3082 return false;
3083
3084   /* Fallback: return that there may be a potential gap.  */
3085 return true;
3086}
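/* Worked example (illustrative addition, not part of the original source):
   if PREV_FIELD starts at bit 0 with DECL_SIZE 24 and CURR_FIELD requires
   32-bit alignment, then (0 + 24) % 32 != 0 and a potential gap is
   reported; with DECL_SIZE 32 instead, the sum 32 is a multiple of 32 and
   no gap is reported.  */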
3087
6249559b
EB
3088/* Return a LABEL_DECL with NAME. GNAT_NODE is used for the position of
3089 the decl. */
a1ab4c31
AC
3090
3091tree
6249559b 3092create_label_decl (tree name, Node_Id gnat_node)
a1ab4c31 3093{
88a94e2b 3094 tree label_decl
6249559b 3095 = build_decl (input_location, LABEL_DECL, name, void_type_node);
a1ab4c31 3096
88a94e2b
EB
3097 DECL_MODE (label_decl) = VOIDmode;
3098
3099 /* Add this decl to the current binding level. */
3100 gnat_pushdecl (label_decl, gnat_node);
a1ab4c31
AC
3101
3102 return label_decl;
3103}
3104\f
6249559b
EB
3105/* Return a FUNCTION_DECL node. NAME is the name of the subprogram, ASM_NAME
3106 its assembler name, TYPE its type (a FUNCTION_TYPE node), PARAM_DECL_LIST
3107 the list of its parameters (a list of PARM_DECL nodes chained through the
3108 DECL_CHAIN field).
a1ab4c31 3109
2ade427a
EB
3110 INLINE_STATUS describes the inline flags to be set on the FUNCTION_DECL.
3111
1e55d29a
EB
3112 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
3113 definition to be made visible outside of the current compilation unit.
3114
3115 EXTERN_FLAG is true when processing an external subprogram declaration.
c1a569ef
EB
3116
3117 ARTIFICIAL_P is true if the subprogram was generated by the compiler.
3118
3119 DEBUG_INFO_P is true if we need to write debug information for it.
3120
2ade427a
EB
3121 ATTR_LIST is the list of attributes to be attached to the subprogram.
3122
c1a569ef 3123 GNAT_NODE is used for the position of the decl. */
a1ab4c31
AC
3124
3125tree
6249559b 3126create_subprog_decl (tree name, tree asm_name, tree type, tree param_decl_list,
1e55d29a
EB
3127 enum inline_status_t inline_status, bool public_flag,
3128 bool extern_flag, bool artificial_p, bool debug_info_p,
6249559b 3129 struct attrib *attr_list, Node_Id gnat_node)
a1ab4c31 3130{
6249559b 3131 tree subprog_decl = build_decl (input_location, FUNCTION_DECL, name, type);
7d7fcb08 3132 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
a1ab4c31 3133
c1a569ef 3134 DECL_ARTIFICIAL (subprog_decl) = artificial_p;
7d7fcb08 3135 DECL_EXTERNAL (subprog_decl) = extern_flag;
1e55d29a
EB
3136 TREE_PUBLIC (subprog_decl) = public_flag;
3137
3138 if (!debug_info_p)
3139 DECL_IGNORED_P (subprog_decl) = 1;
0e24192c
EB
3140
3141 switch (inline_status)
3142 {
3143 case is_suppressed:
3144 DECL_UNINLINABLE (subprog_decl) = 1;
3145 break;
3146
3147 case is_disabled:
3148 break;
3149
f087ea44
AC
3150 case is_required:
3151 if (Back_End_Inlining)
1eb58520
AC
3152 decl_attributes (&subprog_decl,
3153 tree_cons (get_identifier ("always_inline"),
3154 NULL_TREE, NULL_TREE),
3155 ATTR_FLAG_TYPE_IN_PLACE);
3156
f087ea44
AC
3157 /* ... fall through ... */
3158
0e24192c
EB
3159 case is_enabled:
3160 DECL_DECLARED_INLINE_P (subprog_decl) = 1;
c1a569ef 3161 DECL_NO_INLINE_WARNING_P (subprog_decl) = artificial_p;
0e24192c
EB
3162 break;
3163
3164 default:
3165 gcc_unreachable ();
3166 }
7d7fcb08 3167
2b50232a
EB
3168 process_attributes (&subprog_decl, &attr_list, true, gnat_node);
3169
7414a3c3
EB
3170 /* Once everything is processed, finish the subprogram declaration. */
3171 finish_subprog_decl (subprog_decl, asm_name, type);
3172
2b50232a
EB
3173 /* Add this decl to the current binding level. */
3174 gnat_pushdecl (subprog_decl, gnat_node);
3175
a1ab4c31
AC
3176 /* Output the assembler code and/or RTL for the declaration. */
3177 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
3178
3179 return subprog_decl;
3180}
1e55d29a 3181
7414a3c3
EB
3182/* Given a subprogram declaration DECL, its assembler name and its type,
3183 finish constructing the subprogram declaration from ASM_NAME and TYPE. */
1e55d29a
EB
3184
3185void
7414a3c3 3186finish_subprog_decl (tree decl, tree asm_name, tree type)
1e55d29a
EB
3187{
3188 tree result_decl
3189 = build_decl (DECL_SOURCE_LOCATION (decl), RESULT_DECL, NULL_TREE,
3190 TREE_TYPE (type));
3191
3192 DECL_ARTIFICIAL (result_decl) = 1;
3193 DECL_IGNORED_P (result_decl) = 1;
3194 DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (type);
3195 DECL_RESULT (decl) = result_decl;
3196
3197 TREE_READONLY (decl) = TYPE_READONLY (type);
3198 TREE_SIDE_EFFECTS (decl) = TREE_THIS_VOLATILE (decl) = TYPE_VOLATILE (type);
7414a3c3
EB
3199
3200 if (asm_name)
3201 {
3202 /* Let the target mangle the name if this isn't a verbatim asm. */
3203 if (*IDENTIFIER_POINTER (asm_name) != '*')
3204 asm_name = targetm.mangle_decl_assembler_name (decl, asm_name);
3205
3206 SET_DECL_ASSEMBLER_NAME (decl, asm_name);
3207
3208 /* The expand_main_function circuitry expects "main_identifier_node" to
3209 designate the DECL_NAME of the 'main' entry point, in turn expected
3210 to be declared as the "main" function literally by default. Ada
3211 program entry points are typically declared with a different name
3212      within the binder-generated file, exported as 'main' to satisfy the
3213 system expectations. Force main_identifier_node in this case. */
3214 if (asm_name == main_identifier_node)
3215 DECL_NAME (decl) = main_identifier_node;
3216 }
1e55d29a 3217}
a1ab4c31
AC
3218\f
3219/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
3220 body. This routine needs to be invoked before processing the declarations
3221 appearing in the subprogram. */
3222
3223void
3224begin_subprog_body (tree subprog_decl)
3225{
3226 tree param_decl;
3227
a1ab4c31
AC
3228 announce_function (subprog_decl);
3229
0ae44446
JR
3230 /* This function is being defined. */
3231 TREE_STATIC (subprog_decl) = 1;
3232
e2d13a4a
EB
3233 /* The failure of this assertion will likely come from a wrong context for
3234 the subprogram body, e.g. another procedure for a procedure declared at
3235 library level. */
3236 gcc_assert (current_function_decl == decl_function_context (subprog_decl));
3237
58c8f770
EB
3238 current_function_decl = subprog_decl;
3239
a1ab4c31
AC
3240 /* Enter a new binding level and show that all the parameters belong to
3241 this function. */
3242 gnat_pushlevel ();
a09d56d8 3243
a1ab4c31 3244 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
910ad8de 3245 param_decl = DECL_CHAIN (param_decl))
a1ab4c31
AC
3246 DECL_CONTEXT (param_decl) = subprog_decl;
3247
3248 make_decl_rtl (subprog_decl);
a1ab4c31
AC
3249}
3250
71196d4e 3251/* Finish translating the current subprogram and set its BODY. */
a1ab4c31
AC
3252
3253void
a406865a 3254end_subprog_body (tree body)
a1ab4c31
AC
3255{
3256 tree fndecl = current_function_decl;
3257
bd9c7fb9 3258 /* Attach the BLOCK for this level to the function and pop the level. */
a1ab4c31
AC
3259 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
3260 DECL_INITIAL (fndecl) = current_binding_level->block;
3261 gnat_poplevel ();
3262
a1ab4c31
AC
3263 /* Mark the RESULT_DECL as being in this subprogram. */
3264 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
3265
a963da4d
EB
3266 /* The body should be a BIND_EXPR whose BLOCK is the top-level one. */
3267 if (TREE_CODE (body) == BIND_EXPR)
3268 {
3269 BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
3270 DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
3271 }
3272
a1ab4c31
AC
3273 DECL_SAVED_TREE (fndecl) = body;
3274
228ee426 3275 current_function_decl = decl_function_context (fndecl);
71196d4e
EB
3276}
3277
3278/* Wrap up compilation of SUBPROG_DECL, a subprogram body. */
a1ab4c31 3279
71196d4e
EB
3280void
3281rest_of_subprog_body_compilation (tree subprog_decl)
3282{
a1ab4c31
AC
3283 /* We cannot track the location of errors past this point. */
3284 error_gnat_node = Empty;
3285
3286 /* If we're only annotating types, don't actually compile this function. */
3287 if (type_annotate_only)
3288 return;
3289
a406865a 3290 /* Dump functions before gimplification. */
71196d4e 3291 dump_function (TDI_original, subprog_decl);
a406865a 3292
228ee426 3293 if (!decl_function_context (subprog_decl))
3dafb85c 3294 cgraph_node::finalize_function (subprog_decl, false);
a1ab4c31
AC
3295 else
3296 /* Register this function with cgraph just far enough to get it
3297 added to our parent's nested function list. */
037e5573 3298 (void) cgraph_node::get_create (subprog_decl);
a1ab4c31
AC
3299}
3300
a1ab4c31
AC
3301tree
3302gnat_builtin_function (tree decl)
3303{
3304 gnat_pushdecl (decl, Empty);
3305 return decl;
3306}
3307
3308/* Return an integer type with the number of bits of precision given by
3309 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
3310 it is a signed type. */
3311
3312tree
3313gnat_type_for_size (unsigned precision, int unsignedp)
3314{
3315 tree t;
3316 char type_name[20];
3317
3318 if (precision <= 2 * MAX_BITS_PER_WORD
3319 && signed_and_unsigned_types[precision][unsignedp])
3320 return signed_and_unsigned_types[precision][unsignedp];
3321
3322 if (unsignedp)
3323 t = make_unsigned_type (precision);
3324 else
3325 t = make_signed_type (precision);
3326
3327 if (precision <= 2 * MAX_BITS_PER_WORD)
3328 signed_and_unsigned_types[precision][unsignedp] = t;
3329
3330 if (!TYPE_NAME (t))
3331 {
c3d79c60 3332 sprintf (type_name, "%sSIGNED_%u", unsignedp ? "UN" : "", precision);
a1ab4c31
AC
3333 TYPE_NAME (t) = get_identifier (type_name);
3334 }
3335
3336 return t;
3337}
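/* Usage sketch (illustrative addition, not part of the original source):

     tree u8  = gnat_type_for_size (8, 1);
     tree s32 = gnat_type_for_size (32, 0);

   u8 is an unsigned 8-bit type named "UNSIGNED_8" and s32 a signed 32-bit
   type named "SIGNED_32"; repeated calls with the same precision and
   signedness return the node cached in signed_and_unsigned_types.  */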
3338
3339/* Likewise for floating-point types. */
3340
3341static tree
ef4bddc2 3342float_type_for_precision (int precision, machine_mode mode)
a1ab4c31
AC
3343{
3344 tree t;
3345 char type_name[20];
3346
3347 if (float_types[(int) mode])
3348 return float_types[(int) mode];
3349
3350 float_types[(int) mode] = t = make_node (REAL_TYPE);
3351 TYPE_PRECISION (t) = precision;
3352 layout_type (t);
3353
3354 gcc_assert (TYPE_MODE (t) == mode);
3355 if (!TYPE_NAME (t))
3356 {
3357 sprintf (type_name, "FLOAT_%d", precision);
3358 TYPE_NAME (t) = get_identifier (type_name);
3359 }
3360
3361 return t;
3362}
3363
3364/* Return a data type that has machine mode MODE. UNSIGNEDP selects
3365 an unsigned type; otherwise a signed type is returned. */
3366
3367tree
ef4bddc2 3368gnat_type_for_mode (machine_mode mode, int unsignedp)
a1ab4c31
AC
3369{
3370 if (mode == BLKmode)
3371 return NULL_TREE;
2799d18c
EB
3372
3373 if (mode == VOIDmode)
a1ab4c31 3374 return void_type_node;
2799d18c
EB
3375
3376 if (COMPLEX_MODE_P (mode))
a1ab4c31 3377 return NULL_TREE;
2799d18c
EB
3378
3379 if (SCALAR_FLOAT_MODE_P (mode))
a1ab4c31 3380 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2799d18c
EB
3381
3382 if (SCALAR_INT_MODE_P (mode))
a1ab4c31 3383 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2799d18c
EB
3384
3385 if (VECTOR_MODE_P (mode))
3386 {
ef4bddc2 3387 machine_mode inner_mode = GET_MODE_INNER (mode);
2799d18c
EB
3388 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
3389 if (inner_type)
3390 return build_vector_type_for_mode (inner_type, mode);
3391 }
3392
3393 return NULL_TREE;
a1ab4c31
AC
3394}
3395
9a1bdc31
EB
3396/* Return the signed or unsigned version of TYPE_NODE, a scalar type, the
3397 signedness being specified by UNSIGNEDP. */
a1ab4c31
AC
3398
3399tree
9a1bdc31 3400gnat_signed_or_unsigned_type_for (int unsignedp, tree type_node)
a1ab4c31 3401{
825da0d2
EB
3402 if (type_node == char_type_node)
3403 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
3404
9a1bdc31 3405 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), unsignedp);
a1ab4c31
AC
3406
3407 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
3408 {
afc737f0 3409 type = copy_type (type);
a1ab4c31
AC
3410 TREE_TYPE (type) = type_node;
3411 }
3412 else if (TREE_TYPE (type_node)
3413 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
3414 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
3415 {
afc737f0 3416 type = copy_type (type);
a1ab4c31
AC
3417 TREE_TYPE (type) = TREE_TYPE (type_node);
3418 }
3419
3420 return type;
3421}
3422
3423/* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
3424 transparently converted to each other. */
3425
3426int
3427gnat_types_compatible_p (tree t1, tree t2)
3428{
3429 enum tree_code code;
3430
3431 /* This is the default criterion. */
3432 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
3433 return 1;
3434
3435 /* We only check structural equivalence here. */
3436 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
3437 return 0;
3438
7948ae37
OH
3439 /* Vector types are also compatible if they have the same number of subparts
3440 and the same form of (scalar) element type. */
3441 if (code == VECTOR_TYPE
3442 && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
3443 && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
3444 && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
3445 return 1;
3446
cfa0bd19 3447 /* Array types are also compatible if they are constrained and have the same
ee45a32d 3448 domain(s), the same component type and the same scalar storage order. */
a1ab4c31 3449 if (code == ARRAY_TYPE
0adef32b
JJ
3450 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
3451 || (TYPE_DOMAIN (t1)
b4680ca1 3452 && TYPE_DOMAIN (t2)
0adef32b
JJ
3453 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
3454 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
3455 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
cfa0bd19 3456 TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
96bba5e6
EB
3457 && (TREE_TYPE (t1) == TREE_TYPE (t2)
3458 || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
ee45a32d
EB
3459 && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))))
3460 && TYPE_REVERSE_STORAGE_ORDER (t1) == TYPE_REVERSE_STORAGE_ORDER (t2))
a1ab4c31
AC
3461 return 1;
3462
a1ab4c31
AC
3463 return 0;
3464}
523e82a7 3465
71196d4e
EB
3466/* Return true if EXPR is a useless type conversion. */
3467
3468bool
3469gnat_useless_type_conversion (tree expr)
3470{
3471 if (CONVERT_EXPR_P (expr)
3472 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
3473 || TREE_CODE (expr) == NON_LVALUE_EXPR)
3474 return gnat_types_compatible_p (TREE_TYPE (expr),
3475 TREE_TYPE (TREE_OPERAND (expr, 0)));
3476
3477 return false;
3478}
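/* Usage sketch (illustrative addition, not part of the original source):
   a NOP_EXPR or VIEW_CONVERT_EXPR between two variants of the same array
   type is reported as useless because gnat_types_compatible_p accepts
   them, whereas one that changes the component type or the bounds is not.
   A hypothetical caller could strip such conversions with:

     if (gnat_useless_type_conversion (expr))
       expr = TREE_OPERAND (expr, 0);  */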
3479
523e82a7
EB
3480/* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
3481
3482bool
3483fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
3484 bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
3485{
3486 return TYPE_CI_CO_LIST (t) == cico_list
3487 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
3488 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
3489 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
3490}
a1ab4c31
AC
3491\f
3492/* EXP is an expression for the size of an object. If this size contains
3493 discriminant references, replace them with the maximum (if MAX_P) or
3494 minimum (if !MAX_P) possible value of the discriminant. */
3495
3496tree
3497max_size (tree exp, bool max_p)
3498{
3499 enum tree_code code = TREE_CODE (exp);
3500 tree type = TREE_TYPE (exp);
3501
3502 switch (TREE_CODE_CLASS (code))
3503 {
3504 case tcc_declaration:
3505 case tcc_constant:
3506 return exp;
3507
3508 case tcc_vl_exp:
3509 if (code == CALL_EXPR)
3510 {
f82a627c
EB
3511 tree t, *argarray;
3512 int n, i;
3513
3514 t = maybe_inline_call_in_expr (exp);
3515 if (t)
3516 return max_size (t, max_p);
a1ab4c31 3517
f82a627c
EB
3518 n = call_expr_nargs (exp);
3519 gcc_assert (n > 0);
2bb1fc26 3520 argarray = XALLOCAVEC (tree, n);
a1ab4c31
AC
3521 for (i = 0; i < n; i++)
3522 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
3523 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
3524 }
3525 break;
3526
3527 case tcc_reference:
3528 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
3529 modify. Otherwise, we treat it like a variable. */
1eb58520
AC
3530 if (CONTAINS_PLACEHOLDER_P (exp))
3531 {
3532 tree val_type = TREE_TYPE (TREE_OPERAND (exp, 1));
3533 tree val = (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type));
3534 return max_size (convert (get_base_type (val_type), val), true);
3535 }
a1ab4c31 3536
1eb58520 3537 return exp;
a1ab4c31
AC
3538
3539 case tcc_comparison:
3540 return max_p ? size_one_node : size_zero_node;
3541
3542 case tcc_unary:
ce3da0d0
EB
3543 if (code == NON_LVALUE_EXPR)
3544 return max_size (TREE_OPERAND (exp, 0), max_p);
6625d7bc 3545
ce3da0d0
EB
3546 return fold_build1 (code, type,
3547 max_size (TREE_OPERAND (exp, 0),
3548 code == NEGATE_EXPR ? !max_p : max_p));
3549
a1ab4c31 3550 case tcc_binary:
ce3da0d0
EB
3551 {
3552 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
3553 tree rhs = max_size (TREE_OPERAND (exp, 1),
3554 code == MINUS_EXPR ? !max_p : max_p);
3555
3556 /* Special-case wanting the maximum value of a MIN_EXPR.
3557 In that case, if one side overflows, return the other. */
3558 if (max_p && code == MIN_EXPR)
3559 {
3560 if (TREE_CODE (rhs) == INTEGER_CST && TREE_OVERFLOW (rhs))
3561 return lhs;
3562
3563 if (TREE_CODE (lhs) == INTEGER_CST && TREE_OVERFLOW (lhs))
3564 return rhs;
3565 }
3566
3567 /* Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
3568 overflowing and the RHS a variable. */
3569 if ((code == MINUS_EXPR || code == PLUS_EXPR)
3570 && TREE_CODE (lhs) == INTEGER_CST
3571 && TREE_OVERFLOW (lhs)
396e67d2 3572 && TREE_CODE (rhs) != INTEGER_CST)
ce3da0d0
EB
3573 return lhs;
3574
396e67d2
EB
3575 /* If we are going to subtract a "negative" value in an unsigned type,
3576 do the operation as an addition of the negated value, in order to
3577 avoid creating a spurious overflow below. */
3578 if (code == MINUS_EXPR
3579 && TYPE_UNSIGNED (type)
3580 && TREE_CODE (rhs) == INTEGER_CST
3581 && !TREE_OVERFLOW (rhs)
3582 && tree_int_cst_sign_bit (rhs) != 0)
3583 {
3584 rhs = fold_build1 (NEGATE_EXPR, type, rhs);
3585 code = PLUS_EXPR;
3586 }
3587
3588 /* We need to detect overflows so we call size_binop here. */
ce3da0d0
EB
3589 return size_binop (code, lhs, rhs);
3590 }
3591
a1ab4c31
AC
3592 case tcc_expression:
3593 switch (TREE_CODE_LENGTH (code))
3594 {
3595 case 1:
722356ce
EB
3596 if (code == SAVE_EXPR)
3597 return exp;
ce3da0d0
EB
3598
3599 return fold_build1 (code, type,
3600 max_size (TREE_OPERAND (exp, 0), max_p));
a1ab4c31
AC
3601
3602 case 2:
3603 if (code == COMPOUND_EXPR)
3604 return max_size (TREE_OPERAND (exp, 1), max_p);
3605
ce3da0d0
EB
3606 return fold_build2 (code, type,
3607 max_size (TREE_OPERAND (exp, 0), max_p),
3608 max_size (TREE_OPERAND (exp, 1), max_p));
a1ab4c31
AC
3609
3610 case 3:
722356ce 3611 if (code == COND_EXPR)
a1ab4c31
AC
3612 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
3613 max_size (TREE_OPERAND (exp, 1), max_p),
3614 max_size (TREE_OPERAND (exp, 2), max_p));
ce3da0d0
EB
3615
3616 default:
3617 break;
a1ab4c31
AC
3618 }
3619
3620 /* Other tree classes cannot happen. */
3621 default:
3622 break;
3623 }
3624
3625 gcc_unreachable ();
3626}
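/* Worked example (illustrative addition, not part of the original source):
   for a size expression 8 * D where D is a discriminant reference (a
   PLACEHOLDER_EXPR based reference) whose subtype ranges over 0 .. 100,
   max_size with MAX_P true substitutes the upper bound and folds the
   result to 800, while MAX_P false substitutes the lower bound and
   yields 0.  */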
3627\f
3628/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
3629 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
3630 Return a constructor for the template. */
3631
3632tree
3633build_template (tree template_type, tree array_type, tree expr)
3634{
9771b263 3635 vec<constructor_elt, va_gc> *template_elts = NULL;
a1ab4c31
AC
3636 tree bound_list = NULL_TREE;
3637 tree field;
3638
3639 while (TREE_CODE (array_type) == RECORD_TYPE
315cff15 3640 && (TYPE_PADDING_P (array_type)
a1ab4c31
AC
3641 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
3642 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
3643
3644 if (TREE_CODE (array_type) == ARRAY_TYPE
3645 || (TREE_CODE (array_type) == INTEGER_TYPE
3646 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
3647 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
3648
3649 /* First make the list for a CONSTRUCTOR for the template. Go down the
3650 field list of the template instead of the type chain because this
3651 array might be an Ada array of arrays and we can't tell where the
3652 nested arrays stop being the underlying object. */
3653
3654 for (field = TYPE_FIELDS (template_type); field;
3655 (bound_list
3656 ? (bound_list = TREE_CHAIN (bound_list))
3657 : (array_type = TREE_TYPE (array_type))),
910ad8de 3658 field = DECL_CHAIN (DECL_CHAIN (field)))
a1ab4c31
AC
3659 {
3660 tree bounds, min, max;
3661
3662 /* If we have a bound list, get the bounds from there. Likewise
3663 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
3664 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
3665 This will give us a maximum range. */
3666 if (bound_list)
3667 bounds = TREE_VALUE (bound_list);
3668 else if (TREE_CODE (array_type) == ARRAY_TYPE)
3669 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
3670 else if (expr && TREE_CODE (expr) == PARM_DECL
3671 && DECL_BY_COMPONENT_PTR_P (expr))
3672 bounds = TREE_TYPE (field);
3673 else
3674 gcc_unreachable ();
3675
3676 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
910ad8de 3677 max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));
a1ab4c31
AC
3678
3679 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
3680 substitute it from OBJECT. */
3681 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
3682 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
3683
0e228dd9 3684 CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
910ad8de 3685 CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
a1ab4c31
AC
3686 }
3687
0e228dd9 3688 return gnat_build_constructor (template_type, template_elts);
a1ab4c31
AC
3689}
3690\f
e63b36bd
EB
3691/* Return true if TYPE is suitable for the element type of a vector. */
3692
3693static bool
3694type_for_vector_element_p (tree type)
3695{
ef4bddc2 3696 machine_mode mode;
e63b36bd
EB
3697
3698 if (!INTEGRAL_TYPE_P (type)
3699 && !SCALAR_FLOAT_TYPE_P (type)
3700 && !FIXED_POINT_TYPE_P (type))
3701 return false;
3702
3703 mode = TYPE_MODE (type);
3704 if (GET_MODE_CLASS (mode) != MODE_INT
3705 && !SCALAR_FLOAT_MODE_P (mode)
3706 && !ALL_SCALAR_FIXED_POINT_MODE_P (mode))
3707 return false;
3708
3709 return true;
3710}
3711
3712/* Return a vector type given the SIZE and the INNER_TYPE, or NULL_TREE if
3713 this is not possible. If ATTRIBUTE is non-zero, we are processing the
3714 attribute declaration and want to issue error messages on failure. */
3715
3716static tree
3717build_vector_type_for_size (tree inner_type, tree size, tree attribute)
3718{
3719 unsigned HOST_WIDE_INT size_int, inner_size_int;
3720 int nunits;
3721
3722 /* Silently punt on variable sizes. We can't make vector types for them,
3723     we need to ignore them on front-end generated subtypes of unconstrained
3724 base types, and this attribute is for binding implementors, not end
3725 users, so we should never get there from legitimate explicit uses. */
3726 if (!tree_fits_uhwi_p (size))
3727 return NULL_TREE;
3728 size_int = tree_to_uhwi (size);
3729
3730 if (!type_for_vector_element_p (inner_type))
3731 {
3732 if (attribute)
3733 error ("invalid element type for attribute %qs",
3734 IDENTIFIER_POINTER (attribute));
3735 return NULL_TREE;
3736 }
3737 inner_size_int = tree_to_uhwi (TYPE_SIZE_UNIT (inner_type));
3738
3739 if (size_int % inner_size_int)
3740 {
3741 if (attribute)
3742 error ("vector size not an integral multiple of component size");
3743 return NULL_TREE;
3744 }
3745
3746 if (size_int == 0)
3747 {
3748 if (attribute)
3749 error ("zero vector size");
3750 return NULL_TREE;
3751 }
3752
3753 nunits = size_int / inner_size_int;
3754 if (nunits & (nunits - 1))
3755 {
3756 if (attribute)
3757 error ("number of components of vector not a power of two");
3758 return NULL_TREE;
3759 }
3760
3761 return build_vector_type (inner_type, nunits);
3762}
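/* Worked example (illustrative addition, not part of the original source):
   with a 32-bit INNER_TYPE and a SIZE of 16 bytes, 16 / 4 = 4 components,
   a power of two, so a 4-element vector type is returned; a SIZE of 12
   bytes passes the divisibility check but fails the power-of-two check,
   so NULL_TREE is returned (with an error if ATTRIBUTE is set).  */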
3763
3764/* Return a vector type whose representative array type is ARRAY_TYPE, or
3765 NULL_TREE if this is not possible. If ATTRIBUTE is non-zero, we are
3766 processing the attribute and want to issue error messages on failure. */
3767
3768static tree
3769build_vector_type_for_array (tree array_type, tree attribute)
3770{
3771 tree vector_type = build_vector_type_for_size (TREE_TYPE (array_type),
3772 TYPE_SIZE_UNIT (array_type),
3773 attribute);
3774 if (!vector_type)
3775 return NULL_TREE;
3776
3777 TYPE_REPRESENTATIVE_ARRAY (vector_type) = array_type;
3778 return vector_type;
3779}
3780\f
928dfa4b
EB
3781/* Build a type to be used to represent an aliased object whose nominal type
3782 is an unconstrained array. This consists of a RECORD_TYPE containing a
3783 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
3784 If ARRAY_TYPE is that of an unconstrained array, this is used to represent
3785 an arbitrary unconstrained object. Use NAME as the name of the record.
3786 DEBUG_INFO_P is true if we need to write debug information for the type. */
a1ab4c31
AC
3787
3788tree
928dfa4b
EB
3789build_unc_object_type (tree template_type, tree object_type, tree name,
3790 bool debug_info_p)
a1ab4c31 3791{
24d4b3d5 3792 tree decl;
a1ab4c31 3793 tree type = make_node (RECORD_TYPE);
da01bfee
EB
3794 tree template_field
3795 = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
3796 NULL_TREE, NULL_TREE, 0, 1);
3797 tree array_field
3798 = create_field_decl (get_identifier ("ARRAY"), object_type, type,
3799 NULL_TREE, NULL_TREE, 0, 1);
a1ab4c31
AC
3800
3801 TYPE_NAME (type) = name;
3802 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
910ad8de 3803 DECL_CHAIN (template_field) = array_field;
928dfa4b
EB
3804 finish_record_type (type, template_field, 0, true);
3805
3806 /* Declare it now since it will never be declared otherwise. This is
3807 necessary to ensure that its subtrees are properly marked. */
24d4b3d5
AC
3808 decl = create_type_decl (name, type, true, debug_info_p, Empty);
3809
3810   /* template_type will not be used anywhere else, so in order to keep the
3811      debug info clean and to avoid scoping issues, make decl its
3812      context.  */
3813 gnat_set_type_context (template_type, decl);
a1ab4c31
AC
3814
3815 return type;
3816}
3817
3818/* Same, taking a thin or fat pointer type instead of a template type. */
3819
3820tree
3821build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
928dfa4b 3822 tree name, bool debug_info_p)
a1ab4c31
AC
3823{
3824 tree template_type;
3825
315cff15 3826 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
a1ab4c31
AC
3827
3828 template_type
315cff15 3829 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
910ad8de 3830 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
a1ab4c31 3831 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
928dfa4b
EB
3832
3833 return
3834 build_unc_object_type (template_type, object_type, name, debug_info_p);
a1ab4c31 3835}
a1ab4c31 3836\f
229077b0
EB
3837/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
3838 In the normal case this is just two adjustments, but we have more to
3839 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
a1ab4c31
AC
3840
3841void
3842update_pointer_to (tree old_type, tree new_type)
3843{
3844 tree ptr = TYPE_POINTER_TO (old_type);
3845 tree ref = TYPE_REFERENCE_TO (old_type);
aeecf17c 3846 tree t;
a1ab4c31
AC
3847
3848 /* If this is the main variant, process all the other variants first. */
3849 if (TYPE_MAIN_VARIANT (old_type) == old_type)
aeecf17c
EB
3850 for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
3851 update_pointer_to (t, new_type);
a1ab4c31 3852
229077b0 3853 /* If no pointers and no references, we are done. */
a1ab4c31
AC
3854 if (!ptr && !ref)
3855 return;
3856
3857 /* Merge the old type qualifiers in the new type.
3858
3859 Each old variant has qualifiers for specific reasons, and the new
229077b0 3860 designated type as well. Each set of qualifiers represents useful
a1ab4c31
AC
3861 information grabbed at some point, and merging the two simply unifies
3862 these inputs into the final type description.
3863
3864 Consider for instance a volatile type frozen after an access to constant
229077b0
EB
3865 type designating it; after the designated type's freeze, we get here with
3866 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
3867 when the access type was processed. We will make a volatile and readonly
a1ab4c31
AC
3868 designated type, because that's what it really is.
3869
229077b0
EB
3870 We might also get here for a non-dummy OLD_TYPE variant with different
3871 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
a1ab4c31 3872 to private record type elaboration (see the comments around the call to
229077b0
EB
3873 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
3874 the qualifiers in those cases too, to avoid accidentally discarding the
3875 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
3876 new_type
3877 = build_qualified_type (new_type,
3878 TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
3879
3880 /* If old type and new type are identical, there is nothing to do. */
a1ab4c31
AC
3881 if (old_type == new_type)
3882 return;
3883
3884 /* Otherwise, first handle the simple case. */
3885 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
3886 {
aeecf17c
EB
3887 tree new_ptr, new_ref;
3888
3889 /* If pointer or reference already points to new type, nothing to do.
3890 This can happen as update_pointer_to can be invoked multiple times
3891 on the same couple of types because of the type variants. */
3892 if ((ptr && TREE_TYPE (ptr) == new_type)
3893 || (ref && TREE_TYPE (ref) == new_type))
3894 return;
3895
3896 /* Chain PTR and its variants at the end. */
3897 new_ptr = TYPE_POINTER_TO (new_type);
3898 if (new_ptr)
3899 {
3900 while (TYPE_NEXT_PTR_TO (new_ptr))
3901 new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
3902 TYPE_NEXT_PTR_TO (new_ptr) = ptr;
3903 }
3904 else
3905 TYPE_POINTER_TO (new_type) = ptr;
a1ab4c31 3906
aeecf17c 3907 /* Now adjust them. */
a1ab4c31 3908 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
aeecf17c 3909 for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
50179d58
EB
3910 {
3911 TREE_TYPE (t) = new_type;
3912 if (TYPE_NULL_BOUNDS (t))
3913 TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
3914 }
de9528f0 3915
aeecf17c
EB
3916 /* Chain REF and its variants at the end. */
3917 new_ref = TYPE_REFERENCE_TO (new_type);
3918 if (new_ref)
3919 {
3920 while (TYPE_NEXT_REF_TO (new_ref))
3921 new_ref = TYPE_NEXT_REF_TO (new_ref);
3922 TYPE_NEXT_REF_TO (new_ref) = ref;
3923 }
3924 else
3925 TYPE_REFERENCE_TO (new_type) = ref;
3926
3927 /* Now adjust them. */
a1ab4c31 3928 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
aeecf17c
EB
3929 for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
3930 TREE_TYPE (t) = new_type;
de9528f0
EB
3931
3932 TYPE_POINTER_TO (old_type) = NULL_TREE;
3bd6ca3f 3933 TYPE_REFERENCE_TO (old_type) = NULL_TREE;
a1ab4c31
AC
3934 }
3935
aeecf17c
EB
3936 /* Now deal with the unconstrained array case. In this case the pointer
3937 is actually a record where both fields are pointers to dummy nodes.
e3edbd56
EB
3938 Turn them into pointers to the correct types using update_pointer_to.
3939 Likewise for the pointer to the object record (thin pointer). */
a1ab4c31
AC
3940 else
3941 {
e3edbd56 3942 tree new_ptr = TYPE_POINTER_TO (new_type);
aeecf17c
EB
3943
3944 gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));
3945
e3edbd56 3946 /* If PTR already points to NEW_TYPE, nothing to do. This can happen
aeecf17c
EB
3947 since update_pointer_to can be invoked multiple times on the same
3948 couple of types because of the type variants. */
3949 if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
3950 return;
3951
a1ab4c31 3952 update_pointer_to
e3edbd56
EB
3953 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
3954 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));
a1ab4c31 3955
a1ab4c31 3956 update_pointer_to
e3edbd56
EB
3957 (TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
3958 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));
aeecf17c 3959
e3edbd56
EB
3960 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
3961 TYPE_OBJECT_RECORD_TYPE (new_type));
a1ab4c31 3962
e3edbd56 3963 TYPE_POINTER_TO (old_type) = NULL_TREE;
1e55d29a 3964 TYPE_REFERENCE_TO (old_type) = NULL_TREE;
a1ab4c31
AC
3965 }
3966}
3967\f
8df2e902
EB
3968/* Convert EXPR, a pointer to a constrained array, into a pointer to an
3969 unconstrained one. This involves making or finding a template. */
a1ab4c31
AC
3970
3971static tree
3972convert_to_fat_pointer (tree type, tree expr)
3973{
910ad8de 3974 tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
8df2e902 3975 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
a1ab4c31 3976 tree etype = TREE_TYPE (expr);
88293f03 3977 tree template_addr;
9771b263
DN
3978 vec<constructor_elt, va_gc> *v;
3979 vec_alloc (v, 2);
a1ab4c31 3980
50179d58
EB
3981 /* If EXPR is null, make a fat pointer that contains a null pointer to the
3982 array (compare_fat_pointers ensures that this is the full discriminant)
3983 and a valid pointer to the bounds. This latter property is necessary
3984 since the compiler can hoist the load of the bounds done through it. */
a1ab4c31 3985 if (integer_zerop (expr))
0e228dd9 3986 {
50179d58
EB
3987 tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
3988 tree null_bounds, t;
3989
3990 if (TYPE_NULL_BOUNDS (ptr_template_type))
3991 null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
3992 else
3993 {
3994 /* The template type can still be dummy at this point so we build an
3995 empty constructor. The middle-end will fill it in with zeros. */
90b4c164 3996 t = build_constructor (template_type, NULL);
50179d58
EB
3997 TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
3998 null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
3999 SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
4000 }
4001
0e228dd9 4002 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
50179d58
EB
4003 fold_convert (p_array_type, null_pointer_node));
4004 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
4005 t = build_constructor (type, v);
4006 /* Do not set TREE_CONSTANT so as to force T to static memory. */
4007 TREE_CONSTANT (t) = 0;
4008 TREE_STATIC (t) = 1;
4009
4010 return t;
0e228dd9 4011 }
a1ab4c31 4012
0d7de0e1
EB
4013 /* If EXPR is a thin pointer, make template and data from the record. */
4014 if (TYPE_IS_THIN_POINTER_P (etype))
a1ab4c31 4015 {
0d7de0e1 4016 tree field = TYPE_FIELDS (TREE_TYPE (etype));
a1ab4c31 4017
7d7a1fe8 4018 expr = gnat_protect_expr (expr);
88293f03
EB
4019
4020 /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
4021 the thin pointer value has been shifted so we shift it back to get
4022 the template address. */
4023 if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)))
2b45154d 4024 {
88293f03
EB
4025 template_addr
4026 = build_binary_op (POINTER_PLUS_EXPR, etype, expr,
4027 fold_build1 (NEGATE_EXPR, sizetype,
4028 byte_position
4029 (DECL_CHAIN (field))));
4030 template_addr
4031 = fold_convert (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))),
4032 template_addr);
2b45154d 4033 }
a1ab4c31 4034
88293f03
EB
4035 /* Otherwise we explicitly take the address of the fields. */
4036 else
4037 {
4038 expr = build_unary_op (INDIRECT_REF, NULL_TREE, expr);
4039 template_addr
4040 = build_unary_op (ADDR_EXPR, NULL_TREE,
64235766 4041 build_component_ref (expr, field, false));
88293f03 4042 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
64235766 4043 build_component_ref (expr, DECL_CHAIN (field),
88293f03
EB
4044 false));
4045 }
a1ab4c31 4046 }
8df2e902
EB
4047
4048 /* Otherwise, build the constructor for the template. */
a1ab4c31 4049 else
88293f03
EB
4050 template_addr
4051 = build_unary_op (ADDR_EXPR, NULL_TREE,
4052 build_template (template_type, TREE_TYPE (etype),
4053 expr));
a1ab4c31 4054
8df2e902 4055 /* The final result is a constructor for the fat pointer.
a1ab4c31 4056
8df2e902
EB
4057 If EXPR is an argument of a foreign convention subprogram, the type it
4058 points to is directly the component type. In this case, the expression
a1ab4c31 4059 type may not match the corresponding FIELD_DECL type at this point, so we
8df2e902 4060 call "convert" here to fix that up if necessary. This type consistency is
a1ab4c31 4061 required, for instance because it ensures that possible later folding of
8df2e902 4062 COMPONENT_REFs against this constructor always yields something of the
a1ab4c31
AC
4063 same type as the initial reference.
4064
8df2e902
EB
4065 Note that the call to "build_template" above is still fine because it
4066 will only refer to the provided TEMPLATE_TYPE in this case. */
88293f03
EB
4067 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), convert (p_array_type, expr));
4068 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), template_addr);
0e228dd9 4069 return gnat_build_constructor (type, v);
a1ab4c31
AC
4070}
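/* Illustrative note (not part of the original source): the CONSTRUCTOR
   built above always has the shape

     { TYPE_FIELDS (type)              => pointer to the array data,
       DECL_CHAIN (TYPE_FIELDS (type)) => address of the bounds template }

   so even for a null EXPR the second component points to statically
   allocated null bounds, which lets loads of the bounds be hoisted.  */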
4071\f
a1ab4c31
AC
4072/* Create an expression whose value is that of EXPR,
4073 converted to type TYPE. The TREE_TYPE of the value
4074 is always TYPE. This function implements all reasonable
4075 conversions; callers should filter out those that are
4076 not permitted by the language being compiled. */
4077
4078tree
4079convert (tree type, tree expr)
4080{
a1ab4c31
AC
4081 tree etype = TREE_TYPE (expr);
4082 enum tree_code ecode = TREE_CODE (etype);
c34f3839 4083 enum tree_code code = TREE_CODE (type);
a1ab4c31 4084
c34f3839
EB
4085 /* If the expression is already of the right type, we are done. */
4086 if (etype == type)
a1ab4c31
AC
4087 return expr;
4088
4089 /* If both input and output have padding and are of variable size, do this
4090 as an unchecked conversion. Likewise if one is a mere variant of the
4091 other, so we avoid a pointless unpad/repad sequence. */
4092 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
315cff15 4093 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
a1ab4c31
AC
4094 && (!TREE_CONSTANT (TYPE_SIZE (type))
4095 || !TREE_CONSTANT (TYPE_SIZE (etype))
842d4ee2 4096 || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
a1ab4c31
AC
4097 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
4098 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
4099 ;
4100
f88facfe
EB
4101 /* If the output type has padding, convert to the inner type and make a
4102 constructor to build the record, unless a variable size is involved. */
315cff15 4103 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
a1ab4c31 4104 {
9771b263 4105 vec<constructor_elt, va_gc> *v;
0e228dd9 4106
a1ab4c31
AC
4107 /* If we previously converted from another type and our type is
4108 of variable size, remove the conversion to avoid the need for
f88facfe 4109 variable-sized temporaries. Likewise for a conversion between
a1ab4c31
AC
4110 original and packable version. */
4111 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4112 && (!TREE_CONSTANT (TYPE_SIZE (type))
4113 || (ecode == RECORD_TYPE
4114 && TYPE_NAME (etype)
4115 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
4116 expr = TREE_OPERAND (expr, 0);
4117
4118 /* If we are just removing the padding from expr, convert the original
4119 object if we have variable size in order to avoid the need for some
f88facfe 4120 variable-sized temporaries. Likewise if the padding is a variant
a1ab4c31
AC
4121 of the other, so we avoid a pointless unpad/repad sequence. */
4122 if (TREE_CODE (expr) == COMPONENT_REF
a1ab4c31
AC
4123 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
4124 && (!TREE_CONSTANT (TYPE_SIZE (type))
842d4ee2
EB
4125 || TYPE_MAIN_VARIANT (type)
4126 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))
a1ab4c31
AC
4127 || (ecode == RECORD_TYPE
4128 && TYPE_NAME (etype)
4129 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
4130 return convert (type, TREE_OPERAND (expr, 0));
4131
431cfac1
EB
4132 /* If the inner type is of self-referential size and the expression type
4133 is a record, do this as an unchecked conversion. But first pad the
4134 expression if possible to have the same size on both sides. */
c34f3839 4135 if (ecode == RECORD_TYPE
f88facfe 4136 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
431cfac1 4137 {
980a0501 4138 if (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST)
431cfac1 4139 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
980a0501
EB
4140 false, false, false, true),
4141 expr);
431cfac1
EB
4142 return unchecked_convert (type, expr, false);
4143 }
a1ab4c31 4144
f88facfe
EB
4145 /* If we are converting between array types with variable size, do the
4146 final conversion as an unchecked conversion, again to avoid the need
4147 for some variable-sized temporaries. If valid, this conversion is
4148 very likely purely technical and without real effects. */
c34f3839 4149 if (ecode == ARRAY_TYPE
f88facfe
EB
4150 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
4151 && !TREE_CONSTANT (TYPE_SIZE (etype))
4152 && !TREE_CONSTANT (TYPE_SIZE (type)))
4153 return unchecked_convert (type,
4154 convert (TREE_TYPE (TYPE_FIELDS (type)),
4155 expr),
4156 false);
4157
9771b263 4158 vec_alloc (v, 1);
0e228dd9
NF
4159 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4160 convert (TREE_TYPE (TYPE_FIELDS (type)), expr));
4161 return gnat_build_constructor (type, v);
a1ab4c31
AC
4162 }
4163
4164 /* If the input type has padding, remove it and convert to the output type.
4165     The conditions are ordered so as to ensure that the output type is not
4166 a padding type here, as it is not clear whether the conversion would
4167     always be correct if this were to happen.  */
315cff15 4168 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
a1ab4c31
AC
4169 {
4170 tree unpadded;
4171
4172 /* If we have just converted to this padded type, just get the
4173 inner expression. */
2117b9bb
EB
4174 if (TREE_CODE (expr) == CONSTRUCTOR)
4175 unpadded = CONSTRUCTOR_ELT (expr, 0)->value;
a1ab4c31
AC
4176
4177 /* Otherwise, build an explicit component reference. */
4178 else
64235766 4179 unpadded = build_component_ref (expr, TYPE_FIELDS (etype), false);
a1ab4c31
AC
4180
4181 return convert (type, unpadded);
4182 }
4183
4184 /* If the input is a biased type, adjust first. */
4185 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4186 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
1eb58520 4187 fold_convert (TREE_TYPE (etype), expr),
a1ab4c31 4188 fold_convert (TREE_TYPE (etype),
1eb58520 4189 TYPE_MIN_VALUE (etype))));
a1ab4c31
AC
4190
4191 /* If the input is a justified modular type, we need to extract the actual
4192 object before converting it to any other type with the exceptions of an
4193 unconstrained array or of a mere type variant. It is useful to avoid the
4194 extraction and conversion in the type variant case because it could end
4195     up replacing a VAR_DECL expr by a constructor and we might be about to
4196 take the address of the result. */
4197 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
4198 && code != UNCONSTRAINED_ARRAY_TYPE
4199 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
64235766
EB
4200 return
4201 convert (type, build_component_ref (expr, TYPE_FIELDS (etype), false));
a1ab4c31
AC
4202
4203 /* If converting to a type that contains a template, convert to the data
4204 type and then build the template. */
4205 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
4206 {
910ad8de 4207 tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
9771b263
DN
4208 vec<constructor_elt, va_gc> *v;
4209 vec_alloc (v, 2);
a1ab4c31
AC
4210
4211 /* If the source already has a template, get a reference to the
4212 associated array only, as we are going to rebuild a template
4213 for the target type anyway. */
4214 expr = maybe_unconstrained_array (expr);
4215
0e228dd9
NF
4216 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4217 build_template (TREE_TYPE (TYPE_FIELDS (type)),
4218 obj_type, NULL_TREE));
73a1a803
EB
4219 if (expr)
4220 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
4221 convert (obj_type, expr));
0e228dd9 4222 return gnat_build_constructor (type, v);
a1ab4c31
AC
4223 }
4224
a1c7d797 4225 /* There are some cases of expressions that we process specially. */
a1ab4c31
AC
4226 switch (TREE_CODE (expr))
4227 {
4228 case ERROR_MARK:
4229 return expr;
4230
4231 case NULL_EXPR:
4232 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4233 conversion in gnat_expand_expr. NULL_EXPR does not represent
4234	 an actual value, so no conversion is needed.  */
4235 expr = copy_node (expr);
4236 TREE_TYPE (expr) = type;
4237 return expr;
4238
4239 case STRING_CST:
4240 /* If we are converting a STRING_CST to another constrained array type,
4241 just make a new one in the proper type. */
4242 if (code == ecode && AGGREGATE_TYPE_P (etype)
4243 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
4244 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
4245 {
4246 expr = copy_node (expr);
4247 TREE_TYPE (expr) = type;
4248 return expr;
4249 }
4250 break;
4251
7948ae37 4252 case VECTOR_CST:
44e9e3ec 4253 /* If we are converting a VECTOR_CST to a mere type variant, just make
7948ae37
OH
4254 a new one in the proper type. */
4255 if (code == ecode && gnat_types_compatible_p (type, etype))
4256 {
4257 expr = copy_node (expr);
4258 TREE_TYPE (expr) = type;
4259 return expr;
4260 }
4261
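      /* ... fall through ... */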
a1ab4c31 4262 case CONSTRUCTOR:
44e9e3ec
EB
4263 /* If we are converting a CONSTRUCTOR to a mere type variant, or to
4264 another padding type around the same type, just make a new one in
4265 the proper type. */
4266 if (code == ecode
4267 && (gnat_types_compatible_p (type, etype)
4268 || (code == RECORD_TYPE
4269 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4270 && TREE_TYPE (TYPE_FIELDS (type))
4271 == TREE_TYPE (TYPE_FIELDS (etype)))))
a1ab4c31
AC
4272 {
4273 expr = copy_node (expr);
4274 TREE_TYPE (expr) = type;
9771b263 4275 CONSTRUCTOR_ELTS (expr) = vec_safe_copy (CONSTRUCTOR_ELTS (expr));
a1ab4c31
AC
4276 return expr;
4277 }
4278
cb3d597d
EB
4279 /* Likewise for a conversion between original and packable version, or
4280 conversion between types of the same size and with the same list of
4281 fields, but we have to work harder to preserve type consistency. */
a1ab4c31
AC
4282 if (code == ecode
4283 && code == RECORD_TYPE
cb3d597d
EB
4284 && (TYPE_NAME (type) == TYPE_NAME (etype)
4285 || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
4286
a1ab4c31 4287 {
9771b263
DN
4288 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4289 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4290 vec<constructor_elt, va_gc> *v;
4291 vec_alloc (v, len);
a1ab4c31
AC
4292 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4293 unsigned HOST_WIDE_INT idx;
4294 tree index, value;
4295
db868e1e
OH
4296 /* Whether we need to clear TREE_CONSTANT et al. on the output
4297 constructor when we convert in place. */
4298 bool clear_constant = false;
4299
a1ab4c31
AC
4300 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
4301 {
44e9e3ec
EB
4302 /* Skip the missing fields in the CONSTRUCTOR. */
4303 while (efield && field && !SAME_FIELD_P (efield, index))
4304 {
4305 efield = DECL_CHAIN (efield);
4306 field = DECL_CHAIN (field);
4307 }
cb3d597d 4308 /* The field must be the same. */
44e9e3ec 4309 if (!(efield && field && SAME_FIELD_P (efield, field)))
a1ab4c31 4310 break;
44e9e3ec
EB
4311 constructor_elt elt
4312 = {field, convert (TREE_TYPE (field), value)};
9771b263 4313 v->quick_push (elt);
db868e1e
OH
4314
4315 /* If packing has made this field a bitfield and the input
4316 value couldn't be emitted statically any more, we need to
4317 clear TREE_CONSTANT on our output. */
ced57283
EB
4318 if (!clear_constant
4319 && TREE_CONSTANT (expr)
db868e1e
OH
4320 && !CONSTRUCTOR_BITFIELD_P (efield)
4321 && CONSTRUCTOR_BITFIELD_P (field)
4322 && !initializer_constant_valid_for_bitfield_p (value))
4323 clear_constant = true;
4324
910ad8de
NF
4325 efield = DECL_CHAIN (efield);
4326 field = DECL_CHAIN (field);
a1ab4c31
AC
4327 }
4328
db868e1e
OH
4329 /* If we have been able to match and convert all the input fields
4330 to their output type, convert in place now. We'll fall back to a
4331 view conversion downstream otherwise. */
a1ab4c31
AC
4332 if (idx == len)
4333 {
4334 expr = copy_node (expr);
4335 TREE_TYPE (expr) = type;
4336 CONSTRUCTOR_ELTS (expr) = v;
db868e1e 4337 if (clear_constant)
ced57283 4338 TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
a1ab4c31
AC
4339 return expr;
4340 }
4341 }
7948ae37
OH
4342
4343 /* Likewise for a conversion between array type and vector type with a
4344 compatible representative array. */
4345 else if (code == VECTOR_TYPE
4346 && ecode == ARRAY_TYPE
4347 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4348 etype))
4349 {
9771b263
DN
4350 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4351 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4352 vec<constructor_elt, va_gc> *v;
7948ae37
OH
4353 unsigned HOST_WIDE_INT ix;
4354 tree value;
4355
4356 /* Build a VECTOR_CST from a *constant* array constructor. */
4357 if (TREE_CONSTANT (expr))
4358 {
4359 bool constant_p = true;
4360
4361 /* Iterate through elements and check if all constructor
4362 elements are *_CSTs. */
4363 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4364 if (!CONSTANT_CLASS_P (value))
4365 {
4366 constant_p = false;
4367 break;
4368 }
4369
4370 if (constant_p)
4371 return build_vector_from_ctor (type,
4372 CONSTRUCTOR_ELTS (expr));
4373 }
4374
4375 /* Otherwise, build a regular vector constructor. */
9771b263 4376 vec_alloc (v, len);
7948ae37
OH
4377 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4378 {
f32682ca 4379 constructor_elt elt = {NULL_TREE, value};
9771b263 4380 v->quick_push (elt);
7948ae37
OH
4381 }
4382 expr = copy_node (expr);
4383 TREE_TYPE (expr) = type;
4384 CONSTRUCTOR_ELTS (expr) = v;
4385 return expr;
4386 }
a1ab4c31
AC
4387 break;
4388
4389 case UNCONSTRAINED_ARRAY_REF:
a1c7d797
EB
4390 /* First retrieve the underlying array. */
4391 expr = maybe_unconstrained_array (expr);
4392 etype = TREE_TYPE (expr);
4393 ecode = TREE_CODE (etype);
4394 break;
a1ab4c31
AC
4395
4396 case VIEW_CONVERT_EXPR:
4397 {
4398 /* GCC 4.x is very sensitive to type consistency overall, and view
4399 conversions thus are very frequent. Even though just "convert"ing
4400 the inner operand to the output type is fine in most cases, it
4401 might expose unexpected input/output type mismatches in special
4402 circumstances so we avoid such recursive calls when we can. */
4403 tree op0 = TREE_OPERAND (expr, 0);
4404
4405 /* If we are converting back to the original type, we can just
4406 lift the input conversion. This is a common occurrence with
4407 switches back-and-forth amongst type variants. */
4408 if (type == TREE_TYPE (op0))
4409 return op0;
4410
7948ae37
OH
4411 /* Otherwise, if we're converting between two aggregate or vector
4412 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4413 target type in place or to just convert the inner expression. */
4414 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4415 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
a1ab4c31
AC
4416 {
4417 /* If we are converting between mere variants, we can just
4418 substitute the VIEW_CONVERT_EXPR in place. */
4419 if (gnat_types_compatible_p (type, etype))
4420 return build1 (VIEW_CONVERT_EXPR, type, op0);
4421
4422 /* Otherwise, we may just bypass the input view conversion unless
4423 one of the types is a fat pointer, which is handled by
4424 specialized code below which relies on exact type matching. */
315cff15
EB
4425 else if (!TYPE_IS_FAT_POINTER_P (type)
4426 && !TYPE_IS_FAT_POINTER_P (etype))
a1ab4c31
AC
4427 return convert (type, op0);
4428 }
ad1d36ba
EB
4429
4430 break;
a1ab4c31 4431 }
a1ab4c31 4432
a1ab4c31
AC
4433 default:
4434 break;
4435 }
4436
4437 /* Check for converting to a pointer to an unconstrained array. */
315cff15 4438 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
a1ab4c31
AC
4439 return convert_to_fat_pointer (type, expr);
4440
7948ae37
OH
4441 /* If we are converting between two aggregate or vector types that are mere
4442 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4443 to a vector type from its representative array type. */
4444 else if ((code == ecode
4445 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4446 && gnat_types_compatible_p (type, etype))
4447 || (code == VECTOR_TYPE
4448 && ecode == ARRAY_TYPE
4449 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4450 etype)))
a1ab4c31
AC
4451 return build1 (VIEW_CONVERT_EXPR, type, expr);
4452
76af763d
EB
4453 /* If we are converting between tagged types, try to upcast properly. */
4454 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4455 && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type))
4456 {
4457 tree child_etype = etype;
4458 do {
4459 tree field = TYPE_FIELDS (child_etype);
4460 if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
64235766 4461 return build_component_ref (expr, field, false);
76af763d
EB
4462 child_etype = TREE_TYPE (field);
4463 } while (TREE_CODE (child_etype) == RECORD_TYPE);
4464 }
4465
bb1f7929
EB
4466 /* If we are converting from a smaller form of record type back to it, just
4467 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4468 size on both sides. */
4469 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4470 && smaller_form_type_p (etype, type))
4471 {
4472 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4473 false, false, false, true),
4474 expr);
4475 return build1 (VIEW_CONVERT_EXPR, type, expr);
4476 }
4477
a1ab4c31 4478 /* In all other cases of related types, make a NOP_EXPR. */
86060344 4479 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
a1ab4c31
AC
4480 return fold_convert (type, expr);
4481
4482 switch (code)
4483 {
4484 case VOID_TYPE:
4485 return fold_build1 (CONVERT_EXPR, type, expr);
4486
a1ab4c31
AC
4487 case INTEGER_TYPE:
4488 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4489 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4490 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4491 return unchecked_convert (type, expr, false);
4492 else if (TYPE_BIASED_REPRESENTATION_P (type))
4493 return fold_convert (type,
4494 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4495 convert (TREE_TYPE (type), expr),
1eb58520
AC
4496 convert (TREE_TYPE (type),
4497 TYPE_MIN_VALUE (type))));
a1ab4c31
AC
4498
4499 /* ... fall through ... */
4500
4501 case ENUMERAL_TYPE:
01ddebf2 4502 case BOOLEAN_TYPE:
a1ab4c31
AC
4503 /* If we are converting an additive expression to an integer type
4504 with lower precision, be wary of the optimization that can be
4505 applied by convert_to_integer. There are 2 problematic cases:
4506 - if the first operand was originally of a biased type,
4507 because we could be recursively called to convert it
4508 to an intermediate type and thus rematerialize the
4509 additive operator endlessly,
4510 - if the expression contains a placeholder, because an
4511 intermediate conversion that changes the sign could
4512 be inserted and thus introduce an artificial overflow
4513 at compile time when the placeholder is substituted. */
4514 if (code == INTEGER_TYPE
4515 && ecode == INTEGER_TYPE
4516 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4517 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4518 {
4519 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4520
4521 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4522 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4523 || CONTAINS_PLACEHOLDER_P (expr))
4524 return build1 (NOP_EXPR, type, expr);
4525 }
4526
4527 return fold (convert_to_integer (type, expr));
4528
4529 case POINTER_TYPE:
4530 case REFERENCE_TYPE:
0d7de0e1 4531 /* If converting between two thin pointers, adjust if needed to account
2b45154d
EB
4532 for differing offsets from the base pointer, depending on whether
4533 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type. */
315cff15 4534 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
a1ab4c31 4535 {
2b45154d 4536 tree etype_pos
7c775aca 4537 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype))
2b45154d
EB
4538 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype))))
4539 : size_zero_node;
4540 tree type_pos
7c775aca 4541 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))
2b45154d
EB
4542 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type))))
4543 : size_zero_node;
4544 tree byte_diff = size_diffop (type_pos, etype_pos);
0d7de0e1 4545
a1ab4c31 4546 expr = build1 (NOP_EXPR, type, expr);
a1ab4c31
AC
4547 if (integer_zerop (byte_diff))
4548 return expr;
4549
4550 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
0d7de0e1 4551 fold_convert (sizetype, byte_diff));
a1ab4c31
AC
4552 }
4553
0d7de0e1
EB
4554 /* If converting fat pointer to normal or thin pointer, get the pointer
4555 to the array and then convert it. */
4556 if (TYPE_IS_FAT_POINTER_P (etype))
64235766 4557 expr = build_component_ref (expr, TYPE_FIELDS (etype), false);
a1ab4c31
AC
4558
4559 return fold (convert_to_pointer (type, expr));
4560
4561 case REAL_TYPE:
4562 return fold (convert_to_real (type, expr));
4563
4564 case RECORD_TYPE:
4565 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
0e228dd9 4566 {
9771b263
DN
4567 vec<constructor_elt, va_gc> *v;
4568 vec_alloc (v, 1);
0e228dd9
NF
4569
4570 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4571 convert (TREE_TYPE (TYPE_FIELDS (type)),
4572 expr));
4573 return gnat_build_constructor (type, v);
4574 }
a1ab4c31
AC
4575
4576 /* ... fall through ... */
4577
4578 case ARRAY_TYPE:
4579 /* In these cases, assume the front-end has validated the conversion.
4580 If the conversion is valid, it will be a bit-wise conversion, so
4581 it can be viewed as an unchecked conversion. */
4582 return unchecked_convert (type, expr, false);
4583
4584 case UNION_TYPE:
4585 /* This is either a conversion between a tagged type and some
4586 subtype, which we have to mark as a UNION_TYPE because of
4587 overlapping fields, or a conversion of an Unchecked_Union. */
4588 return unchecked_convert (type, expr, false);
4589
4590 case UNCONSTRAINED_ARRAY_TYPE:
7948ae37
OH
4591 /* If the input is a VECTOR_TYPE, convert to the representative
4592 array type first. */
4593 if (ecode == VECTOR_TYPE)
4594 {
4595 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
4596 etype = TREE_TYPE (expr);
4597 ecode = TREE_CODE (etype);
4598 }
4599
a1ab4c31
AC
4600 /* If EXPR is a constrained array, take its address, convert it to a
4601 fat pointer, and then dereference it. Likewise if EXPR is a
4602 record containing both a template and a constrained array.
4603 Note that a record representing a justified modular type
4604 always represents a packed constrained array. */
4605 if (ecode == ARRAY_TYPE
4606 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4607 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4608 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4609 return
4610 build_unary_op
4611 (INDIRECT_REF, NULL_TREE,
4612 convert_to_fat_pointer (TREE_TYPE (type),
4613 build_unary_op (ADDR_EXPR,
4614 NULL_TREE, expr)));
4615
4616 /* Do something very similar for converting one unconstrained
4617 array to another. */
4618 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4619 return
4620 build_unary_op (INDIRECT_REF, NULL_TREE,
4621 convert (TREE_TYPE (type),
4622 build_unary_op (ADDR_EXPR,
4623 NULL_TREE, expr)));
4624 else
4625 gcc_unreachable ();
4626
4627 case COMPLEX_TYPE:
4628 return fold (convert_to_complex (type, expr));
4629
4630 default:
4631 gcc_unreachable ();
4632 }
4633}
15bf7d19
EB
4634
4635/* Create an expression whose value is that of EXPR converted to the common
4636 index type, which is sizetype. EXPR is supposed to be in the base type
4637 of the GNAT index type. Calling it is equivalent to doing
4638
4639 convert (sizetype, expr)
4640
4641 but we try to distribute the type conversion with the knowledge that EXPR
4642 cannot overflow in its type. This is a best-effort approach and we fall
4643 back to the above expression as soon as difficulties are encountered.
4644
4645 This is necessary to overcome issues that arise when the GNAT base index
4646 type and the GCC common index type (sizetype) don't have the same size,
4647 which is quite frequent on 64-bit architectures. In this case, and if
4648 the GNAT base index type is signed but the iteration type of the loop has
4649 been forced to unsigned, the loop scalar evolution engine cannot compute
4650 a simple evolution for the general induction variables associated with the
4651 array indices, because it will preserve the wrap-around semantics in the
4652 unsigned type of their "inner" part. As a result, many loop optimizations
4653 are blocked.
4654
4655 The solution is to use a special (basic) induction variable that is at
4656 least as large as sizetype, and to express the aforementioned general
4657 induction variables in terms of this induction variable, eliminating
4658 the problematic intermediate truncation to the GNAT base index type.
4659 This is possible as long as the original expression doesn't overflow
4660 and if the middle-end hasn't introduced artificial overflows in the
4661 course of the various simplifications it can make to the expression. */
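/* To illustrate (an informal sketch, not taken from the original sources):
   for an array reference A (I + 1), where I is a 32-bit signed loop
   parameter on a 64-bit target, a plain convert (sizetype, I + 1) keeps a
   32-bit addition whose truncated result is then widened, hiding the simple
   linear evolution of the index from the scalar evolution engine.  The code
   below instead builds, roughly,

     PLUS_EXPR <sizetype> (convert (sizetype, IV), convert (sizetype, 1))

   where IV is the DECL_INDUCTION_VAR associated with I, so the addition is
   carried out directly in sizetype.  */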
4662
4663tree
4664convert_to_index_type (tree expr)
4665{
4666 enum tree_code code = TREE_CODE (expr);
4667 tree type = TREE_TYPE (expr);
4668
4669 /* If the type is unsigned, overflow is allowed so we cannot be sure that
4670 EXPR doesn't overflow. Keep it simple if optimization is disabled. */
4671 if (TYPE_UNSIGNED (type) || !optimize)
4672 return convert (sizetype, expr);
4673
4674 switch (code)
4675 {
4676 case VAR_DECL:
4677 /* The main effect of the function: replace a loop parameter with its
4678 associated special induction variable. */
4679 if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
4680 expr = DECL_INDUCTION_VAR (expr);
4681 break;
4682
4683 CASE_CONVERT:
4684 {
4685 tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
4686 /* Bail out as soon as we suspect some sort of type frobbing. */
4687 if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
4688 || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
4689 break;
4690 }
4691
4692 /* ... fall through ... */
4693
4694 case NON_LVALUE_EXPR:
4695 return fold_build1 (code, sizetype,
4696 convert_to_index_type (TREE_OPERAND (expr, 0)));
4697
4698 case PLUS_EXPR:
4699 case MINUS_EXPR:
4700 case MULT_EXPR:
4701 return fold_build2 (code, sizetype,
4702 convert_to_index_type (TREE_OPERAND (expr, 0)),
4703 convert_to_index_type (TREE_OPERAND (expr, 1)));
4704
4705 case COMPOUND_EXPR:
4706 return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
4707 convert_to_index_type (TREE_OPERAND (expr, 1)));
4708
4709 case COND_EXPR:
4710 return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
4711 convert_to_index_type (TREE_OPERAND (expr, 1)),
4712 convert_to_index_type (TREE_OPERAND (expr, 2)));
4713
4714 default:
4715 break;
4716 }
4717
4718 return convert (sizetype, expr);
4719}
a1ab4c31
AC
4720\f
4721/* Remove all conversions that are done in EXP. This includes converting
4722 from a padded type or to a justified modular type. If TRUE_ADDRESS
4723 is true, always return the address of the containing object even if
4724 the address is not bit-aligned. */
4725
4726tree
4727remove_conversions (tree exp, bool true_address)
4728{
4729 switch (TREE_CODE (exp))
4730 {
4731 case CONSTRUCTOR:
4732 if (true_address
4733 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4734 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4735 return
2117b9bb 4736 remove_conversions (CONSTRUCTOR_ELT (exp, 0)->value, true);
a1ab4c31
AC
4737 break;
4738
4739 case COMPONENT_REF:
315cff15 4740 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
a1ab4c31
AC
4741 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4742 break;
4743
a1ab4c31 4744 CASE_CONVERT:
722356ce
EB
4745 case VIEW_CONVERT_EXPR:
4746 case NON_LVALUE_EXPR:
a1ab4c31
AC
4747 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4748
4749 default:
4750 break;
4751 }
4752
4753 return exp;
4754}
4755\f
4756/* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
86060344 4757 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
a1ab4c31
AC
4758 likewise return an expression pointing to the underlying array. */
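/* As a reminder of the layouts involved (informal; the code below is the
   authoritative reference for the field order): a fat pointer is a record
   whose first field points to the array data and whose second field points
   to the bounds template, whereas a record with TYPE_CONTAINS_TEMPLATE_P
   set has the template as its first field and the array data as its second;
   in both cases this function builds a reference to the array part.  */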
4759
4760tree
4761maybe_unconstrained_array (tree exp)
4762{
4763 enum tree_code code = TREE_CODE (exp);
1aa291f7 4764 tree type = TREE_TYPE (exp);
a1ab4c31 4765
1aa291f7 4766 switch (TREE_CODE (type))
a1ab4c31
AC
4767 {
4768 case UNCONSTRAINED_ARRAY_TYPE:
4769 if (code == UNCONSTRAINED_ARRAY_REF)
4770 {
7e169899 4771 const bool read_only = TREE_READONLY (exp);
a1c7d797
EB
4772 const bool no_trap = TREE_THIS_NOTRAP (exp);
4773
7e169899 4774 exp = TREE_OPERAND (exp, 0);
1aa291f7
EB
4775 type = TREE_TYPE (exp);
4776
7e169899
EB
4777 if (TREE_CODE (exp) == COND_EXPR)
4778 {
4779 tree op1
4780 = build_unary_op (INDIRECT_REF, NULL_TREE,
4781 build_component_ref (TREE_OPERAND (exp, 1),
1aa291f7 4782 TYPE_FIELDS (type),
7e169899
EB
4783 false));
4784 tree op2
4785 = build_unary_op (INDIRECT_REF, NULL_TREE,
4786 build_component_ref (TREE_OPERAND (exp, 2),
1aa291f7 4787 TYPE_FIELDS (type),
7e169899
EB
4788 false));
4789
4790 exp = build3 (COND_EXPR,
1aa291f7 4791 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
7e169899
EB
4792 TREE_OPERAND (exp, 0), op1, op2);
4793 }
4794 else
a1c7d797
EB
4795 {
4796 exp = build_unary_op (INDIRECT_REF, NULL_TREE,
64235766
EB
4797 build_component_ref (exp,
4798 TYPE_FIELDS (type),
a1c7d797
EB
4799 false));
4800 TREE_READONLY (exp) = read_only;
4801 TREE_THIS_NOTRAP (exp) = no_trap;
4802 }
a1ab4c31
AC
4803 }
4804
4805 else if (code == NULL_EXPR)
1aa291f7
EB
4806 exp = build1 (NULL_EXPR,
4807 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
4808 TREE_OPERAND (exp, 0));
4809 break;
a1ab4c31
AC
4810
4811 case RECORD_TYPE:
1aa291f7
EB
4812 /* If this is a padded type and it contains a template, convert to the
4813 unpadded type first. */
4814 if (TYPE_PADDING_P (type)
4815 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
4816 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
a1ab4c31 4817 {
1aa291f7 4818 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
64235766 4819 code = TREE_CODE (exp);
1aa291f7
EB
4820 type = TREE_TYPE (exp);
4821 }
4822
4823 if (TYPE_CONTAINS_TEMPLATE_P (type))
4824 {
64235766
EB
4825 /* If the array initializer is a box, return NULL_TREE. */
4826 if (code == CONSTRUCTOR && CONSTRUCTOR_NELTS (exp) < 2)
4827 return NULL_TREE;
4828
4829 exp = build_component_ref (exp, DECL_CHAIN (TYPE_FIELDS (type)),
4830 false);
4831 type = TREE_TYPE (exp);
1aa291f7
EB
4832
4833 /* If the array type is padded, convert to the unpadded type. */
64235766
EB
4834 if (TYPE_IS_PADDING_P (type))
4835 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
a1ab4c31 4836 }
a1ab4c31
AC
4837 break;
4838
4839 default:
4840 break;
4841 }
4842
4843 return exp;
4844}
4845\f
afcea859 4846/* Return true if EXPR is an expression that can be folded as an operand
84fb43a1 4847 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
afcea859
EB
4848
4849static bool
4850can_fold_for_view_convert_p (tree expr)
4851{
4852 tree t1, t2;
4853
4854 /* The folder will fold NOP_EXPRs between integral types with the same
4855 precision (in the middle-end's sense). We cannot allow it if the
4856 types don't have the same precision in the Ada sense as well. */
4857 if (TREE_CODE (expr) != NOP_EXPR)
4858 return true;
4859
4860 t1 = TREE_TYPE (expr);
4861 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
4862
4863 /* Defer to the folder for non-integral conversions. */
4864 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
4865 return true;
4866
4867 /* Only fold conversions that preserve both precisions. */
4868 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
4869 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
4870 return true;
4871
4872 return false;
4873}
4874
a1ab4c31 4875/* Return an expression that does an unchecked conversion of EXPR to TYPE.
afcea859
EB
4876 If NOTRUNC_P is true, truncation operations should be suppressed.
4877
4878 Special care is required with (source or target) integral types whose
4879 precision is not equal to their size, to make sure we fetch or assign
4880 the value bits whose location might depend on the endianness, e.g.
4881
4882 Rmsize : constant := 8;
4883 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
4884
4885 type Bit_Array is array (1 .. Rmsize) of Boolean;
4886 pragma Pack (Bit_Array);
4887
4888 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
4889
4890 Value : Int := 2#1000_0001#;
4891 Vbits : Bit_Array := To_Bit_Array (Value);
4892
4893 we expect the 8 bits at Vbits'Address to always contain Value, while
4894 their original location depends on the endianness, at Value'Address
84fb43a1 4895 on a little-endian architecture but not on a big-endian one. */
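/* Concretely for the example above (assuming a 32-bit Integer): Value is
   16#0000_0081#, so the byte holding its 8 value bits is the one at
   Value'Address on a little-endian machine but the one at Value'Address + 3
   on a big-endian one; funneling the conversion through an 8-bit field, as
   done below, lets Vbits receive those 8 bits in both cases.  */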
a1ab4c31
AC
4896
4897tree
4898unchecked_convert (tree type, tree expr, bool notrunc_p)
4899{
4900 tree etype = TREE_TYPE (expr);
c34f3839
EB
4901 enum tree_code ecode = TREE_CODE (etype);
4902 enum tree_code code = TREE_CODE (type);
e63b36bd 4903 tree tem;
980a0501 4904 int c;
a1ab4c31 4905
c34f3839 4906 /* If the expression is already of the right type, we are done. */
a1ab4c31
AC
4907 if (etype == type)
4908 return expr;
4909
026c3cfd 4910 /* If both types are integral just do a normal conversion.
a1ab4c31 4911 Likewise for a conversion to an unconstrained array. */
1eb58520 4912 if (((INTEGRAL_TYPE_P (type)
0d7de0e1 4913 || (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
c34f3839 4914 || (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
1eb58520 4915 && (INTEGRAL_TYPE_P (etype)
315cff15 4916 || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
c34f3839
EB
4917 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
4918 || code == UNCONSTRAINED_ARRAY_TYPE)
a1ab4c31 4919 {
c34f3839 4920 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
a1ab4c31
AC
4921 {
4922 tree ntype = copy_type (etype);
a1ab4c31
AC
4923 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
4924 TYPE_MAIN_VARIANT (ntype) = ntype;
4925 expr = build1 (NOP_EXPR, ntype, expr);
4926 }
4927
c34f3839 4928 if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
a1ab4c31 4929 {
afcea859 4930 tree rtype = copy_type (type);
a1ab4c31
AC
4931 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
4932 TYPE_MAIN_VARIANT (rtype) = rtype;
afcea859
EB
4933 expr = convert (rtype, expr);
4934 expr = build1 (NOP_EXPR, type, expr);
a1ab4c31 4935 }
afcea859
EB
4936 else
4937 expr = convert (type, expr);
a1ab4c31
AC
4938 }
4939
afcea859 4940 /* If we are converting to an integral type whose precision is not equal
ee45a32d
EB
4941 to its size, first unchecked convert to a record type that contains a
4942 field of the given precision. Then extract the result from the field.
4943
4944 There is a subtlety if the source type is an aggregate type with reverse
4945 storage order because its representation is not contiguous in the native
4946 storage order, i.e. a direct unchecked conversion to an integral type
4947 with N bits of precision cannot read the first N bits of the aggregate
4948 type. To overcome it, we do an unchecked conversion to an integral type
4949 with reverse storage order and return the resulting value. This also
4950 ensures that the result of the unchecked conversion doesn't depend on
4951 the endianness of the target machine, but only on the storage order of
4952 the aggregate type.
4953
4954 Finally, for the sake of consistency, we do the unchecked conversion
4955 to an integral type with reverse storage order as soon as the source
4956 type is an aggregate type with reverse storage order, even if there
4957 are no considerations of precision or size involved. */
980a0501
EB
4958 else if (INTEGRAL_TYPE_P (type)
4959 && TYPE_RM_SIZE (type)
9a1bdc31
EB
4960 && (tree_int_cst_compare (TYPE_RM_SIZE (type),
4961 TYPE_SIZE (type)) < 0
ee45a32d
EB
4962 || (AGGREGATE_TYPE_P (etype)
4963 && TYPE_REVERSE_STORAGE_ORDER (etype))))
a1ab4c31
AC
4964 {
4965 tree rec_type = make_node (RECORD_TYPE);
416de7d5
EB
4966 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
4967 tree field_type, field;
4968
ee45a32d
EB
4969 if (AGGREGATE_TYPE_P (etype))
4970 TYPE_REVERSE_STORAGE_ORDER (rec_type)
4971 = TYPE_REVERSE_STORAGE_ORDER (etype);
4972
416de7d5
EB
4973 if (TYPE_UNSIGNED (type))
4974 field_type = make_unsigned_type (prec);
4975 else
4976 field_type = make_signed_type (prec);
4977 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));
4978
4979 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
44e9e3ec 4980 NULL_TREE, bitsize_zero_node, 1, 0);
a1ab4c31 4981
44e9e3ec 4982 finish_record_type (rec_type, field, 1, false);
a1ab4c31
AC
4983
4984 expr = unchecked_convert (rec_type, expr, notrunc_p);
64235766 4985 expr = build_component_ref (expr, field, false);
416de7d5 4986 expr = fold_build1 (NOP_EXPR, type, expr);
a1ab4c31
AC
4987 }
4988
416de7d5
EB
4989 /* Similarly if we are converting from an integral type whose precision is
4990 not equal to its size, first copy into a field of the given precision
ee45a32d
EB
4991 and unchecked convert the record type.
4992
4993 The same considerations as above apply if the target type is an aggregate
4994 type with reverse storage order and we also proceed similarly. */
980a0501
EB
4995 else if (INTEGRAL_TYPE_P (etype)
4996 && TYPE_RM_SIZE (etype)
9a1bdc31
EB
4997 && (tree_int_cst_compare (TYPE_RM_SIZE (etype),
4998 TYPE_SIZE (etype)) < 0
ee45a32d
EB
4999 || (AGGREGATE_TYPE_P (type)
5000 && TYPE_REVERSE_STORAGE_ORDER (type))))
a1ab4c31
AC
5001 {
5002 tree rec_type = make_node (RECORD_TYPE);
416de7d5 5003 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
9771b263
DN
5004 vec<constructor_elt, va_gc> *v;
5005 vec_alloc (v, 1);
416de7d5
EB
5006 tree field_type, field;
5007
ee45a32d
EB
5008 if (AGGREGATE_TYPE_P (type))
5009 TYPE_REVERSE_STORAGE_ORDER (rec_type)
5010 = TYPE_REVERSE_STORAGE_ORDER (type);
5011
416de7d5
EB
5012 if (TYPE_UNSIGNED (etype))
5013 field_type = make_unsigned_type (prec);
5014 else
5015 field_type = make_signed_type (prec);
5016 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));
5017
5018 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
44e9e3ec 5019 NULL_TREE, bitsize_zero_node, 1, 0);
a1ab4c31 5020
44e9e3ec 5021 finish_record_type (rec_type, field, 1, false);
a1ab4c31 5022
416de7d5 5023 expr = fold_build1 (NOP_EXPR, field_type, expr);
0e228dd9
NF
5024 CONSTRUCTOR_APPEND_ELT (v, field, expr);
5025 expr = gnat_build_constructor (rec_type, v);
a1ab4c31
AC
5026 expr = unchecked_convert (type, expr, notrunc_p);
5027 }
5028
980a0501
EB
5029 /* If we are converting from a scalar type to a type with a different size,
5030 we need to pad to have the same size on both sides.
5031
5032 ??? We cannot do it unconditionally because unchecked conversions are
5033 used liberally by the front-end to implement polymorphism, e.g. in:
5034
5035 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
5036 return p___size__4 (p__object!(S191s.all));
5037
5038 so we skip all expressions that are references. */
5039 else if (!REFERENCE_CLASS_P (expr)
5040 && !AGGREGATE_TYPE_P (etype)
5041 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5042 && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
5043 {
5044 if (c < 0)
5045 {
5046 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
5047 false, false, false, true),
5048 expr);
5049 expr = unchecked_convert (type, expr, notrunc_p);
5050 }
5051 else
5052 {
5053 tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
5054 false, false, false, true);
5055 expr = unchecked_convert (rec_type, expr, notrunc_p);
64235766 5056 expr = build_component_ref (expr, TYPE_FIELDS (rec_type), false);
980a0501
EB
5057 }
5058 }
5059
7948ae37
OH
5060 /* We have a special case when we are converting between two unconstrained
5061 array types. In that case, take the address, convert the fat pointer
5062 types, and dereference. */
c34f3839 5063 else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
a1ab4c31
AC
5064 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
5065 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
5066 build_unary_op (ADDR_EXPR, NULL_TREE,
5067 expr)));
7948ae37
OH
5068
5069 /* Another special case is when we are converting to a vector type from its
5070 representative array type; this is a regular conversion. */
c34f3839
EB
5071 else if (code == VECTOR_TYPE
5072 && ecode == ARRAY_TYPE
7948ae37
OH
5073 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
5074 etype))
5075 expr = convert (type, expr);
5076
e63b36bd
EB
5077 /* And, if the array type is not the representative, we try to build an
5078 intermediate vector type of which the array type is the representative
5079 and to do the unchecked conversion between the vector types, in order
5080 to enable further simplifications in the middle-end. */
5081 else if (code == VECTOR_TYPE
5082 && ecode == ARRAY_TYPE
5083 && (tem = build_vector_type_for_array (etype, NULL_TREE)))
5084 {
5085 expr = convert (tem, expr);
5086 return unchecked_convert (type, expr, notrunc_p);
5087 }
5088
44e9e3ec
EB
5089 /* If we are converting a CONSTRUCTOR to a more aligned RECORD_TYPE, bump
5090 the alignment of the CONSTRUCTOR to speed up the copy operation. */
5091 else if (TREE_CODE (expr) == CONSTRUCTOR
5092 && code == RECORD_TYPE
5093 && TYPE_ALIGN (etype) < TYPE_ALIGN (type))
5094 {
5095 expr = convert (maybe_pad_type (etype, NULL_TREE, TYPE_ALIGN (type),
5096 Empty, false, false, false, true),
5097 expr);
5098 return unchecked_convert (type, expr, notrunc_p);
5099 }
5100
5101 /* Otherwise, just build a VIEW_CONVERT_EXPR of the expression. */
a1ab4c31
AC
5102 else
5103 {
5104 expr = maybe_unconstrained_array (expr);
5105 etype = TREE_TYPE (expr);
c34f3839 5106 ecode = TREE_CODE (etype);
afcea859
EB
5107 if (can_fold_for_view_convert_p (expr))
5108 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
5109 else
5110 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
a1ab4c31
AC
5111 }
5112
afcea859
EB
5113 /* If the result is an integral type whose precision is not equal to its
5114 size, sign- or zero-extend the result. We need not do this if the input
5115 is an integral type of the same precision and signedness or if the output
55c8849f
EB
5116 is a biased type or if both the input and output are unsigned, or if the
5117 lower bound is constant and non-negative, see E_Signed_Integer_Subtype
5118 case of gnat_to_gnu_entity. */
a1ab4c31 5119 if (!notrunc_p
9a1bdc31
EB
5120 && INTEGRAL_TYPE_P (type)
5121 && TYPE_RM_SIZE (type)
5122 && tree_int_cst_compare (TYPE_RM_SIZE (type), TYPE_SIZE (type)) < 0
a1ab4c31
AC
5123 && !(INTEGRAL_TYPE_P (etype)
5124 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
9a1bdc31
EB
5125 && tree_int_cst_compare (TYPE_RM_SIZE (type),
5126 TYPE_RM_SIZE (etype)
5127 ? TYPE_RM_SIZE (etype)
5128 : TYPE_SIZE (etype)) == 0)
5129 && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
55c8849f
EB
5130 && !((TYPE_UNSIGNED (type)
5131 || (TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST
5132 && tree_int_cst_sgn (TYPE_MIN_VALUE (type)) >= 0))
5133 && TYPE_UNSIGNED (etype)))
a1ab4c31 5134 {
c34f3839 5135 tree base_type
9a1bdc31
EB
5136 = gnat_type_for_size (TREE_INT_CST_LOW (TYPE_SIZE (type)),
5137 TYPE_UNSIGNED (type));
a1ab4c31
AC
5138 tree shift_expr
5139 = convert (base_type,
5140 size_binop (MINUS_EXPR,
9a1bdc31 5141 TYPE_SIZE (type), TYPE_RM_SIZE (type)));
a1ab4c31
AC
5142 expr
5143 = convert (type,
5144 build_binary_op (RSHIFT_EXPR, base_type,
5145 build_binary_op (LSHIFT_EXPR, base_type,
5146 convert (base_type, expr),
5147 shift_expr),
5148 shift_expr));
5149 }
5150
5151 /* An unchecked conversion should never raise Constraint_Error. The code
5152 below assumes that GCC's conversion routines overflow the same way that
5153 the underlying hardware does. This is probably true. In the rare case
5154 when it is false, we can rely on the fact that such conversions are
5155 erroneous anyway. */
5156 if (TREE_CODE (expr) == INTEGER_CST)
5157 TREE_OVERFLOW (expr) = 0;
5158
5159 /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
5160 mark it as no longer constant. */
5161 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
5162 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
5163 OEP_ONLY_CONST))
5164 TREE_CONSTANT (expr) = 0;
5165
5166 return expr;
5167}
5168\f
feec4372 5169/* Return the appropriate GCC tree code for the specified GNAT_TYPE,
a1ab4c31
AC
5170 the latter being a record type as predicated by Is_Record_Type. */
5171
5172enum tree_code
5173tree_code_for_record_type (Entity_Id gnat_type)
5174{
b1a785fb 5175 Node_Id component_list, component;
a1ab4c31 5176
b1a785fb
EB
5177 /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5178 fields are all in the variant part. Otherwise, return RECORD_TYPE. */
a1ab4c31
AC
5179 if (!Is_Unchecked_Union (gnat_type))
5180 return RECORD_TYPE;
5181
b1a785fb
EB
5182 gnat_type = Implementation_Base_Type (gnat_type);
5183 component_list
5184 = Component_List (Type_Definition (Declaration_Node (gnat_type)));
5185
a1ab4c31
AC
5186 for (component = First_Non_Pragma (Component_Items (component_list));
5187 Present (component);
5188 component = Next_Non_Pragma (component))
5189 if (Ekind (Defining_Entity (component)) == E_Component)
5190 return RECORD_TYPE;
5191
5192 return UNION_TYPE;
5193}
5194
caa9d12a
EB
5195/* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5196 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5197 according to the presence of an alignment clause on the type or, if it
5198 is an array, on the component type. */
5199
5200bool
5201is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
5202{
5203 gnat_type = Underlying_Type (gnat_type);
5204
5205 *align_clause = Present (Alignment_Clause (gnat_type));
5206
5207 if (Is_Array_Type (gnat_type))
5208 {
5209 gnat_type = Underlying_Type (Component_Type (gnat_type));
5210 if (Present (Alignment_Clause (gnat_type)))
5211 *align_clause = true;
5212 }
5213
5214 if (!Is_Floating_Point_Type (gnat_type))
5215 return false;
5216
5217 if (UI_To_Int (Esize (gnat_type)) != 64)
5218 return false;
5219
5220 return true;
5221}
5222
5223/* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5224 size is greater or equal to 64 bits, or an array of such a type. Set
5225 ALIGN_CLAUSE according to the presence of an alignment clause on the
5226 type or, if it is an array, on the component type. */
5227
5228bool
5229is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
5230{
5231 gnat_type = Underlying_Type (gnat_type);
5232
5233 *align_clause = Present (Alignment_Clause (gnat_type));
5234
5235 if (Is_Array_Type (gnat_type))
5236 {
5237 gnat_type = Underlying_Type (Component_Type (gnat_type));
5238 if (Present (Alignment_Clause (gnat_type)))
5239 *align_clause = true;
5240 }
5241
5242 if (!Is_Scalar_Type (gnat_type))
5243 return false;
5244
5245 if (UI_To_Int (Esize (gnat_type)) < 64)
5246 return false;
5247
5248 return true;
5249}
5250
a1ab4c31
AC
5251/* Return true if GNU_TYPE is suitable as the type of a non-aliased
5252 component of an aggregate type. */
5253
5254bool
5255type_for_nonaliased_component_p (tree gnu_type)
5256{
5257 /* If the type is passed by reference, we may have pointers to the
5258 component so it cannot be made non-aliased. */
5259 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
5260 return false;
5261
5262 /* We used to say that any component of aggregate type is aliased
5263 because the front-end may take 'Reference of it. The front-end
5264 has been enhanced in the meantime so as to use a renaming instead
5265 in most cases, but the back-end can probably take the address of
5266 such a component too so we go for the conservative stance.
5267
5268 For instance, we might need the address of any array type, even
5269 if normally passed by copy, to construct a fat pointer if the
5270 component is used as an actual for an unconstrained formal.
5271
5272 Likewise for record types: even if a specific record subtype is
5273 passed by copy, the parent type might be passed by ref (e.g. if
5274 it's of variable size) and we might take the address of a child
5275 component to pass to a parent formal. We have no way to check
5276 for such conditions here. */
5277 if (AGGREGATE_TYPE_P (gnu_type))
5278 return false;
5279
5280 return true;
5281}
5282
bb1f7929
EB
5283/* Return true if TYPE is a smaller form of ORIG_TYPE. */
5284
5285bool
5286smaller_form_type_p (tree type, tree orig_type)
5287{
5288 tree size, osize;
5289
5290 /* We're not interested in variants here. */
5291 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
5292 return false;
5293
5294 /* Like a variant, a packable version keeps the original TYPE_NAME. */
5295 if (TYPE_NAME (type) != TYPE_NAME (orig_type))
5296 return false;
5297
5298 size = TYPE_SIZE (type);
5299 osize = TYPE_SIZE (orig_type);
5300
5301 if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
5302 return false;
5303
5304 return tree_int_cst_lt (size, osize) != 0;
5305}
5306
a22b794d 5307/* Perform final processing on global declarations. */
d7438551 5308
0f50b3db
EB
5309static GTY (()) tree dummy_global;
5310
a1ab4c31 5311void
a22b794d 5312gnat_write_global_declarations (void)
a1ab4c31 5313{
10e4d056
EB
5314 unsigned int i;
5315 tree iter;
5316
65444786 5317 /* If we have declared types as used at the global level, insert them in
755c71fa
EB
5318 the global hash table. We use a dummy variable for this purpose, but
5319 we need to build it unconditionally to avoid -fcompare-debug issues. */
5320 if (first_global_object_name)
65444786 5321 {
35e8bcf4 5322 struct varpool_node *node;
d3c268ab
EB
5323 char *label;
5324
5325 ASM_FORMAT_PRIVATE_NAME (label, first_global_object_name, 0);
65444786 5326 dummy_global
d3c268ab
EB
5327 = build_decl (BUILTINS_LOCATION, VAR_DECL, get_identifier (label),
5328 void_type_node);
dd25fe0a 5329 DECL_HARD_REGISTER (dummy_global) = 1;
65444786 5330 TREE_STATIC (dummy_global) = 1;
037e5573 5331 node = varpool_node::get_create (dummy_global);
dd25fe0a 5332 node->definition = 1;
67348ccc 5333 node->force_output = 1;
65444786 5334
755c71fa
EB
5335 if (types_used_by_cur_var_decl)
5336 while (!types_used_by_cur_var_decl->is_empty ())
5337 {
5338 tree t = types_used_by_cur_var_decl->pop ();
5339 types_used_by_var_decl_insert (t, dummy_global);
5340 }
65444786
EB
5341 }
5342
a22b794d
EB
5343 /* Output debug information for all global type declarations first. This
5344 ensures that global types whose compilation hasn't been finalized yet,
5345 for example pointers to Taft amendment types, have their compilation
5346 finalized in the right context. */
5347 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5348 if (TREE_CODE (iter) == TYPE_DECL && !DECL_IGNORED_P (iter))
1f0e2688 5349 debug_hooks->type_decl (iter, false);
a22b794d
EB
5350
5351 /* Then output the global variables. We need to do that after the debug
8afaddaa 5352 information for global types is emitted so that they are finalized. */
a22b794d
EB
5353 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5354 if (TREE_CODE (iter) == VAR_DECL)
5355 rest_of_decl_compilation (iter, true, 0);
caadda8e
PMR
5356
5357 /* Output the imported modules/declarations. In GNAT, these only
5358 materialize subprograms. */
5359 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5360 if (TREE_CODE (iter) == IMPORTED_DECL && !DECL_IGNORED_P (iter))
5361 debug_hooks->imported_module_or_decl (iter, DECL_NAME (iter),
5362 DECL_CONTEXT (iter), 0);
a1ab4c31
AC
5363}
5364
5365/* ************************************************************************
5366 * * GCC builtins support *
5367 * ************************************************************************ */
5368
5369/* The general scheme is fairly simple:
5370
5371 For each builtin function/type to be declared, gnat_install_builtins calls
aef308d0 5372 internal facilities which eventually get to gnat_pushdecl, which in turn
a1ab4c31
AC
5373 tracks the builtin function decls so declared in the 'builtin_decls' global
5374 data structure. When an Intrinsic subprogram declaration is processed, we
5375 search this global data structure to retrieve the associated BUILT_IN DECL
5376 node. */
5377
5378/* Search the chain of currently available builtin declarations for a node
5379 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
5380 found, if any, or NULL_TREE otherwise. */
5381tree
5382builtin_decl_for (tree name)
5383{
5384 unsigned i;
5385 tree decl;
5386
9771b263 5387 FOR_EACH_VEC_SAFE_ELT (builtin_decls, i, decl)
a1ab4c31
AC
5388 if (DECL_NAME (decl) == name)
5389 return decl;
5390
5391 return NULL_TREE;
5392}
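/* For instance (an illustrative sketch, not an actual call site from this
   file), code binding an Intrinsic subprogram could do:

     tree fndecl = builtin_decl_for (get_identifier ("__builtin_memcpy"));
     if (fndecl != NULL_TREE)
       ... use fndecl as the decl of the intrinsic ...

   provided gnat_install_builtins has run beforehand so that builtin_decls
   is populated.  */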
5393
5394/* The code below eventually exposes gnat_install_builtins, which declares
5395 the builtin types and functions we might need, either internally or as
5396 user accessible facilities.
5397
5398 ??? This is a first implementation shot, still in rough shape. It is
5399 heavily inspired by the "C" family implementation, with chunks copied
5400 verbatim from there.
5401
ba464315 5402 Two obvious improvement candidates are:
a1ab4c31
AC
5403 o Use a more efficient name/decl mapping scheme
5404 o Devise a middle-end infrastructure to avoid having to copy
5405 pieces between front-ends. */
5406
5407/* ----------------------------------------------------------------------- *
5408 * BUILTIN ELEMENTARY TYPES *
5409 * ----------------------------------------------------------------------- */
5410
5411/* Standard data types to be used in builtin argument declarations. */
5412
5413enum c_tree_index
5414{
5415 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
5416 CTI_STRING_TYPE,
5417 CTI_CONST_STRING_TYPE,
5418
5419 CTI_MAX
5420};
5421
5422static tree c_global_trees[CTI_MAX];
5423
5424#define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
5425#define string_type_node c_global_trees[CTI_STRING_TYPE]
5426#define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
5427
5428/* ??? In addition to some attribute handlers, we currently don't support a
5429 (small) number of builtin types, which in turn inhibits support for a
5430 number of builtin functions. */
5431#define wint_type_node void_type_node
5432#define intmax_type_node void_type_node
5433#define uintmax_type_node void_type_node
5434
a1ab4c31
AC
5435/* Used to help initialize the builtin-types.def table. When a type of
5436 the correct size doesn't exist, use error_mark_node instead of NULL.
5437 The latter results in segfaults even when a decl using the type doesn't
5438 get invoked. */
5439
5440static tree
5441builtin_type_for_size (int size, bool unsignedp)
5442{
ced57283 5443 tree type = gnat_type_for_size (size, unsignedp);
a1ab4c31
AC
5444 return type ? type : error_mark_node;
5445}
5446
5447/* Build/push the elementary type decls that builtin functions/types
5448 will need. */
5449
5450static void
5451install_builtin_elementary_types (void)
5452{
9a1bdc31 5453 signed_size_type_node = gnat_signed_type_for (size_type_node);
a1ab4c31 5454 pid_type_node = integer_type_node;
a1ab4c31
AC
5455
5456 string_type_node = build_pointer_type (char_type_node);
5457 const_string_type_node
5458 = build_pointer_type (build_qualified_type
5459 (char_type_node, TYPE_QUAL_CONST));
5460}
5461
5462/* ----------------------------------------------------------------------- *
5463 * BUILTIN FUNCTION TYPES *
5464 * ----------------------------------------------------------------------- */
5465
5466/* Now, builtin function types per se. */
5467
5468enum c_builtin_type
5469{
5470#define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
5471#define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
5472#define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
5473#define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
5474#define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5475#define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5476#define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
f6a7cffc
TS
5477#define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5478 ARG6) NAME,
5479#define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5480 ARG6, ARG7) NAME,
5481#define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5482 ARG6, ARG7, ARG8) NAME,
d9a6bd32
JJ
5483#define DEF_FUNCTION_TYPE_9(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5484 ARG6, ARG7, ARG8, ARG9) NAME,
5485#define DEF_FUNCTION_TYPE_10(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5486 ARG6, ARG7, ARG8, ARG9, ARG10) NAME,
5487#define DEF_FUNCTION_TYPE_11(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5488 ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) NAME,
a1ab4c31
AC
5489#define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
5490#define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
5491#define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
5492#define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5493#define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
f6a7cffc 5494#define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
56a9f6bc 5495 NAME,
3e32ee19
NS
5496#define DEF_FUNCTION_TYPE_VAR_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5497 ARG6) NAME,
56a9f6bc
TS
5498#define DEF_FUNCTION_TYPE_VAR_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5499 ARG6, ARG7) NAME,
a1ab4c31
AC
5500#define DEF_POINTER_TYPE(NAME, TYPE) NAME,
5501#include "builtin-types.def"
5502#undef DEF_PRIMITIVE_TYPE
5503#undef DEF_FUNCTION_TYPE_0
5504#undef DEF_FUNCTION_TYPE_1
5505#undef DEF_FUNCTION_TYPE_2
5506#undef DEF_FUNCTION_TYPE_3
5507#undef DEF_FUNCTION_TYPE_4
5508#undef DEF_FUNCTION_TYPE_5
5509#undef DEF_FUNCTION_TYPE_6
5510#undef DEF_FUNCTION_TYPE_7
acf0174b 5511#undef DEF_FUNCTION_TYPE_8
d9a6bd32
JJ
5512#undef DEF_FUNCTION_TYPE_9
5513#undef DEF_FUNCTION_TYPE_10
5514#undef DEF_FUNCTION_TYPE_11
a1ab4c31
AC
5515#undef DEF_FUNCTION_TYPE_VAR_0
5516#undef DEF_FUNCTION_TYPE_VAR_1
5517#undef DEF_FUNCTION_TYPE_VAR_2
5518#undef DEF_FUNCTION_TYPE_VAR_3
5519#undef DEF_FUNCTION_TYPE_VAR_4
5520#undef DEF_FUNCTION_TYPE_VAR_5
3e32ee19 5521#undef DEF_FUNCTION_TYPE_VAR_6
56a9f6bc 5522#undef DEF_FUNCTION_TYPE_VAR_7
a1ab4c31
AC
5523#undef DEF_POINTER_TYPE
5524 BT_LAST
5525};
5526
5527typedef enum c_builtin_type builtin_type;
5528
5529/* A temporary array used in communication with def_fn_type. */
5530static GTY(()) tree builtin_types[(int) BT_LAST + 1];
5531
5532/* A helper function for install_builtin_function_types. Build function type
5533 for DEF with return type RET and N arguments. If VAR is true, then the
5534 function should be variadic after those N arguments.
5535
5536 Takes special care not to ICE if any of the types involved are
5537 error_mark_node, which indicates that said type is not in fact available
5538 (see builtin_type_for_size), in which case the function type as a whole
5539 should be error_mark_node. */
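/* As an example (illustrative; the enumerators come from builtin-types.def),
   an entry such as

     DEF_FUNCTION_TYPE_1 (BT_FN_VOID_PTR, BT_VOID, BT_PTR)

   is expanded by install_builtin_function_types into

     def_fn_type (BT_FN_VOID_PTR, BT_VOID, 0, 1, BT_PTR);

   i.e. a non-variadic function type with one pointer argument and a void
   return, stored into builtin_types[BT_FN_VOID_PTR].  */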
5540
5541static void
5542def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
5543{
e5b00edf
NF
5544 tree t;
5545 tree *args = XALLOCAVEC (tree, n);
a1ab4c31
AC
5546 va_list list;
5547 int i;
5548
5549 va_start (list, n);
5550 for (i = 0; i < n; ++i)
5551 {
c6bd4220 5552 builtin_type a = (builtin_type) va_arg (list, int);
a1ab4c31
AC
5553 t = builtin_types[a];
5554 if (t == error_mark_node)
5555 goto egress;
e5b00edf 5556 args[i] = t;
a1ab4c31 5557 }
a1ab4c31 5558
a1ab4c31
AC
5559 t = builtin_types[ret];
5560 if (t == error_mark_node)
5561 goto egress;
e5b00edf
NF
5562 if (var)
5563 t = build_varargs_function_type_array (t, n, args);
5564 else
5565 t = build_function_type_array (t, n, args);
a1ab4c31
AC
5566
5567 egress:
5568 builtin_types[def] = t;
0edf1bb2 5569 va_end (list);
a1ab4c31
AC
5570}
5571
5572/* Build the builtin function types and install them in the builtin_types
5573 array for later use in builtin function decls. */
5574
5575static void
5576install_builtin_function_types (void)
5577{
5578 tree va_list_ref_type_node;
5579 tree va_list_arg_type_node;
5580
5581 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
5582 {
5583 va_list_arg_type_node = va_list_ref_type_node =
5584 build_pointer_type (TREE_TYPE (va_list_type_node));
5585 }
5586 else
5587 {
5588 va_list_arg_type_node = va_list_type_node;
5589 va_list_ref_type_node = build_reference_type (va_list_type_node);
5590 }
5591
5592#define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
5593 builtin_types[ENUM] = VALUE;
5594#define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
5595 def_fn_type (ENUM, RETURN, 0, 0);
5596#define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
5597 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
5598#define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
5599 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
5600#define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5601 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
5602#define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5603 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
5604#define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5605 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5606#define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5607 ARG6) \
5608 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5609#define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5610 ARG6, ARG7) \
5611 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
acf0174b
JJ
5612#define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5613 ARG6, ARG7, ARG8) \
5614 def_fn_type (ENUM, RETURN, 0, 8, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5615 ARG7, ARG8);
d9a6bd32
JJ
5616#define DEF_FUNCTION_TYPE_9(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5617 ARG6, ARG7, ARG8, ARG9) \
5618 def_fn_type (ENUM, RETURN, 0, 9, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5619 ARG7, ARG8, ARG9);
5620#define DEF_FUNCTION_TYPE_10(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
5621 ARG6, ARG7, ARG8, ARG9, ARG10) \
5622 def_fn_type (ENUM, RETURN, 0, 10, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5623 ARG7, ARG8, ARG9, ARG10);
5624#define DEF_FUNCTION_TYPE_11(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
5625 ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) \
5626 def_fn_type (ENUM, RETURN, 0, 11, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5627 ARG7, ARG8, ARG9, ARG10, ARG11);
a1ab4c31
AC
5628#define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
5629 def_fn_type (ENUM, RETURN, 1, 0);
5630#define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
5631 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
5632#define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
5633 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
5634#define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5635 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
5636#define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5637 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
5638#define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5639 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
3e32ee19
NS
5640#define DEF_FUNCTION_TYPE_VAR_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5641 ARG6) \
5642 def_fn_type (ENUM, RETURN, 1, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
56a9f6bc
TS
5643#define DEF_FUNCTION_TYPE_VAR_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5644 ARG6, ARG7) \
5645 def_fn_type (ENUM, RETURN, 1, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
a1ab4c31
AC
5646#define DEF_POINTER_TYPE(ENUM, TYPE) \
5647 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
5648
5649#include "builtin-types.def"
5650
5651#undef DEF_PRIMITIVE_TYPE
f6a7cffc 5652#undef DEF_FUNCTION_TYPE_0
a1ab4c31
AC
5653#undef DEF_FUNCTION_TYPE_1
5654#undef DEF_FUNCTION_TYPE_2
5655#undef DEF_FUNCTION_TYPE_3
5656#undef DEF_FUNCTION_TYPE_4
5657#undef DEF_FUNCTION_TYPE_5
5658#undef DEF_FUNCTION_TYPE_6
f6a7cffc
TS
5659#undef DEF_FUNCTION_TYPE_7
5660#undef DEF_FUNCTION_TYPE_8
d9a6bd32
JJ
5661#undef DEF_FUNCTION_TYPE_9
5662#undef DEF_FUNCTION_TYPE_10
5663#undef DEF_FUNCTION_TYPE_11
a1ab4c31
AC
5664#undef DEF_FUNCTION_TYPE_VAR_0
5665#undef DEF_FUNCTION_TYPE_VAR_1
5666#undef DEF_FUNCTION_TYPE_VAR_2
5667#undef DEF_FUNCTION_TYPE_VAR_3
5668#undef DEF_FUNCTION_TYPE_VAR_4
5669#undef DEF_FUNCTION_TYPE_VAR_5
3e32ee19 5670#undef DEF_FUNCTION_TYPE_VAR_6
56a9f6bc 5671#undef DEF_FUNCTION_TYPE_VAR_7
a1ab4c31
AC
5672#undef DEF_POINTER_TYPE
5673 builtin_types[(int) BT_LAST] = NULL_TREE;
5674}
5675
5676/* ----------------------------------------------------------------------- *
5677 * BUILTIN ATTRIBUTES *
5678 * ----------------------------------------------------------------------- */
5679
5680enum built_in_attribute
5681{
5682#define DEF_ATTR_NULL_TREE(ENUM) ENUM,
5683#define DEF_ATTR_INT(ENUM, VALUE) ENUM,
e384e6b5 5684#define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
a1ab4c31
AC
5685#define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
5686#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
5687#include "builtin-attrs.def"
5688#undef DEF_ATTR_NULL_TREE
5689#undef DEF_ATTR_INT
e384e6b5 5690#undef DEF_ATTR_STRING
a1ab4c31
AC
5691#undef DEF_ATTR_IDENT
5692#undef DEF_ATTR_TREE_LIST
5693 ATTR_LAST
5694};
5695
5696static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
5697
5698static void
5699install_builtin_attributes (void)
5700{
5701 /* Fill in the built_in_attributes array. */
5702#define DEF_ATTR_NULL_TREE(ENUM) \
5703 built_in_attributes[(int) ENUM] = NULL_TREE;
5704#define DEF_ATTR_INT(ENUM, VALUE) \
5705 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
e384e6b5
BS
5706#define DEF_ATTR_STRING(ENUM, VALUE) \
5707 built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
a1ab4c31
AC
5708#define DEF_ATTR_IDENT(ENUM, STRING) \
5709 built_in_attributes[(int) ENUM] = get_identifier (STRING);
5710#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
5711 built_in_attributes[(int) ENUM] \
5712 = tree_cons (built_in_attributes[(int) PURPOSE], \
5713 built_in_attributes[(int) VALUE], \
5714 built_in_attributes[(int) CHAIN]);
5715#include "builtin-attrs.def"
5716#undef DEF_ATTR_NULL_TREE
5717#undef DEF_ATTR_INT
e384e6b5 5718#undef DEF_ATTR_STRING
a1ab4c31
AC
5719#undef DEF_ATTR_IDENT
5720#undef DEF_ATTR_TREE_LIST
5721}
5722
5723/* Handle a "const" attribute; arguments as in
5724 struct attribute_spec.handler. */
5725
5726static tree
5727handle_const_attribute (tree *node, tree ARG_UNUSED (name),
5728 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5729 bool *no_add_attrs)
5730{
5731 if (TREE_CODE (*node) == FUNCTION_DECL)
5732 TREE_READONLY (*node) = 1;
5733 else
5734 *no_add_attrs = true;
5735
5736 return NULL_TREE;
5737}

/* Handle a "nothrow" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
                          tree ARG_UNUSED (args), int ARG_UNUSED (flags),
                          bool *no_add_attrs)
{
  if (TREE_CODE (*node) == FUNCTION_DECL)
    TREE_NOTHROW (*node) = 1;
  else
    *no_add_attrs = true;

  return NULL_TREE;
}

/* Handle a "pure" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
                       int ARG_UNUSED (flags), bool *no_add_attrs)
{
  if (TREE_CODE (*node) == FUNCTION_DECL)
    DECL_PURE_P (*node) = 1;
  /* TODO: support types.  */
  else
    {
      warning (OPT_Wattributes, "%qs attribute ignored",
               IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
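
/* For illustration (added note): the C-level equivalent is

     extern __SIZE_TYPE__ strlen (const char *) __attribute__ ((pure));

   "pure" is weaker than "const": the function may read global memory but
   must not modify it, which is what DECL_PURE_P records for the
   optimizers.  */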

/* Handle a "no vops" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
                         tree ARG_UNUSED (args), int ARG_UNUSED (flags),
                         bool *ARG_UNUSED (no_add_attrs))
{
  gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
  DECL_IS_NOVOPS (*node) = 1;
  return NULL_TREE;
}

/* Helper for nonnull attribute handling; fetch the operand number
   from the attribute argument list.  */

static bool
get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
{
  /* Verify the arg number is a constant.  */
  if (!tree_fits_uhwi_p (arg_num_expr))
    return false;

  *valp = TREE_INT_CST_LOW (arg_num_expr);
  return true;
}

/* Handle the "nonnull" attribute.  */

static tree
handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
                          tree args, int ARG_UNUSED (flags),
                          bool *no_add_attrs)
{
  tree type = *node;
  unsigned HOST_WIDE_INT attr_arg_num;

  /* If no arguments are specified, all pointer arguments should be
     non-null.  Verify a full prototype is given so that the arguments
     will have the correct types when we actually check them later.
     Avoid diagnosing type-generic built-ins since those have no
     prototype.  */
  if (!args)
    {
      if (!prototype_p (type)
          && (!TYPE_ATTRIBUTES (type)
              || !lookup_attribute ("type generic", TYPE_ATTRIBUTES (type))))
        {
          error ("nonnull attribute without arguments on a non-prototype");
          *no_add_attrs = true;
        }
      return NULL_TREE;
    }

  /* Argument list specified.  Verify that each argument number references
     a pointer argument.  */
  for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
    {
      unsigned HOST_WIDE_INT arg_num = 0, ck_num;

      if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
        {
          error ("nonnull argument has invalid operand number (argument %lu)",
                 (unsigned long) attr_arg_num);
          *no_add_attrs = true;
          return NULL_TREE;
        }

      if (prototype_p (type))
        {
          function_args_iterator iter;
          tree argument;

          function_args_iter_init (&iter, type);
          for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
            {
              argument = function_args_iter_cond (&iter);
              if (!argument || ck_num == arg_num)
                break;
            }

          if (!argument
              || TREE_CODE (argument) == VOID_TYPE)
            {
              error ("nonnull argument with out-of-range operand number "
                     "(argument %lu, operand %lu)",
                     (unsigned long) attr_arg_num, (unsigned long) arg_num);
              *no_add_attrs = true;
              return NULL_TREE;
            }

          if (TREE_CODE (argument) != POINTER_TYPE)
            {
              error ("nonnull argument references non-pointer operand "
                     "(argument %lu, operand %lu)",
                     (unsigned long) attr_arg_num, (unsigned long) arg_num);
              *no_add_attrs = true;
              return NULL_TREE;
            }
        }
    }

  return NULL_TREE;
}
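
/* For illustration (added note): both forms below would be accepted by this
   handler,

     extern void *copy (void *dst, const void *src, __SIZE_TYPE__ n)
       __attribute__ ((nonnull (1, 2)));
     extern void release (void *p) __attribute__ ((nonnull));

   The first marks operands 1 and 2 as never null; the second, lacking
   arguments, requires a full prototype and marks every pointer argument.
   The declarations are made-up examples, not part of this file.  */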

/* Handle a "sentinel" attribute.  */

static tree
handle_sentinel_attribute (tree *node, tree name, tree args,
                           int ARG_UNUSED (flags), bool *no_add_attrs)
{
  if (!prototype_p (*node))
    {
      warning (OPT_Wattributes,
               "%qs attribute requires prototypes with named arguments",
               IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }
  else
    {
      if (!stdarg_p (*node))
        {
          warning (OPT_Wattributes,
                   "%qs attribute only applies to variadic functions",
                   IDENTIFIER_POINTER (name));
          *no_add_attrs = true;
        }
    }

  if (args)
    {
      tree position = TREE_VALUE (args);

      if (TREE_CODE (position) != INTEGER_CST)
        {
          warning (0, "requested position is not an integer constant");
          *no_add_attrs = true;
        }
      else
        {
          if (tree_int_cst_lt (position, integer_zero_node))
            {
              warning (0, "requested position is less than zero");
              *no_add_attrs = true;
            }
        }
    }

  return NULL_TREE;
}
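
/* For illustration (added note): the canonical use of "sentinel" is on
   variadic functions whose argument list must end with a null pointer, e.g.

     extern int execl (const char *path, const char *arg, ...)
       __attribute__ ((sentinel));

   An optional integer argument gives the position of the sentinel counted
   backwards from the end of the call, so sentinel (1) expects the null one
   slot before the last actual argument.  */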

/* Handle a "noreturn" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
                           int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree type = TREE_TYPE (*node);

  /* See FIXME comment in c_common_attribute_table.  */
  if (TREE_CODE (*node) == FUNCTION_DECL)
    TREE_THIS_VOLATILE (*node) = 1;
  else if (TREE_CODE (type) == POINTER_TYPE
           && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
    TREE_TYPE (*node)
      = build_pointer_type
        (build_type_variant (TREE_TYPE (type),
                             TYPE_READONLY (TREE_TYPE (type)), 1));
  else
    {
      warning (OPT_Wattributes, "%qs attribute ignored",
               IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
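
/* For illustration (added note): this covers both declarations such as

     extern void fatal_error_1 (const char *msg) __attribute__ ((noreturn));

   and pointers to functions that do not return,

     void (*panic_handler) (int) __attribute__ ((noreturn));

   In the first case TREE_THIS_VOLATILE is set on the decl; in the second a
   "volatile" variant of the pointed-to function type is built, which is the
   historical tree representation of non-returning calls through pointers.
   The names above are invented for the example.  */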

/* Handle a "noinline" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_noinline_attribute (tree *node, tree name,
                           tree ARG_UNUSED (args),
                           int ARG_UNUSED (flags), bool *no_add_attrs)
{
  if (TREE_CODE (*node) == FUNCTION_DECL)
    {
      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (*node)))
        {
          warning (OPT_Wattributes, "%qE attribute ignored due to conflict "
                   "with attribute %qs", name, "always_inline");
          *no_add_attrs = true;
        }
      else
        DECL_UNINLINABLE (*node) = 1;
    }
  else
    {
      warning (OPT_Wattributes, "%qE attribute ignored", name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}

/* Handle a "noclone" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_noclone_attribute (tree *node, tree name,
                          tree ARG_UNUSED (args),
                          int ARG_UNUSED (flags), bool *no_add_attrs)
{
  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (OPT_Wattributes, "%qE attribute ignored", name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}

/* Handle a "leaf" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_leaf_attribute (tree *node, tree name, tree ARG_UNUSED (args),
                       int ARG_UNUSED (flags), bool *no_add_attrs)
{
  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (OPT_Wattributes, "%qE attribute ignored", name);
      *no_add_attrs = true;
    }
  if (!TREE_PUBLIC (*node))
    {
      warning (OPT_Wattributes, "%qE attribute has no effect", name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
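
/* For illustration (added note): "leaf" only makes sense on externally
   visible functions, hence the TREE_PUBLIC check above.  A typical use is

     extern double sin (double) __attribute__ ((nothrow, leaf));

   telling the optimizers that the callee can return to the current
   compilation unit only by returning or by raising an exception, never by
   calling back into it.  */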

/* Handle a "always_inline" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_always_inline_attribute (tree *node, tree name, tree ARG_UNUSED (args),
                                int ARG_UNUSED (flags), bool *no_add_attrs)
{
  if (TREE_CODE (*node) == FUNCTION_DECL)
    {
      /* Set the attribute and mark it for disregarding inline limits.  */
      DECL_DISREGARD_INLINE_LIMITS (*node) = 1;
    }
  else
    {
      warning (OPT_Wattributes, "%qE attribute ignored", name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
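
/* For illustration (added note): a typical use pairs the attribute with an
   inline definition, e.g.

     static inline int add1 (int x) __attribute__ ((always_inline));

   Setting DECL_DISREGARD_INLINE_LIMITS makes the inliner ignore its size
   heuristics for such functions; the declaration is a made-up example.  */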

/* Handle a "malloc" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
                         int ARG_UNUSED (flags), bool *no_add_attrs)
{
  if (TREE_CODE (*node) == FUNCTION_DECL
      && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
    DECL_IS_MALLOC (*node) = 1;
  else
    {
      warning (OPT_Wattributes, "%qs attribute ignored",
               IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
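
/* For illustration (added note): the handler requires a pointer return
   type, so a declaration such as

     extern void *arena_alloc (__SIZE_TYPE__ n) __attribute__ ((malloc));

   is accepted and sets DECL_IS_MALLOC, letting alias analysis assume the
   returned pointer does not alias any other live object, whereas the same
   attribute on a function returning int would be ignored with a warning.
   The declaration is a made-up example.  */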

/* Fake handler for attributes we don't properly support.  */

tree
fake_attribute_handler (tree * ARG_UNUSED (node),
                        tree ARG_UNUSED (name),
                        tree ARG_UNUSED (args),
                        int ARG_UNUSED (flags),
                        bool * ARG_UNUSED (no_add_attrs))
{
  return NULL_TREE;
}

/* Handle a "type_generic" attribute.  */

static tree
handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
                               tree ARG_UNUSED (args), int ARG_UNUSED (flags),
                               bool * ARG_UNUSED (no_add_attrs))
{
  /* Ensure we have a function type.  */
  gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);

  /* Ensure we have a variadic function.  */
  gcc_assert (!prototype_p (*node) || stdarg_p (*node));

  return NULL_TREE;
}

/* Handle a "vector_size" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_vector_size_attribute (tree *node, tree name, tree args,
                              int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree type = *node;
  tree vector_type;

  *no_add_attrs = true;

  /* We need to provide for vector pointers, vector arrays, and
     functions returning vectors.  For example:

       __attribute__((vector_size(16))) short *foo;

     In this case, the mode is SI, but the type being modified is
     HI, so we need to look further.  */
  while (POINTER_TYPE_P (type)
         || TREE_CODE (type) == FUNCTION_TYPE
         || TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);

  vector_type = build_vector_type_for_size (type, TREE_VALUE (args), name);
  if (!vector_type)
    return NULL_TREE;

  /* Build back pointers if needed.  */
  *node = reconstruct_complex_type (*node, vector_type);

  return NULL_TREE;
}
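
/* For illustration (added note): the classic use defines a machine vector
   type from a scalar element type and a size in bytes, e.g.

     typedef int v4si __attribute__ ((vector_size (16)));

   which on typical targets yields a vector of four 32-bit ints (16 bytes
   divided by the element size).  */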

/* Handle a "vector_type" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
                              int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree type = *node;
  tree vector_type;

  *no_add_attrs = true;

  if (TREE_CODE (type) != ARRAY_TYPE)
    {
      error ("attribute %qs applies to array types only",
             IDENTIFIER_POINTER (name));
      return NULL_TREE;
    }

  vector_type = build_vector_type_for_array (type, name);
  if (!vector_type)
    return NULL_TREE;

  TYPE_REPRESENTATIVE_ARRAY (vector_type) = type;
  *node = vector_type;

  return NULL_TREE;
}
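
/* For illustration (added note, based on how the handler is written rather
   than on other sources): "vector_type" is GNAT-specific and turns a
   constrained array type into the corresponding vector type, e.g. a
   4-element array of 32-bit floats would become a 128-bit float vector;
   Ada user code would typically reach it through GNAT's pragma
   Machine_Attribute on such an array type.  TYPE_REPRESENTATIVE_ARRAY
   keeps a link back to the original array view.  */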

/* ----------------------------------------------------------------------- *
 *                           BUILTIN FUNCTIONS                             *
 * ----------------------------------------------------------------------- */

/* Worker for DEF_BUILTIN.  Possibly define a builtin function with one or two
   names.  Does not declare a non-__builtin_ function if flag_no_builtin, or
   if nonansi_p and flag_no_nonansi_builtin.  */

static void
def_builtin_1 (enum built_in_function fncode,
               const char *name,
               enum built_in_class fnclass,
               tree fntype, tree libtype,
               bool both_p, bool fallback_p,
               bool nonansi_p ATTRIBUTE_UNUSED,
               tree fnattrs, bool implicit_p)
{
  tree decl;
  const char *libname;

  /* Preserve an already installed decl.  It most likely was setup in advance
     (e.g. as part of the internal builtins) for specific reasons.  */
  if (builtin_decl_explicit (fncode))
    return;

  gcc_assert ((!both_p && !fallback_p)
              || !strncmp (name, "__builtin_",
                           strlen ("__builtin_")));

  libname = name + strlen ("__builtin_");
  decl = add_builtin_function (name, fntype, fncode, fnclass,
                               (fallback_p ? libname : NULL),
                               fnattrs);
  if (both_p)
    /* ??? This is normally further controlled by command-line options
       like -fno-builtin, but we don't have them for Ada.  */
    add_builtin_function (libname, libtype, fncode, fnclass,
                          NULL, fnattrs);

  set_builtin_decl (fncode, decl, implicit_p);
}
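
/* For illustration (added note): when both_p is set, a single builtins.def
   entry for, say, "__builtin_memcpy" produces two decls, "__builtin_memcpy"
   itself and the plain "memcpy" fallback (libname is the name with the
   "__builtin_" prefix stripped), both registered with add_builtin_function
   and then recorded via set_builtin_decl.  The memcpy name is only an
   example of the pattern.  */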

static int flag_isoc94 = 0;
static int flag_isoc99 = 0;
static int flag_isoc11 = 0;

/* Install what the common builtins.def offers.  */

static void
install_builtin_functions (void)
{
#define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
                    NONANSI_P, ATTRS, IMPLICIT, COND)                   \
  if (NAME && COND)                                                     \
    def_builtin_1 (ENUM, NAME, CLASS,                                   \
                   builtin_types[(int) TYPE],                           \
                   builtin_types[(int) LIBTYPE],                        \
                   BOTH_P, FALLBACK_P, NONANSI_P,                       \
                   built_in_attributes[(int) ATTRS], IMPLICIT);
#include "builtins.def"
}

/* ----------------------------------------------------------------------- *
 *                           BUILTIN FUNCTIONS                             *
 * ----------------------------------------------------------------------- */

/* Install the builtin functions we might need.  */

void
gnat_install_builtins (void)
{
  install_builtin_elementary_types ();
  install_builtin_function_types ();
  install_builtin_attributes ();

  /* Install builtins used by generic middle-end pieces first.  Some of these
     know about internal specificities and control attributes accordingly, for
     instance __builtin_alloca vs no-throw and -fstack-check.  We will ignore
     the generic definition from builtins.def.  */
  build_common_builtin_nodes ();

  /* Now, install the target specific builtins, such as the AltiVec family on
     ppc, and the common set as exposed by builtins.def.  */
  targetm.init_builtins ();
  install_builtin_functions ();
}

#include "gt-ada-utils.h"
#include "gtype-ada.h"