/****************************************************************************
 *                                                                          *
 *                         GNAT COMPILER COMPONENTS                         *
 *                                                                          *
 *                                U T I L S                                 *
 *                                                                          *
 *                          C Implementation File                           *
 *                                                                          *
 *          Copyright (C) 1992-2016, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software;  you can  redistribute it  and/or modify it under *
 * terms of the  GNU General Public License as published  by the Free Soft- *
 * ware  Foundation;  either version 3,  or (at your option) any later ver- *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY;  without even the  implied warranty of MERCHANTABILITY *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for more details.  You should have received a copy of the GNU General    *
 * Public License along with GCC; see the file COPYING3.  If not see        *
 * <http://www.gnu.org/licenses/>.                                          *
 *                                                                          *
 * GNAT was originally developed by the GNAT team at New York University.   *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 *                                                                          *
 ****************************************************************************/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "function.h"
#include "tree.h"
#include "stringpool.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "attribs.h"
#include "varasm.h"
#include "toplev.h"
#include "output.h"
#include "debug.h"
#include "convert.h"
#include "common/common-target.h"
#include "langhooks.h"
#include "tree-dump.h"
#include "tree-inline.h"

#include "ada.h"
#include "types.h"
#include "atree.h"
#include "nlists.h"
#include "uintp.h"
#include "fe.h"
#include "sinfo.h"
#include "einfo.h"
#include "ada-tree.h"
#include "gigi.h"

/* If nonzero, pretend we are allocating at global level.  */
int force_global;

/* The default alignment of "double" floating-point types, i.e. floating
   point types whose size is equal to 64 bits, or 0 if this alignment is
   not specifically capped.  */
int double_float_alignment;

/* The default alignment of "double" or larger scalar types, i.e. scalar
   types whose size is greater than or equal to 64 bits, or 0 if this
   alignment is not specifically capped.  */
int double_scalar_alignment;

/* True if floating-point arithmetic may use wider intermediate results.  */
bool fp_arith_may_widen = true;

/* Tree nodes for the various types and decls we create.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons.  */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];

/* Likewise, but with extra info for each of the possible raise reasons.  */
tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];

/* Forward declarations for handlers of attributes.  */
static tree handle_const_attribute (tree *, tree, tree, int, bool *);
static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
static tree handle_always_inline_attribute (tree *, tree, tree, int, bool *);
static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);

/* Fake handler for attributes we don't properly support, typically because
   they'd require dragging a lot of the common-c front-end circuitry.  */
static tree fake_attribute_handler (tree *, tree, tree, int, bool *);

/* Table of machine-independent internal attributes for Ada.  We support
   this minimal set of attributes to accommodate the needs of builtins.  */
const struct attribute_spec gnat_internal_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  { "const",        0, 0,  true,  false, false, handle_const_attribute,
    false },
  { "nothrow",      0, 0,  true,  false, false, handle_nothrow_attribute,
    false },
  { "pure",         0, 0,  true,  false, false, handle_pure_attribute,
    false },
  { "no vops",      0, 0,  true,  false, false, handle_novops_attribute,
    false },
  { "nonnull",      0, -1, false, true,  true,  handle_nonnull_attribute,
    false },
  { "sentinel",     0, 1,  false, true,  true,  handle_sentinel_attribute,
    false },
  { "noreturn",     0, 0,  true,  false, false, handle_noreturn_attribute,
    false },
  { "leaf",         0, 0,  true,  false, false, handle_leaf_attribute,
    false },
  { "always_inline",0, 0,  true,  false, false, handle_always_inline_attribute,
    false },
  { "malloc",       0, 0,  true,  false, false, handle_malloc_attribute,
    false },
  { "type generic", 0, 0,  false, true,  true,  handle_type_generic_attribute,
    false },

  { "vector_size",  1, 1,  false, true,  false, handle_vector_size_attribute,
    false },
  { "vector_type",  0, 0,  false, true,  false, handle_vector_type_attribute,
    false },
  { "may_alias",    0, 0,  false, true,  false, NULL, false },

  /* ??? format and format_arg are heavy and not supported, which actually
     prevents support for stdio builtins, which we however declare as part
     of the common builtins.def contents.  */
  { "format",     3, 3,  false, true,  true,  fake_attribute_handler, false },
  { "format_arg", 1, 1,  false, true,  true,  fake_attribute_handler, false },

  { NULL,         0, 0, false, false, false, NULL, false }
};

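/* Reading one entry of the table above against the field layout documented
   in the comment: for "nonnull", min_len 0 and max_len -1 mean the attribute
   accepts any number of arguments, decl_req false together with type_req and
   fn_type_req true means it is checked against function types rather than
   declarations, and handle_nonnull_attribute is the validation routine.  For
   illustration only, a handler has the following shape (this sketch is not a
   handler of this unit):

     static tree
     handle_example_attribute (tree *node, tree name, tree args,
                               int flags, bool *no_add_attrs)
     {
       // If the attribute cannot be applied to *node, refuse it by
       // setting *no_add_attrs; return NULL_TREE in either case.
       *no_add_attrs = true;
       return NULL_TREE;
     }
*/
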
/* Associates a GNAT tree node to a GCC tree node.  It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'.  See documentation
   of `save_gnu_tree' for more info.  */
static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;

#define GET_GNU_TREE(GNAT_ENTITY) \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]

#define SET_GNU_TREE(GNAT_ENTITY,VAL) \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_GNU_TREE(GNAT_ENTITY) \
  (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* Associates a GNAT entity to a GCC tree node used as a dummy, if any.  */
static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;

#define GET_DUMMY_NODE(GNAT_ENTITY) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id]

#define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
  (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* This variable keeps a table for types for each precision so that we only
   allocate each of them once.  Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special.  Perhaps we should NOT share these types; we'll see how it
   goes later.  */
static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static GTY(()) tree float_types[NUM_MACHINE_MODES];

/* For each binding contour we allocate a binding_level structure to indicate
   the binding depth.  */

struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
  /* The binding level containing this one (the enclosing binding level).  */
  struct gnat_binding_level *chain;
  /* The BLOCK node for this level.  */
  tree block;
  /* If nonzero, the setjmp buffer that needs to be updated for any
     variable-sized definition within this context.  */
  tree jmpbuf_decl;
};

/* The binding level currently in effect.  */
static GTY(()) struct gnat_binding_level *current_binding_level;

/* A chain of gnat_binding_level structures awaiting reuse.  */
static GTY((deletable)) struct gnat_binding_level *free_binding_level;

/* The context to be used for global declarations.  */
static GTY(()) tree global_context;

/* An array of global declarations.  */
static GTY(()) vec<tree, va_gc> *global_decls;

/* An array of builtin function declarations.  */
static GTY(()) vec<tree, va_gc> *builtin_decls;

/* A chain of unused BLOCK nodes.  */
static GTY((deletable)) tree free_block_chain;

/* A hash table of padded types.  It is modelled on the generic type
   hash table in tree.c, which must thus be used as a reference.  */

struct GTY((for_user)) pad_type_hash {
  unsigned long hash;
  tree type;
};

struct pad_type_hasher : ggc_cache_ptr_hash<pad_type_hash>
{
  static inline hashval_t hash (pad_type_hash *t) { return t->hash; }
  static bool equal (pad_type_hash *a, pad_type_hash *b);
  static int keep_cache_entry (pad_type_hash *&);
};

static GTY ((cache))
  hash_table<pad_type_hasher> *pad_type_hash_table;

static tree merge_sizes (tree, tree, tree, bool, bool);
static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *);
static tree float_type_for_precision (int, machine_mode);
static tree convert_to_fat_pointer (tree, tree);
static unsigned int scale_by_factor_of (tree, unsigned int);
static bool potential_alignment_gap (tree, tree, tree);

/* Linked list used as a queue to defer the initialization of the DECL_CONTEXT
   of ..._DECL nodes and of the TYPE_CONTEXT of ..._TYPE nodes.  */
struct deferred_decl_context_node
{
  /* The ..._DECL node to work on.  */
  tree decl;

  /* The corresponding entity's Scope.  */
  Entity_Id gnat_scope;

  /* The value of force_global when DECL was pushed.  */
  int force_global;

  /* The list of ..._TYPE nodes to propagate the context to.  */
  vec<tree> types;

  /* The next queue item.  */
  struct deferred_decl_context_node *next;
};

static struct deferred_decl_context_node *deferred_decl_context_queue = NULL;

/* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
   feed it with the elaboration of GNAT_SCOPE.  */
static struct deferred_decl_context_node *
add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global);

/* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
   feed it with the DECL_CONTEXT computed as part of N as soon as it is
   computed.  */
static void add_deferred_type_context (struct deferred_decl_context_node *n,
                                       tree type);
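
/* To illustrate how the deferral queue above is meant to be used (this is
   only a sketch of the flow, not code from this unit): when a declaration is
   pushed before its GNAT scope has been elaborated, gnat_pushdecl calls
   add_deferred_decl_context to queue the declaration together with its
   GNAT_SCOPE and the current value of force_global; any ..._TYPE node whose
   TYPE_CONTEXT depends on that declaration is then attached to the same
   queue item with add_deferred_type_context.  Once the scope is finally
   elaborated, the queue is drained and the computed DECL_CONTEXT is copied
   to every recorded type.  */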

/* Initialize data structures of the utils.c module.  */

void
init_gnat_utils (void)
{
  /* Initialize the association of GNAT nodes to GCC trees.  */
  associate_gnat_to_gnu = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);

  /* Initialize the association of GNAT nodes to GCC trees as dummies.  */
  dummy_node_table = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);

  /* Initialize the hash table of padded types.  */
  pad_type_hash_table = hash_table<pad_type_hasher>::create_ggc (512);
}

/* Destroy data structures of the utils.c module.  */

void
destroy_gnat_utils (void)
{
  /* Destroy the association of GNAT nodes to GCC trees.  */
  ggc_free (associate_gnat_to_gnu);
  associate_gnat_to_gnu = NULL;

  /* Destroy the association of GNAT nodes to GCC trees as dummies.  */
  ggc_free (dummy_node_table);
  dummy_node_table = NULL;

  /* Destroy the hash table of padded types.  */
  pad_type_hash_table->empty ();
  pad_type_hash_table = NULL;
}

/* GNAT_ENTITY is a GNAT tree node for an entity.  Associate GNU_DECL, a GCC
   tree node, with GNAT_ENTITY.  If GNU_DECL is not a ..._DECL node, abort.
   If NO_CHECK is true, the latter check is suppressed.

   If GNU_DECL is zero, reset a previous association.  */

void
save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
{
  /* Check that GNAT_ENTITY is not already defined and that it is being set
     to something which is a decl.  If that is not the case, this usually
     means GNAT_ENTITY is defined twice, but occasionally is due to some
     Gigi problem.  */
  gcc_assert (!(gnu_decl
                && (PRESENT_GNU_TREE (gnat_entity)
                    || (!no_check && !DECL_P (gnu_decl)))));

  SET_GNU_TREE (gnat_entity, gnu_decl);
}

/* GNAT_ENTITY is a GNAT tree node for an entity.  Return the GCC tree node
   that was associated with it.  If there is no such tree node, abort.

   In some cases, such as delayed elaboration or expressions that need to
   be elaborated only once, GNAT_ENTITY is really not an entity.  */

tree
get_gnu_tree (Entity_Id gnat_entity)
{
  gcc_assert (PRESENT_GNU_TREE (gnat_entity));
  return GET_GNU_TREE (gnat_entity);
}

/* Return nonzero if a GCC tree has been associated with GNAT_ENTITY.  */

bool
present_gnu_tree (Entity_Id gnat_entity)
{
  return PRESENT_GNU_TREE (gnat_entity);
}
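
/* A typical elaboration sequence using the three primitives above might look
   as follows (illustrative sketch only; GNAT_ENTITY and GNU_DECL stand for
   values obtained elsewhere in gigi):

     if (present_gnu_tree (gnat_entity))
       gnu_decl = get_gnu_tree (gnat_entity);
     else
       {
         gnu_decl = ...;   // translate the entity into a ..._DECL node
         save_gnu_tree (gnat_entity, gnu_decl, false);
       }

   Passing false as NO_CHECK keeps the assertion that only ..._DECL nodes
   are recorded in the association table.  */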

/* Make a dummy type corresponding to GNAT_TYPE.  */

tree
make_dummy_type (Entity_Id gnat_type)
{
  Entity_Id gnat_equiv = Gigi_Equivalent_Type (Underlying_Type (gnat_type));
  tree gnu_type;

  /* If there was no equivalent type (can only happen when just annotating
     types) or underlying type, go back to the original type.  */
  if (No (gnat_equiv))
    gnat_equiv = gnat_type;

  /* If there is already a dummy type, use it.  Otherwise, make one.  */
  if (PRESENT_DUMMY_NODE (gnat_equiv))
    return GET_DUMMY_NODE (gnat_equiv);

  /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
     an ENUMERAL_TYPE.  */
  gnu_type = make_node (Is_Record_Type (gnat_equiv)
                        ? tree_code_for_record_type (gnat_equiv)
                        : ENUMERAL_TYPE);
  TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
  TYPE_DUMMY_P (gnu_type) = 1;
  TYPE_STUB_DECL (gnu_type)
    = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
  if (Is_By_Reference_Type (gnat_equiv))
    TYPE_BY_REFERENCE_P (gnu_type) = 1;

  SET_DUMMY_NODE (gnat_equiv, gnu_type);

  return gnu_type;
}

/* Return the dummy type that was made for GNAT_TYPE, if any.  */

tree
get_dummy_type (Entity_Id gnat_type)
{
  return GET_DUMMY_NODE (gnat_type);
}

/* Build dummy fat and thin pointer types whose designated type is specified
   by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter.  */

void
build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
{
  tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
  tree gnu_fat_type, fields, gnu_object_type;

  gnu_template_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
  TYPE_DUMMY_P (gnu_template_type) = 1;
  gnu_ptr_template = build_pointer_type (gnu_template_type);

  gnu_array_type = make_node (ENUMERAL_TYPE);
  TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
  TYPE_DUMMY_P (gnu_array_type) = 1;
  gnu_ptr_array = build_pointer_type (gnu_array_type);

  gnu_fat_type = make_node (RECORD_TYPE);
  /* Build a stub DECL to trigger the special processing for fat pointer types
     in gnat_pushdecl.  */
  TYPE_NAME (gnu_fat_type)
    = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
                             gnu_fat_type);
  fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
                              gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
  DECL_CHAIN (fields)
    = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
                         gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
  finish_fat_pointer_type (gnu_fat_type, fields);
  SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
  /* Suppress debug info until after the type is completed.  */
  TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;

  gnu_object_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
  TYPE_DUMMY_P (gnu_object_type) = 1;

  TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
  TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
}
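
/* For reference, the dummy fat pointer built above has the same shape as the
   real fat pointers used for unconstrained arrays; in rough C terms it
   corresponds to the following layout (illustrative sketch only, the actual
   types are GCC trees built at run time):

     struct fat_pointer
     {
       struct array  *p_array;    // data: points to the array elements (XUA)
       struct bounds *p_bounds;   // template: points to the bounds (XUB)
     };

   A thin pointer, by contrast, is a single pointer into an object record
   (XUT) that holds both the bounds and the data.  */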

/* Return true if we are in the global binding level.  */

bool
global_bindings_p (void)
{
  return force_global || !current_function_decl;
}

/* Enter a new binding level.  */

void
gnat_pushlevel (void)
{
  struct gnat_binding_level *newlevel = NULL;

  /* Reuse a struct for this binding level, if there is one.  */
  if (free_binding_level)
    {
      newlevel = free_binding_level;
      free_binding_level = free_binding_level->chain;
    }
  else
    newlevel = ggc_alloc<gnat_binding_level> ();

  /* Use a free BLOCK, if any; otherwise, allocate one.  */
  if (free_block_chain)
    {
      newlevel->block = free_block_chain;
      free_block_chain = BLOCK_CHAIN (free_block_chain);
      BLOCK_CHAIN (newlevel->block) = NULL_TREE;
    }
  else
    newlevel->block = make_node (BLOCK);

  /* Point the BLOCK we just made to its parent.  */
  if (current_binding_level)
    BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;

  BLOCK_VARS (newlevel->block) = NULL_TREE;
  BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
  TREE_USED (newlevel->block) = 1;

  /* Add this level to the front of the chain (stack) of active levels.  */
  newlevel->chain = current_binding_level;
  newlevel->jmpbuf_decl = NULL_TREE;
  current_binding_level = newlevel;
}

/* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
   and point FNDECL to this BLOCK.  */

void
set_current_block_context (tree fndecl)
{
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  set_block_for_group (current_binding_level->block);
}

/* Set the jmpbuf_decl for the current binding level to DECL.  */

void
set_block_jmpbuf_decl (tree decl)
{
  current_binding_level->jmpbuf_decl = decl;
}

/* Get the jmpbuf_decl, if any, for the current binding level.  */

tree
get_block_jmpbuf_decl (void)
{
  return current_binding_level->jmpbuf_decl;
}

/* Exit a binding level.  Set any BLOCK into the current code group.  */

void
gnat_poplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
  BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));

  /* If this is a function-level BLOCK, don't do anything.  Otherwise, if
     there are no variables, free the block and merge its subblocks into
     those of its parent block.  Otherwise, add it to the list of its
     parent.  */
  if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
    ;
  else if (!BLOCK_VARS (block))
    {
      BLOCK_SUBBLOCKS (level->chain->block)
        = block_chainon (BLOCK_SUBBLOCKS (block),
                         BLOCK_SUBBLOCKS (level->chain->block));
      BLOCK_CHAIN (block) = free_block_chain;
      free_block_chain = block;
    }
  else
    {
      BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
      BLOCK_SUBBLOCKS (level->chain->block) = block;
      TREE_USED (block) = 1;
      set_block_for_group (block);
    }

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}

/* Exit a binding level and discard the associated BLOCK.  */

void
gnat_zaplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  BLOCK_CHAIN (block) = free_block_chain;
  free_block_chain = block;

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}
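
/* The entry points above are meant to be used in matched pairs around each
   lexical scope that gigi creates: gnat_pushlevel opens a scope and gives it
   a BLOCK, declarations pushed in between land in that BLOCK, and
   gnat_poplevel closes the scope, chaining the BLOCK into its parent (or
   recycling it if it holds no variables).  For the outermost scope of a
   subprogram, set_current_block_context attaches the BLOCK to the
   FUNCTION_DECL so that it becomes the function's DECL_INITIAL, while
   gnat_zaplevel is the escape hatch for scopes that turn out to be unneeded
   and simply discards the BLOCK.  A minimal sketch (not code from this
   unit):

     gnat_pushlevel ();
     ... translate declarations and statements ...
     gnat_poplevel ();
*/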

/* Set the context of TYPE and its parallel types (if any) to CONTEXT.  */

static void
gnat_set_type_context (tree type, tree context)
{
  tree decl = TYPE_STUB_DECL (type);

  TYPE_CONTEXT (type) = context;

  while (decl && DECL_PARALLEL_TYPE (decl))
    {
      tree parallel_type = DECL_PARALLEL_TYPE (decl);

      /* Give a context to the parallel types and their stub decl, if any.
         Some parallel types seem to be present in multiple parallel type
         chains, so don't mess with their context if they already have one.  */
      if (!TYPE_CONTEXT (parallel_type))
        {
          if (TYPE_STUB_DECL (parallel_type))
            DECL_CONTEXT (TYPE_STUB_DECL (parallel_type)) = context;
          TYPE_CONTEXT (parallel_type) = context;
        }

      decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
    }
}

/* Return the innermost scope, starting at GNAT_NODE, that we are interested
   in for the debug info, or Empty if there is no such scope.  If not NULL,
   set IS_SUBPROGRAM to whether the returned entity is a subprogram.  */

Entity_Id
get_debug_scope (Node_Id gnat_node, bool *is_subprogram)
{
  Entity_Id gnat_entity;

  if (is_subprogram)
    *is_subprogram = false;

  if (Nkind (gnat_node) == N_Defining_Identifier
      || Nkind (gnat_node) == N_Defining_Operator_Symbol)
    gnat_entity = Scope (gnat_node);
  else
    return Empty;

  while (Present (gnat_entity))
    {
      switch (Ekind (gnat_entity))
        {
        case E_Function:
        case E_Procedure:
          if (Present (Protected_Body_Subprogram (gnat_entity)))
            gnat_entity = Protected_Body_Subprogram (gnat_entity);

          /* If the scope is a subprogram, then just rely on
             current_function_decl, so that we don't have to defer
             anything.  This is needed because other places rely on the
             validity of the DECL_CONTEXT attribute of FUNCTION_DECL nodes.  */
          if (is_subprogram)
            *is_subprogram = true;
          return gnat_entity;

        case E_Record_Type:
        case E_Record_Subtype:
          return gnat_entity;

        default:
          /* By default, we are not interested in this particular scope: go to
             the outer one.  */
          break;
        }

      gnat_entity = Scope (gnat_entity);
    }

  return Empty;
}

/* If N is NULL, set TYPE's context to CONTEXT.  Defer this to the processing
   of N otherwise.  */

static void
defer_or_set_type_context (tree type, tree context,
                           struct deferred_decl_context_node *n)
{
  if (n)
    add_deferred_type_context (n, type);
  else
    gnat_set_type_context (type, context);
}

/* Return global_context, but create it first if need be.  */

static tree
get_global_context (void)
{
  if (!global_context)
    {
      global_context = build_translation_unit_decl (NULL_TREE);
      debug_hooks->register_main_translation_unit (global_context);
    }

  return global_context;
}

/* Record DECL as belonging to the current lexical scope and use GNAT_NODE
   for location information and flag propagation.  */

void
gnat_pushdecl (tree decl, Node_Id gnat_node)
{
  tree context = NULL_TREE;
  struct deferred_decl_context_node *deferred_decl_context = NULL;

  /* If explicitly asked to make DECL global or if it's an imported nested
     object, short-circuit the regular Scope-based context computation.  */
  if (!((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || force_global == 1))
    {
      /* Rely on the GNAT scope, or fall back to the current_function_decl if
         the GNAT scope reached the global scope, if it reached a subprogram
         or the declaration is a subprogram or a variable (for them we skip
         intermediate context types because the subprogram body elaboration
         machinery and the inliner both expect a subprogram context).

         Falling back to current_function_decl is necessary for implicit
         subprograms created by gigi, such as the elaboration subprograms.  */
      bool context_is_subprogram = false;
      const Entity_Id gnat_scope
        = get_debug_scope (gnat_node, &context_is_subprogram);

      if (Present (gnat_scope)
          && !context_is_subprogram
          && TREE_CODE (decl) != FUNCTION_DECL
          && TREE_CODE (decl) != VAR_DECL)
        /* Always assume the scope has not been elaborated, thus defer the
           context propagation to the time its elaboration will be
           available.  */
        deferred_decl_context
          = add_deferred_decl_context (decl, gnat_scope, force_global);

      /* External declarations (when force_global > 0) may not be in a
         local context.  */
      else if (current_function_decl && force_global == 0)
        context = current_function_decl;
    }

  /* If either we are forced to be in global mode or if both the GNAT scope and
     the current_function_decl did not help in determining the context, use the
     global scope.  */
  if (!deferred_decl_context && !context)
    context = get_global_context ();

  /* Functions imported in another function are not really nested.
     For really nested functions mark them initially as needing
     a static chain for uses of that flag before unnesting;
     lower_nested_functions will then recompute it.  */
  if (TREE_CODE (decl) == FUNCTION_DECL
      && !TREE_PUBLIC (decl)
      && context
      && (TREE_CODE (context) == FUNCTION_DECL
          || decl_function_context (context)))
    DECL_STATIC_CHAIN (decl) = 1;

  if (!deferred_decl_context)
    DECL_CONTEXT (decl) = context;

  TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));

  /* Set the location of DECL and emit a declaration for it.  */
  if (Present (gnat_node) && !renaming_from_generic_instantiation_p (gnat_node))
    Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));

  add_decl_expr (decl, gnat_node);

  /* Put the declaration on the list.  The list of declarations is in reverse
     order.  The list will be reversed later.  Put global declarations in the
     globals list and local ones in the current block.  But skip TYPE_DECLs
     for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
     with the debugger and aren't needed anyway.  */
  if (!(TREE_CODE (decl) == TYPE_DECL
        && TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
    {
      if (DECL_EXTERNAL (decl))
        {
          if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
            vec_safe_push (builtin_decls, decl);
        }
      else if (global_bindings_p ())
        vec_safe_push (global_decls, decl);
      else
        {
          DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
          BLOCK_VARS (current_binding_level->block) = decl;
        }
    }

  /* For the declaration of a type, set its name either if it isn't already
     set or if the previous type name was not derived from a source name.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same node, except when the names are
     both derived from source names.  */
  if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
    {
      tree t = TREE_TYPE (decl);

      /* Array and pointer types aren't tagged types in the C sense so we need
         to generate a typedef in DWARF for them and make sure it is preserved,
         unless the type is artificial.  */
      if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
          && ((TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != POINTER_TYPE)
              || DECL_ARTIFICIAL (decl)))
        ;
      /* For array and pointer types, create the DECL_ORIGINAL_TYPE that will
         generate the typedef in DWARF.  Also do that for fat pointer types
         because, even though they are tagged types in the C sense, they are
         still XUP types attached to the base array type at this point.  */
      else if (!DECL_ARTIFICIAL (decl)
               && (TREE_CODE (t) == ARRAY_TYPE
                   || TREE_CODE (t) == POINTER_TYPE
                   || TYPE_IS_FAT_POINTER_P (t)))
        {
          tree tt;
          /* ??? Copy and original type are not supposed to be variant but we
             really need a variant for the placeholder machinery to work.  */
          if (TYPE_IS_FAT_POINTER_P (t))
            tt = build_variant_type_copy (t);
          else
            {
              /* TYPE_NEXT_PTR_TO is a chain of main variants.  */
              tt = build_distinct_type_copy (TYPE_MAIN_VARIANT (t));
              if (TREE_CODE (t) == POINTER_TYPE)
                TYPE_NEXT_PTR_TO (TYPE_MAIN_VARIANT (t)) = tt;
              tt = build_qualified_type (tt, TYPE_QUALS (t));
            }
          TYPE_NAME (tt) = decl;
          defer_or_set_type_context (tt,
                                     DECL_CONTEXT (decl),
                                     deferred_decl_context);
          TREE_USED (tt) = TREE_USED (t);
          TREE_TYPE (decl) = tt;
          if (TYPE_NAME (t)
              && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
              && DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
            DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
          else
            DECL_ORIGINAL_TYPE (decl) = t;
          /* Array types need to have a name so that they can be related to
             their GNAT encodings.  */
          if (TREE_CODE (t) == ARRAY_TYPE && !TYPE_NAME (t))
            TYPE_NAME (t) = DECL_NAME (decl);
          t = NULL_TREE;
        }
      else if (TYPE_NAME (t)
               && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
               && DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
        ;
      else
        t = NULL_TREE;

      /* Propagate the name to all the variants, this is needed for the type
         qualifiers machinery to work properly (see check_qualified_type).
         Also propagate the context to them.  Note that it will be propagated
         to all parallel types too thanks to gnat_set_type_context.  */
      if (t)
        for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
          /* ??? Because of the previous kludge, we can have variants of fat
             pointer types with different names.  */
          if (!(TYPE_IS_FAT_POINTER_P (t)
                && TYPE_NAME (t)
                && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
            {
              TYPE_NAME (t) = decl;
              defer_or_set_type_context (t,
                                         DECL_CONTEXT (decl),
                                         deferred_decl_context);
            }
    }
}
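
/* To make the typedef logic above concrete: for a named Ada access type, the
   TYPE_DECL pushed here ends up with a variant copy of the POINTER_TYPE as
   its TREE_TYPE and with the original POINTER_TYPE as DECL_ORIGINAL_TYPE,
   which is what makes the DWARF back-end emit a typedef for it.  In rough C
   terms (illustrative only), the effect is comparable to

     typedef struct some_record *Access_To_Record;

   where the unnamed pointer type still exists and the typedef merely gives
   it a source-level name.  */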

/* Create a record type that contains a SIZE bytes long field of TYPE with a
   starting bit position so that it is aligned to ALIGN bits, and leaving at
   least ROOM bytes free before the field.  BASE_ALIGN is the alignment the
   record is guaranteed to get.  GNAT_NODE is used for the position of the
   associated TYPE_DECL.  */

tree
make_aligning_type (tree type, unsigned int align, tree size,
                    unsigned int base_align, int room, Node_Id gnat_node)
{
  /* We will be crafting a record type with one field at a position set to be
     the next multiple of ALIGN past record'address + room bytes.  We use a
     record placeholder to express record'address.  */
  tree record_type = make_node (RECORD_TYPE);
  tree record = build0 (PLACEHOLDER_EXPR, record_type);

  tree record_addr_st
    = convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));

  /* The diagram below summarizes the shape of what we manipulate:

                    <--------- pos ---------->
                { +------------+-------------+-----------------+
      record  =>{ |############|     ...     | field (type)    |
                { +------------+-------------+-----------------+
                  |<-- room -->|<- voffset ->|<---- size ----->|
                  o            o
                  |            |
                  record_addr  vblock_addr

     Every length is in sizetype bytes there, except "pos" which has to be
     set as a bit position in the GCC tree for the record.  */
  tree room_st = size_int (room);
  tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
  tree voffset_st, pos, field;

  tree name = TYPE_IDENTIFIER (type);

  name = concat_name (name, "ALIGN");
  TYPE_NAME (record_type) = name;

  /* Compute VOFFSET and then POS.  The next byte position multiple of some
     alignment after some address is obtained by "and"ing the alignment minus
     1 with the two's complement of the address.  */
  voffset_st = size_binop (BIT_AND_EXPR,
                           fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
                           size_int ((align / BITS_PER_UNIT) - 1));

  /* POS = (ROOM + VOFFSET) * BITS_PER_UNIT, in bitsizetype.  */
  pos = size_binop (MULT_EXPR,
                    convert (bitsizetype,
                             size_binop (PLUS_EXPR, room_st, voffset_st)),
                    bitsize_unit_node);

  /* Craft the GCC record representation.  We exceptionally do everything
     manually here because 1) our generic circuitry is not quite ready to
     handle the complex position/size expressions we are setting up, 2) we
     have a strong simplifying factor at hand: we know the maximum possible
     value of voffset, and 3) we have to set/reset at least the sizes in
     accordance with this maximum value anyway, as we need them to convey
     what should be "alloc"ated for this type.

     Use -1 as the 'addressable' indication for the field to prevent the
     creation of a bitfield.  We don't need one, it would have damaging
     consequences on the alignment computation, and create_field_decl would
     make one without this special argument, for instance because of the
     complex position expression.  */
  field = create_field_decl (get_identifier ("F"), type, record_type, size,
                             pos, 1, -1);
  TYPE_FIELDS (record_type) = field;

  SET_TYPE_ALIGN (record_type, base_align);
  TYPE_USER_ALIGN (record_type) = 1;

  TYPE_SIZE (record_type)
    = size_binop (PLUS_EXPR,
                  size_binop (MULT_EXPR, convert (bitsizetype, size),
                              bitsize_unit_node),
                  bitsize_int (align + room * BITS_PER_UNIT));
  TYPE_SIZE_UNIT (record_type)
    = size_binop (PLUS_EXPR, size,
                  size_int (room + align / BITS_PER_UNIT));

  SET_TYPE_MODE (record_type, BLKmode);
  relate_alias_sets (record_type, type, ALIAS_SET_COPY);

  /* Declare it now since it will never be declared otherwise.  This is
     necessary to ensure that its subtrees are properly marked.  */
  create_type_decl (name, record_type, true, false, gnat_node);

  return record_type;
}
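
/* A quick numerical check of the "and the two's complement" trick used for
   VOFFSET above (illustrative arithmetic only): with ALIGN = 64 bits, i.e. 8
   bytes, and a vblock address of 0x1005, we get

     voffset = (-0x1005) & (8 - 1) = 3

   and indeed 0x1005 + 3 = 0x1008 is the next multiple of 8.  When the
   address is already aligned, the negated value has zero low-order bits and
   the mask yields 0, so the field starts right at record'address + room.  */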

/* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
   as the field type of a packed record if IN_RECORD is true, or as the
   component type of a packed array if IN_RECORD is false.  See if we can
   rewrite it either as a type that has a non-BLKmode, which we can pack
   tighter in the packed record case, or as a smaller type.  If so, return
   the new type.  If not, return the original type.  */

tree
make_packable_type (tree type, bool in_record)
{
  unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type));
  unsigned HOST_WIDE_INT new_size;
  tree new_type, old_field, field_list = NULL_TREE;
  unsigned int align;

  /* No point in doing anything if the size is zero.  */
  if (size == 0)
    return type;

  new_type = make_node (TREE_CODE (type));

  /* Copy the name and flags from the old type to that of the new.
     Note that we rely on the pointer equality created here for
     TYPE_NAME to look through conversions in various places.  */
  TYPE_NAME (new_type) = TYPE_NAME (type);
  TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
  TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
  TYPE_REVERSE_STORAGE_ORDER (new_type) = TYPE_REVERSE_STORAGE_ORDER (type);
  if (TREE_CODE (type) == RECORD_TYPE)
    TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);

  /* If we are in a record and have a small size, set the alignment to
     try for an integral mode.  Otherwise set it to try for a smaller
     type with BLKmode.  */
  if (in_record && size <= MAX_FIXED_MODE_SIZE)
    {
      align = ceil_pow2 (size);
      SET_TYPE_ALIGN (new_type, align);
      new_size = (size + align - 1) & -align;
    }
  else
    {
      unsigned HOST_WIDE_INT align;

      /* Do not try to shrink the size if the RM size is not constant.  */
      if (TYPE_CONTAINS_TEMPLATE_P (type)
          || !tree_fits_uhwi_p (TYPE_ADA_SIZE (type)))
        return type;

      /* Round the RM size up to a unit boundary to get the minimal size
         for a BLKmode record.  Give up if it's already the size.  */
      new_size = tree_to_uhwi (TYPE_ADA_SIZE (type));
      new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
      if (new_size == size)
        return type;

      align = new_size & -new_size;
      SET_TYPE_ALIGN (new_type, MIN (TYPE_ALIGN (type), align));
    }

  TYPE_USER_ALIGN (new_type) = 1;

  /* Now copy the fields, keeping the position and size as we don't want
     to change the layout by propagating the packedness downwards.  */
  for (old_field = TYPE_FIELDS (type); old_field;
       old_field = DECL_CHAIN (old_field))
    {
      tree new_field_type = TREE_TYPE (old_field);
      tree new_field, new_size;

      if (RECORD_OR_UNION_TYPE_P (new_field_type)
          && !TYPE_FAT_POINTER_P (new_field_type)
          && tree_fits_uhwi_p (TYPE_SIZE (new_field_type)))
        new_field_type = make_packable_type (new_field_type, true);

      /* However, for the last field in a not already packed record type
         that is of an aggregate type, we need to use the RM size in the
         packable version of the record type, see finish_record_type.  */
      if (!DECL_CHAIN (old_field)
          && !TYPE_PACKED (type)
          && RECORD_OR_UNION_TYPE_P (new_field_type)
          && !TYPE_FAT_POINTER_P (new_field_type)
          && !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
          && TYPE_ADA_SIZE (new_field_type))
        new_size = TYPE_ADA_SIZE (new_field_type);
      else
        new_size = DECL_SIZE (old_field);

      new_field
        = create_field_decl (DECL_NAME (old_field), new_field_type, new_type,
                             new_size, bit_position (old_field),
                             TYPE_PACKED (type),
                             !DECL_NONADDRESSABLE_P (old_field));

      DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (old_field);
      SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, old_field);
      if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
        DECL_QUALIFIER (new_field) = DECL_QUALIFIER (old_field);

      DECL_CHAIN (new_field) = field_list;
      field_list = new_field;
    }

  finish_record_type (new_type, nreverse (field_list), 2, false);
  relate_alias_sets (new_type, type, ALIAS_SET_COPY);
  if (TYPE_STUB_DECL (type))
    SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
                            DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));

  /* If this is a padding record, we never want to make the size smaller
     than what was specified.  For QUAL_UNION_TYPE, also copy the size.  */
  if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      TYPE_SIZE (new_type) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
      new_size = size;
    }
  else
    {
      TYPE_SIZE (new_type) = bitsize_int (new_size);
      TYPE_SIZE_UNIT (new_type)
        = size_int ((new_size + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
    }

  if (!TYPE_CONTAINS_TEMPLATE_P (type))
    SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));

  compute_record_mode (new_type);

  /* Try harder to get a packable type if necessary, for example
     in case the record itself contains a BLKmode field.  */
  if (in_record && TYPE_MODE (new_type) == BLKmode)
    SET_TYPE_MODE (new_type,
                   mode_for_size_tree (TYPE_SIZE (new_type), MODE_INT, 1));

  /* If neither the mode nor the size has shrunk, return the old type.  */
  if (TYPE_MODE (new_type) == BLKmode && new_size >= size)
    return type;

  return new_type;
}
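
/* The two rounding idioms used above are worth spelling out (illustrative
   arithmetic only).  For a record of SIZE = 13 bits considered for an
   integral mode: ceil_pow2 (13) = 16, so ALIGN = 16 and
   NEW_SIZE = (13 + 16 - 1) & -16 = 16, i.e. the record is widened to a
   16-bit integral mode.  In the BLKmode branch, an RM size of 13 bits is
   rounded up to the next storage unit, (13 + 8 - 1) & -8 = 16 bits = 2
   bytes, and ALIGN = NEW_SIZE & -NEW_SIZE picks the largest power of two
   that divides the new size.  */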

/* Given a type TYPE, return a new type whose size is appropriate for SIZE.
   If TYPE is the best type, return it.  Otherwise, make a new type.  We
   only support new integral and pointer types.  FOR_BIASED is true if
   we are making a biased type.  */

tree
make_type_from_size (tree type, tree size_tree, bool for_biased)
{
  unsigned HOST_WIDE_INT size;
  bool biased_p;
  tree new_type;

  /* If size indicates an error, just return TYPE to avoid propagating
     the error.  Likewise if it's too large to represent.  */
  if (!size_tree || !tree_fits_uhwi_p (size_tree))
    return type;

  size = tree_to_uhwi (size_tree);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      biased_p = (TREE_CODE (type) == INTEGER_TYPE
                  && TYPE_BIASED_REPRESENTATION_P (type));

      /* Integer types with precision 0 are forbidden.  */
      if (size == 0)
        size = 1;

      /* Only do something if the type isn't a packed array type and doesn't
         already have the proper size and the size isn't too large.  */
      if (TYPE_IS_PACKED_ARRAY_TYPE_P (type)
          || (TYPE_PRECISION (type) == size && biased_p == for_biased)
          || size > LONG_LONG_TYPE_SIZE)
        break;

      biased_p |= for_biased;
      if (TYPE_UNSIGNED (type) || biased_p)
        new_type = make_unsigned_type (size);
      else
        new_type = make_signed_type (size);
      TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
      SET_TYPE_RM_MIN_VALUE (new_type, TYPE_MIN_VALUE (type));
      SET_TYPE_RM_MAX_VALUE (new_type, TYPE_MAX_VALUE (type));
      /* Copy the name to show that it's essentially the same type and
         not a subrange type.  */
      TYPE_NAME (new_type) = TYPE_NAME (type);
      TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
      SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
      return new_type;

    case RECORD_TYPE:
      /* Do something if this is a fat pointer, in which case we
         may need to return the thin pointer.  */
      if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
        {
          machine_mode p_mode = mode_for_size (size, MODE_INT, 0);
          if (!targetm.valid_pointer_mode (p_mode))
            p_mode = ptr_mode;
          return
            build_pointer_type_for_mode
              (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
               p_mode, 0);
        }
      break;

    case POINTER_TYPE:
      /* Only do something if this is a thin pointer, in which case we
         may need to return the fat pointer.  */
      if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
        return
          build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
      break;

    default:
      break;
    }

  return type;
}
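
/* This is what allows, for instance, a size specification on an access type
   to switch between the two unconstrained-array access layouts: asking for
   fewer than two pointers' worth of bits on a fat pointer yields the thin
   pointer into the object record, and asking for at least two pointers on a
   thin pointer yields the fat pointer back.  For a scalar, e.g. an
   INTEGER_TYPE with a requested size of 8 bits, the function simply returns
   a fresh 8-bit integral type carrying the same RM bounds and name.  */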

/* See if the data pointed to by the hash table slot is marked.  */

int
pad_type_hasher::keep_cache_entry (pad_type_hash *&t)
{
  return ggc_marked_p (t->type);
}

/* Return true iff the padded types are equivalent.  */

bool
pad_type_hasher::equal (pad_type_hash *t1, pad_type_hash *t2)
{
  tree type1, type2;

  if (t1->hash != t2->hash)
    return 0;

  type1 = t1->type;
  type2 = t2->type;

  /* We consider that the padded types are equivalent if they pad the same type
     and have the same size, alignment, RM size and storage order.  Taking the
     mode into account is redundant since it is determined by the others.  */
  return
    TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
    && TYPE_SIZE (type1) == TYPE_SIZE (type2)
    && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
    && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2)
    && TYPE_REVERSE_STORAGE_ORDER (type1) == TYPE_REVERSE_STORAGE_ORDER (type2);
}

/* Look up the padded TYPE in the hash table and return its canonical version
   if it exists; otherwise, insert it into the hash table.  */

static tree
lookup_and_insert_pad_type (tree type)
{
  hashval_t hashcode;
  struct pad_type_hash in, *h;

  hashcode
    = iterative_hash_object (TYPE_HASH (TREE_TYPE (TYPE_FIELDS (type))), 0);
  hashcode = iterative_hash_expr (TYPE_SIZE (type), hashcode);
  hashcode = iterative_hash_hashval_t (TYPE_ALIGN (type), hashcode);
  hashcode = iterative_hash_expr (TYPE_ADA_SIZE (type), hashcode);

  in.hash = hashcode;
  in.type = type;
  h = pad_type_hash_table->find_with_hash (&in, hashcode);
  if (h)
    return h->type;

  h = ggc_alloc<pad_type_hash> ();
  h->hash = hashcode;
  h->type = type;
  *pad_type_hash_table->find_slot_with_hash (h, hashcode, INSERT) = h;
  return NULL_TREE;
}
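
/* Callers use this as a hash-consing primitive: build a candidate padded
   type, then either adopt the canonical one or let the candidate become
   canonical.  The expected pattern (a sketch, mirroring the uses later in
   this file) is:

     tree canonical = lookup_and_insert_pad_type (record);
     if (canonical)
       record = canonical;   // an equivalent padded type already existed
     // otherwise RECORD is now the canonical version for future lookups
*/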
1220
842d4ee2 1221/* Ensure that TYPE has SIZE and ALIGN. Make and return a new padded type
5cb7516d 1222 if needed. We have already verified that SIZE and ALIGN are large enough.
842d4ee2
EB
1223 GNAT_ENTITY is used to name the resulting record and to issue a warning.
1224 IS_COMPONENT_TYPE is true if this is being done for the component type of
1225 an array. IS_USER_TYPE is true if the original type needs to be completed.
1226 DEFINITION is true if this type is being defined. SET_RM_SIZE is true if
afc737f0
EB
1227 the RM size of the resulting type is to be set to SIZE too; in this case,
1228 the padded type is canonicalized before being returned. */
842d4ee2
EB
1229
1230tree
1231maybe_pad_type (tree type, tree size, unsigned int align,
1232 Entity_Id gnat_entity, bool is_component_type,
1233 bool is_user_type, bool definition, bool set_rm_size)
1234{
1235 tree orig_size = TYPE_SIZE (type);
44e9e3ec 1236 unsigned int orig_align = TYPE_ALIGN (type);
842d4ee2
EB
1237 tree record, field;
1238
1239 /* If TYPE is a padded type, see if it agrees with any size and alignment
1240 we were given. If so, return the original type. Otherwise, strip
1241 off the padding, since we will either be returning the inner type
1242 or repadding it. If no size or alignment is specified, use that of
1243 the original padded type. */
1244 if (TYPE_IS_PADDING_P (type))
1245 {
1246 if ((!size
44e9e3ec
EB
1247 || operand_equal_p (round_up (size, orig_align), orig_size, 0))
1248 && (align == 0 || align == orig_align))
842d4ee2
EB
1249 return type;
1250
1251 if (!size)
44e9e3ec 1252 size = orig_size;
842d4ee2 1253 if (align == 0)
44e9e3ec 1254 align = orig_align;
842d4ee2
EB
1255
1256 type = TREE_TYPE (TYPE_FIELDS (type));
1257 orig_size = TYPE_SIZE (type);
44e9e3ec 1258 orig_align = TYPE_ALIGN (type);
842d4ee2
EB
1259 }
1260
1261 /* If the size is either not being changed or is being made smaller (which
1262 is not done here and is only valid for bitfields anyway), show the size
1263 isn't changing. Likewise, clear the alignment if it isn't being
1264 changed. Then return if we aren't doing anything. */
1265 if (size
1266 && (operand_equal_p (size, orig_size, 0)
1267 || (TREE_CODE (orig_size) == INTEGER_CST
1268 && tree_int_cst_lt (size, orig_size))))
1269 size = NULL_TREE;
1270
44e9e3ec 1271 if (align == orig_align)
842d4ee2
EB
1272 align = 0;
1273
1274 if (align == 0 && !size)
1275 return type;
1276
1277 /* If requested, complete the original type and give it a name. */
1278 if (is_user_type)
1279 create_type_decl (get_entity_name (gnat_entity), type,
74746d49 1280 !Comes_From_Source (gnat_entity),
842d4ee2
EB
1281 !(TYPE_NAME (type)
1282 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1283 && DECL_IGNORED_P (TYPE_NAME (type))),
1284 gnat_entity);
1285
1286 /* We used to modify the record in place in some cases, but that could
1287 generate incorrect debugging information. So make a new record
1288 type and name. */
1289 record = make_node (RECORD_TYPE);
1290 TYPE_PADDING_P (record) = 1;
1291
7c775aca 1292 /* ??? Padding types around packed array implementation types will be
2d595887
PMR
1293 considered as root types in the array descriptor language hook (see
1294 gnat_get_array_descr_info). Give them the original packed array type
1295 name so that the one coming from sources appears in the debugging
1296 information. */
7c775aca
EB
1297 if (TYPE_IMPL_PACKED_ARRAY_P (type)
1298 && TYPE_ORIGINAL_PACKED_ARRAY (type)
1299 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
1300 TYPE_NAME (record) = TYPE_NAME (TYPE_ORIGINAL_PACKED_ARRAY (type));
2d595887 1301 else if (Present (gnat_entity))
842d4ee2
EB
1302 TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD");
1303
fe37c7af 1304 SET_TYPE_ALIGN (record, align ? align : orig_align);
842d4ee2
EB
1305 TYPE_SIZE (record) = size ? size : orig_size;
1306 TYPE_SIZE_UNIT (record)
1307 = convert (sizetype,
1308 size_binop (CEIL_DIV_EXPR, TYPE_SIZE (record),
1309 bitsize_unit_node));
1310
1311 /* If we are changing the alignment and the input type is a record with
1312 BLKmode and a small constant size, try to make a form that has an
1313 integral mode. This might allow the padding record to also have an
1314 integral mode, which will be much more efficient. There is no point
1315 in doing so if a size is specified unless it is also a small constant
1316 size and it is incorrect to do so if we cannot guarantee that the mode
1317 will be naturally aligned since the field must always be addressable.
1318
1319 ??? This might not always be a win when done for a stand-alone object:
1320 since the nominal and the effective type of the object will now have
1321 different modes, a VIEW_CONVERT_EXPR will be required for converting
1322 between them and it might be hard to overcome afterwards, including
1323 at the RTL level when the stand-alone object is accessed as a whole. */
1324 if (align != 0
1325 && RECORD_OR_UNION_TYPE_P (type)
1326 && TYPE_MODE (type) == BLKmode
1327 && !TYPE_BY_REFERENCE_P (type)
1328 && TREE_CODE (orig_size) == INTEGER_CST
1329 && !TREE_OVERFLOW (orig_size)
1330 && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
1331 && (!size
1332 || (TREE_CODE (size) == INTEGER_CST
1333 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
1334 {
1335 tree packable_type = make_packable_type (type, true);
1336 if (TYPE_MODE (packable_type) != BLKmode
1337 && align >= TYPE_ALIGN (packable_type))
1338 type = packable_type;
1339 }
1340
1341 /* Now create the field with the original size. */
1366ba41
EB
1342 field = create_field_decl (get_identifier ("F"), type, record, orig_size,
1343 bitsize_zero_node, 0, 1);
842d4ee2
EB
1344 DECL_INTERNAL_P (field) = 1;
1345
afc737f0 1346 /* We will output additional debug info manually below. */
842d4ee2
EB
1347 finish_record_type (record, field, 1, false);
1348
afc737f0
EB
1349 if (gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
1350 SET_TYPE_DEBUG_TYPE (record, type);
1351
842d4ee2
EB
1352 /* Set the RM size if requested. */
1353 if (set_rm_size)
1354 {
5cb7516d
EB
1355 tree canonical_pad_type;
1356
842d4ee2
EB
1357 SET_TYPE_ADA_SIZE (record, size ? size : orig_size);
1358
1359 /* If the padded type is complete and has constant size, we canonicalize
1360 it by means of the hash table. This is consistent with the language
1361 semantics and ensures that gigi and the middle-end have a common view
1362 of these padded types. */
5cb7516d
EB
1363 if (TREE_CONSTANT (TYPE_SIZE (record))
1364 && (canonical_pad_type = lookup_and_insert_pad_type (record)))
842d4ee2 1365 {
5cb7516d
EB
1366 record = canonical_pad_type;
1367 goto built;
842d4ee2
EB
1368 }
1369 }
1370
1371 /* Unless debugging information isn't being written for the input type,
1372 write a record that shows what we are a subtype of and also make a
eb59e428
PMR
1373 variable that indicates our size, if still variable. */
1374 if (TREE_CODE (orig_size) != INTEGER_CST
842d4ee2
EB
1375 && TYPE_NAME (record)
1376 && TYPE_NAME (type)
1377 && !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1378 && DECL_IGNORED_P (TYPE_NAME (type))))
1379 {
9dba4b55 1380 tree name = TYPE_IDENTIFIER (record);
c1a569ef
EB
1381 tree size_unit = TYPE_SIZE_UNIT (record);
1382
1383 /* A variable that holds the size is required even with no encoding since
1384 it will be referenced by debugging information attributes. At global
1385 level, we need a single variable across all translation units. */
1386 if (size
1387 && TREE_CODE (size) != INTEGER_CST
1388 && (definition || global_bindings_p ()))
1389 {
eb59e428
PMR
1390 /* Whether or not gnat_entity comes from source, this XVZ variable is
1391 is a compilation artifact. */
c1a569ef
EB
1392 size_unit
1393 = create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
1394 size_unit, true, global_bindings_p (),
1395 !definition && global_bindings_p (), false,
2056c5ed 1396 false, true, true, NULL, gnat_entity);
c1a569ef
EB
1397 TYPE_SIZE_UNIT (record) = size_unit;
1398 }
1399
eb59e428
PMR
1400 /* There is no need to show what we are a subtype of when outputting as
1401 few encodings as possible: regular debugging infomation makes this
1402 redundant. */
1403 if (gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
1404 {
1405 tree marker = make_node (RECORD_TYPE);
1406 tree orig_name = TYPE_IDENTIFIER (type);
1407
1408 TYPE_NAME (marker) = concat_name (name, "XVS");
1409 finish_record_type (marker,
1410 create_field_decl (orig_name,
1411 build_reference_type (type),
1412 marker, NULL_TREE, NULL_TREE,
1413 0, 0),
1414 0, true);
1415 TYPE_SIZE_UNIT (marker) = size_unit;
1416
1417 add_parallel_type (record, marker);
1418 }
842d4ee2
EB
1419 }
1420
842d4ee2 1421built:
80746f5d 1422 /* If a simple size was explicitly given, maybe issue a warning. */
842d4ee2
EB
1423 if (!size
1424 || TREE_CODE (size) == COND_EXPR
1425 || TREE_CODE (size) == MAX_EXPR
80746f5d 1426 || No (gnat_entity))
842d4ee2
EB
1427 return record;
1428
80746f5d
EB
1429 /* But don't do it if we are just annotating types and the type is tagged or
1430 concurrent, since these types aren't fully laid out in this mode. */
1431 if (type_annotate_only)
1432 {
1433 Entity_Id gnat_type
1434 = is_component_type
1435 ? Component_Type (gnat_entity) : Etype (gnat_entity);
1436
1437 if (Is_Tagged_Type (gnat_type) || Is_Concurrent_Type (gnat_type))
1438 return record;
1439 }
1440
1441 /* Take the original size as the maximum size of the input if there was an
1442 unconstrained record involved and round it up to the alignment,
1443 if one was specified, but only for aggregate types. */
842d4ee2
EB
1444 if (CONTAINS_PLACEHOLDER_P (orig_size))
1445 orig_size = max_size (orig_size, true);
1446
f42dd37f 1447 if (align && AGGREGATE_TYPE_P (type))
842d4ee2
EB
1448 orig_size = round_up (orig_size, align);
1449
1450 if (!operand_equal_p (size, orig_size, 0)
1451 && !(TREE_CODE (size) == INTEGER_CST
1452 && TREE_CODE (orig_size) == INTEGER_CST
1453 && (TREE_OVERFLOW (size)
1454 || TREE_OVERFLOW (orig_size)
1455 || tree_int_cst_lt (size, orig_size))))
1456 {
1457 Node_Id gnat_error_node = Empty;
1458
1a4cb227
AC
1459 /* For a packed array, post the message on the original array type. */
1460 if (Is_Packed_Array_Impl_Type (gnat_entity))
842d4ee2
EB
1461 gnat_entity = Original_Array_Type (gnat_entity);
1462
1463 if ((Ekind (gnat_entity) == E_Component
1464 || Ekind (gnat_entity) == E_Discriminant)
1465 && Present (Component_Clause (gnat_entity)))
1466 gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
1467 else if (Present (Size_Clause (gnat_entity)))
1468 gnat_error_node = Expression (Size_Clause (gnat_entity));
1469
1470 /* Generate message only for entities that come from source, since
1471 if we have an entity created by expansion, the message will be
1472 generated for some other corresponding source entity. */
1473 if (Comes_From_Source (gnat_entity))
1474 {
1475 if (Present (gnat_error_node))
1476 post_error_ne_tree ("{^ }bits of & unused?",
1477 gnat_error_node, gnat_entity,
1478 size_diffop (size, orig_size));
1479 else if (is_component_type)
1480 post_error_ne_tree ("component of& padded{ by ^ bits}?",
1481 gnat_entity, gnat_entity,
1482 size_diffop (size, orig_size));
1483 }
1484 }
1485
1486 return record;
1487}
ee45a32d
EB
1488
1489/* Return a copy of the padded TYPE but with reverse storage order. */
1490
1491tree
1492set_reverse_storage_order_on_pad_type (tree type)
1493{
1494 tree field, canonical_pad_type;
1495
4232ebbb
ML
1496 if (flag_checking)
1497 {
1498 /* The function is only called for pad types whose inner type is scalar. */
1499 tree inner_type = TREE_TYPE (TYPE_FIELDS (type));
1500 gcc_assert (!AGGREGATE_TYPE_P (inner_type)
1501 && !VECTOR_TYPE_P (inner_type));
1502 }
ee45a32d
EB
1503
1504 /* This is required for the canonicalization. */
1505 gcc_assert (TREE_CONSTANT (TYPE_SIZE (type)));
1506
1507 field = copy_node (TYPE_FIELDS (type));
1508 type = copy_type (type);
1509 DECL_CONTEXT (field) = type;
1510 TYPE_FIELDS (type) = field;
1511 TYPE_REVERSE_STORAGE_ORDER (type) = 1;
1512 canonical_pad_type = lookup_and_insert_pad_type (type);
1513 return canonical_pad_type ? canonical_pad_type : type;
1514}
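/* Illustrative sketch (hypothetical usage, not from the upstream sources):
   given a pad type GNU_PAD_TYPE built by maybe_pad_type for a scalar field
   of a record with reverse storage order, a caller might do:

     gnu_pad_type = set_reverse_storage_order_on_pad_type (gnu_pad_type);

   The result is the canonical reverse-SO pad type if an equivalent one was
   already present in the hash table, or the freshly made copy otherwise.  */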
842d4ee2
EB
1515\f
1516/* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
1517 If this is a multi-dimensional array type, do this recursively.
1518
1519 OP may be
1520 - ALIAS_SET_COPY: the new set is made a copy of the old one.
1521 - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
1522 - ALIAS_SET_SUBSET: the new set is made a subset of the old one. */
1523
1524void
1525relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
1526{
1527 /* Remove any padding from GNU_OLD_TYPE. It doesn't matter in the case
1528 of a one-dimensional array, since the padding has the same alias set
1529 as the field type, but if it's a multi-dimensional array, we need to
1530 see the inner types. */
1531 while (TREE_CODE (gnu_old_type) == RECORD_TYPE
1532 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
1533 || TYPE_PADDING_P (gnu_old_type)))
1534 gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));
1535
1536 /* Unconstrained array types are deemed incomplete and would thus be given
1537 alias set 0. Retrieve the underlying array type. */
1538 if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
1539 gnu_old_type
1540 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
1541 if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
1542 gnu_new_type
1543 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));
1544
1545 if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
1546 && TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
1547 && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
1548 relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);
1549
1550 switch (op)
1551 {
1552 case ALIAS_SET_COPY:
1553 /* The alias set shouldn't be copied between array types with different
1554 aliasing settings because this can break the aliasing relationship
1555 between the array type and its element type. */
9abe8b74 1556 if (flag_checking || flag_strict_aliasing)
842d4ee2
EB
1557 gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
1558 && TREE_CODE (gnu_old_type) == ARRAY_TYPE
1559 && TYPE_NONALIASED_COMPONENT (gnu_new_type)
1560 != TYPE_NONALIASED_COMPONENT (gnu_old_type)));
1561
1562 TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
1563 break;
1564
1565 case ALIAS_SET_SUBSET:
1566 case ALIAS_SET_SUPERSET:
1567 {
1568 alias_set_type old_set = get_alias_set (gnu_old_type);
1569 alias_set_type new_set = get_alias_set (gnu_new_type);
1570
1571 /* Do nothing if the alias sets conflict. This ensures that we
1572 never call record_alias_subset several times for the same pair
1573 or at all for alias set 0. */
1574 if (!alias_sets_conflict_p (old_set, new_set))
1575 {
1576 if (op == ALIAS_SET_SUBSET)
1577 record_alias_subset (old_set, new_set);
1578 else
1579 record_alias_subset (new_set, old_set);
1580 }
1581 }
1582 break;
1583
1584 default:
1585 gcc_unreachable ();
1586 }
1587
1588 record_component_aliases (gnu_new_type);
1589}
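/* Illustrative sketch (hypothetical usage, not from the upstream sources):
   for a new array type that shares the representation of an existing one,
   gigi would typically copy the alias set:

     relate_alias_sets (gnu_new_type, gnu_old_type, ALIAS_SET_COPY);

   whereas ALIAS_SET_SUBSET and ALIAS_SET_SUPERSET record a one-way
   relationship between the two sets via record_alias_subset.  */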
1590\f
1aeb40dd 1591/* Record TYPE as a builtin type for Ada. NAME is the name of the type.
c1a569ef 1592 ARTIFICIAL_P is true if the type was generated by the compiler. */
a1ab4c31
AC
1593
1594void
1aeb40dd 1595record_builtin_type (const char *name, tree type, bool artificial_p)
a1ab4c31 1596{
c172df28
AH
1597 tree type_decl = build_decl (input_location,
1598 TYPE_DECL, get_identifier (name), type);
1aeb40dd 1599 DECL_ARTIFICIAL (type_decl) = artificial_p;
bc712852 1600 TYPE_ARTIFICIAL (type) = artificial_p;
10069d53 1601 gnat_pushdecl (type_decl, Empty);
a1ab4c31 1602
10069d53
EB
1603 if (debug_hooks->type_decl)
1604 debug_hooks->type_decl (type_decl, false);
a1ab4c31
AC
1605}
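/* Illustrative sketch (hypothetical usage, not from the upstream sources):
   registering a standard type under a given name might look like:

     record_builtin_type ("integer", integer_type_node, false);

   which builds the TYPE_DECL, pushes it in the current binding level and
   notifies the debug back-end.  */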
1606\f
825da0d2
EB
1607/* Finish constructing the character type CHAR_TYPE.
1608
1609 In Ada character types are enumeration types and, as a consequence, are
1610 represented in the front-end by integral types holding the positions of
1611 the enumeration values as defined by the language, which means that the
1612 integral types are unsigned.
1613
1614 Unfortunately the signedness of 'char' in C is implementation-defined
1615 and GCC even has the option -fsigned-char to toggle it on the command line.
1616 Since GNAT's philosophy is to be compatible with C by default, to wit
1617 Interfaces.C.char is defined as a mere copy of Character, we may need
1618 to declare character types as signed types in GENERIC and generate the
1619 necessary adjustments to make them behave as unsigned types.
1620
1621 The overall strategy is as follows: if 'char' is unsigned, do nothing;
1622 if 'char' is signed, translate character types of CHAR_TYPE_SIZE and
1623 character subtypes with RM_Size = Esize = CHAR_TYPE_SIZE into signed
1624 types. The idea is to ensure that the bit pattern contained in the
1625 Esize'd objects is not changed, even though the numerical value will
1626 be interpreted differently depending on the signedness.
1627
1628 For character types, the bounds are implicit and, therefore, need to
1629 be adjusted. Moreover, the debug info needs the unsigned version. */
1630
1631void
1632finish_character_type (tree char_type)
1633{
1634 if (TYPE_UNSIGNED (char_type))
1635 return;
1636
7005800c
EB
1637 /* Make a copy of a generic unsigned version since we'll modify it. */
1638 tree unsigned_char_type
1639 = (char_type == char_type_node
1640 ? unsigned_char_type_node
1641 : copy_type (gnat_unsigned_type_for (char_type)));
825da0d2
EB
1642
1643 TYPE_NAME (unsigned_char_type) = TYPE_NAME (char_type);
1644 TYPE_STRING_FLAG (unsigned_char_type) = TYPE_STRING_FLAG (char_type);
1645 TYPE_ARTIFICIAL (unsigned_char_type) = TYPE_ARTIFICIAL (char_type);
1646
1647 SET_TYPE_DEBUG_TYPE (char_type, unsigned_char_type);
1648 SET_TYPE_RM_MIN_VALUE (char_type, TYPE_MIN_VALUE (unsigned_char_type));
1649 SET_TYPE_RM_MAX_VALUE (char_type, TYPE_MAX_VALUE (unsigned_char_type));
1650}
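/* Worked example (illustrative, not from the upstream sources): with
   -fsigned-char, the character at position 200 is stored as the bit
   pattern 16#C8#, which a signed 8-bit GENERIC type reads as -56; the RM
   bounds and debug type taken above from the unsigned copy preserve the
   0 .. 255 language view of the type.  */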
1651
e3edbd56
EB
1652/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1653 finish constructing the record type as a fat pointer type. */
1654
1655void
1656finish_fat_pointer_type (tree record_type, tree field_list)
1657{
1658 /* Make sure we can put it into a register. */
5da8c011 1659 if (STRICT_ALIGNMENT)
fe37c7af 1660 SET_TYPE_ALIGN (record_type, MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE));
e3edbd56
EB
1661
1662 /* Show what it really is. */
1663 TYPE_FAT_POINTER_P (record_type) = 1;
1664
1665 /* Do not emit debug info for it since the types of its fields may still be
1666 incomplete at this point. */
1667 finish_record_type (record_type, field_list, 0, false);
1668
1669 /* Force type_contains_placeholder_p to return true on it. Although the
1670 PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
1671 type but the representation of the unconstrained array. */
1672 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
1673}
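/* Illustrative sketch (hypothetical usage, not from the upstream sources):
   a fat pointer is a 2-field record that might be built along these lines,
   with ptr_array_type and ptr_bounds_type standing for the pointer to the
   array data and the pointer to the bounds template:

     tree gnu_fat_type = make_node (RECORD_TYPE);
     tree fields
       = create_field_decl (get_identifier ("P_ARRAY"), ptr_array_type,
                            gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
     DECL_CHAIN (fields)
       = create_field_decl (get_identifier ("P_BOUNDS"), ptr_bounds_type,
                            gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
     finish_fat_pointer_type (gnu_fat_type, fields);  */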
1674
032d1b71 1675/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
a1ab4c31
AC
1676 finish constructing the record or union type. If REP_LEVEL is zero, this
1677 record has no representation clause and so will be entirely laid out here.
1678 If REP_LEVEL is one, this record has a representation clause and has been
1679 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
1680 this record is derived from a parent record and thus inherits its layout;
032d1b71 1681 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
afc737f0 1682 additional debug info needs to be output for this type. */
a1ab4c31
AC
1683
1684void
032d1b71
EB
1685finish_record_type (tree record_type, tree field_list, int rep_level,
1686 bool debug_info_p)
a1ab4c31
AC
1687{
1688 enum tree_code code = TREE_CODE (record_type);
9dba4b55 1689 tree name = TYPE_IDENTIFIER (record_type);
a1ab4c31
AC
1690 tree ada_size = bitsize_zero_node;
1691 tree size = bitsize_zero_node;
1692 bool had_size = TYPE_SIZE (record_type) != 0;
1693 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
1694 bool had_align = TYPE_ALIGN (record_type) != 0;
1695 tree field;
1696
032d1b71 1697 TYPE_FIELDS (record_type) = field_list;
a1ab4c31 1698
10069d53
EB
1699 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
1700 generate debug info and have a parallel type. */
10069d53 1701 TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);
a1ab4c31
AC
1702
1703 /* Globally initialize the record first. If this is a rep'ed record,
1704 that just means some initializations; otherwise, layout the record. */
1705 if (rep_level > 0)
1706 {
fe37c7af
MM
1707 SET_TYPE_ALIGN (record_type, MAX (BITS_PER_UNIT,
1708 TYPE_ALIGN (record_type)));
a1ab4c31
AC
1709
1710 if (!had_size_unit)
1711 TYPE_SIZE_UNIT (record_type) = size_zero_node;
b1fa9126 1712
a1ab4c31
AC
1713 if (!had_size)
1714 TYPE_SIZE (record_type) = bitsize_zero_node;
1715
1716 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
1717 out just like a UNION_TYPE, since the size will be fixed. */
1718 else if (code == QUAL_UNION_TYPE)
1719 code = UNION_TYPE;
1720 }
1721 else
1722 {
1723 /* Ensure there isn't a size already set. There can be in an error
1724 case where there is a rep clause but all fields have errors and
1725 no longer have a position. */
1726 TYPE_SIZE (record_type) = 0;
bb358f1c
EB
1727
1728 /* Ensure we use the traditional GCC layout for bitfields when we need
1729 to pack the record type or have a representation clause. The other
1730 possible layout (Microsoft C compiler), if available, would prevent
1731 efficient packing in almost all cases. */
1732#ifdef TARGET_MS_BITFIELD_LAYOUT
1733 if (TARGET_MS_BITFIELD_LAYOUT && TYPE_PACKED (record_type))
1734 decl_attributes (&record_type,
1735 tree_cons (get_identifier ("gcc_struct"),
1736 NULL_TREE, NULL_TREE),
1737 ATTR_FLAG_TYPE_IN_PLACE);
1738#endif
1739
a1ab4c31
AC
1740 layout_type (record_type);
1741 }
1742
1743 /* At this point, the position and size of each field is known. It was
1744 either set before entry by a rep clause, or by laying out the type above.
1745
1746 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
1747 to compute the Ada size; the GCC size and alignment (for rep'ed records
1748 that are not padding types); and the mode (for rep'ed records). We also
1749 clear the DECL_BIT_FIELD indication for the cases we know have not been
1750 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
1751
1752 if (code == QUAL_UNION_TYPE)
032d1b71 1753 field_list = nreverse (field_list);
a1ab4c31 1754
910ad8de 1755 for (field = field_list; field; field = DECL_CHAIN (field))
a1ab4c31
AC
1756 {
1757 tree type = TREE_TYPE (field);
1758 tree pos = bit_position (field);
1759 tree this_size = DECL_SIZE (field);
1760 tree this_ada_size;
1761
e1e5852c 1762 if (RECORD_OR_UNION_TYPE_P (type)
315cff15 1763 && !TYPE_FAT_POINTER_P (type)
a1ab4c31
AC
1764 && !TYPE_CONTAINS_TEMPLATE_P (type)
1765 && TYPE_ADA_SIZE (type))
1766 this_ada_size = TYPE_ADA_SIZE (type);
1767 else
1768 this_ada_size = this_size;
1769
1770 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
1771 if (DECL_BIT_FIELD (field)
1772 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
1773 {
1774 unsigned int align = TYPE_ALIGN (type);
1775
1776 /* In the general case, type alignment is required. */
1777 if (value_factor_p (pos, align))
1778 {
1779 /* The enclosing record type must be sufficiently aligned.
1780 Otherwise, if no alignment was specified for it and it
1781 has been laid out already, bump its alignment to the
14ecca2e
EB
1782 desired one if this is compatible with its size and
1783 maximum alignment, if any. */
a1ab4c31
AC
1784 if (TYPE_ALIGN (record_type) >= align)
1785 {
fe37c7af 1786 SET_DECL_ALIGN (field, MAX (DECL_ALIGN (field), align));
a1ab4c31
AC
1787 DECL_BIT_FIELD (field) = 0;
1788 }
1789 else if (!had_align
1790 && rep_level == 0
14ecca2e
EB
1791 && value_factor_p (TYPE_SIZE (record_type), align)
1792 && (!TYPE_MAX_ALIGN (record_type)
1793 || TYPE_MAX_ALIGN (record_type) >= align))
a1ab4c31 1794 {
fe37c7af
MM
1795 SET_TYPE_ALIGN (record_type, align);
1796 SET_DECL_ALIGN (field, MAX (DECL_ALIGN (field), align));
a1ab4c31
AC
1797 DECL_BIT_FIELD (field) = 0;
1798 }
1799 }
1800
1801 /* In the non-strict alignment case, only byte alignment is required. */
1802 if (!STRICT_ALIGNMENT
1803 && DECL_BIT_FIELD (field)
1804 && value_factor_p (pos, BITS_PER_UNIT))
1805 DECL_BIT_FIELD (field) = 0;
1806 }
1807
c1abd261
EB
1808 /* If we still have DECL_BIT_FIELD set at this point, we know that the
1809 field is technically not addressable. Except that it can actually
1810 be addressed if it is BLKmode and happens to be properly aligned. */
1811 if (DECL_BIT_FIELD (field)
1812 && !(DECL_MODE (field) == BLKmode
1813 && value_factor_p (pos, BITS_PER_UNIT)))
1814 DECL_NONADDRESSABLE_P (field) = 1;
a1ab4c31
AC
1815
1816 /* A type must be as aligned as its most aligned field that is not
1817 a bit-field. But this is already enforced by layout_type. */
1818 if (rep_level > 0 && !DECL_BIT_FIELD (field))
fe37c7af
MM
1819 SET_TYPE_ALIGN (record_type,
1820 MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field)));
a1ab4c31
AC
1821
1822 switch (code)
1823 {
1824 case UNION_TYPE:
1825 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
1826 size = size_binop (MAX_EXPR, size, this_size);
1827 break;
1828
1829 case QUAL_UNION_TYPE:
1830 ada_size
1831 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1832 this_ada_size, ada_size);
1833 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1834 this_size, size);
1835 break;
1836
1837 case RECORD_TYPE:
1838 /* Since we know here that all fields are sorted in order of
1839 increasing bit position, the size of the record is one
1840 higher than the ending bit of the last field processed
1841 unless we have a rep clause, since in that case we might
1842 have a field outside a QUAL_UNION_TYPE that has a higher ending
1843 position. So use a MAX in that case. Also, if this field is a
1844 QUAL_UNION_TYPE, we need to take into account the previous size in
1845 the case of empty variants. */
1846 ada_size
1847 = merge_sizes (ada_size, pos, this_ada_size,
1848 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1849 size
1850 = merge_sizes (size, pos, this_size,
1851 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1852 break;
1853
1854 default:
1855 gcc_unreachable ();
1856 }
1857 }
1858
1859 if (code == QUAL_UNION_TYPE)
032d1b71 1860 nreverse (field_list);
a1ab4c31
AC
1861
1862 if (rep_level < 2)
1863 {
1864 /* If this is a padding record, we never want to make the size smaller
1865 than what was specified in it, if any. */
315cff15 1866 if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
a1ab4c31
AC
1867 size = TYPE_SIZE (record_type);
1868
1869 /* Now set any of the values we've just computed that apply. */
315cff15 1870 if (!TYPE_FAT_POINTER_P (record_type)
a1ab4c31
AC
1871 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
1872 SET_TYPE_ADA_SIZE (record_type, ada_size);
1873
1874 if (rep_level > 0)
1875 {
1876 tree size_unit = had_size_unit
1877 ? TYPE_SIZE_UNIT (record_type)
1878 : convert (sizetype,
1879 size_binop (CEIL_DIV_EXPR, size,
1880 bitsize_unit_node));
1881 unsigned int align = TYPE_ALIGN (record_type);
1882
1883 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
1884 TYPE_SIZE_UNIT (record_type)
1885 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
1886
1887 compute_record_mode (record_type);
1888 }
1889 }
1890
14ecca2e
EB
1891 /* Reset the TYPE_MAX_ALIGN field since it's private to gigi. */
1892 TYPE_MAX_ALIGN (record_type) = 0;
1893
032d1b71 1894 if (debug_info_p)
a1ab4c31
AC
1895 rest_of_record_type_compilation (record_type);
1896}
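/* Illustrative summary (not from the upstream sources) of the REP_LEVEL
   protocol: a record without representation clause is entirely laid out
   here, e.g.

     finish_record_type (gnu_record_type, gnu_field_list, 0, debug_info_p);

   a record whose fields were already positioned by a rep clause only gets
   its sizes, alignment and mode computed (REP_LEVEL 1), and a record that
   inherits the layout of its parent only gets the final pass over the
   fields (REP_LEVEL 2).  */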
1897
24d4b3d5
AC
1898/* Append PARALLEL_TYPE on the chain of parallel types of TYPE. If
1899 PARALLEL_TYPE has no context and its computation is not deferred yet, also
1900 propagate TYPE's context to PARALLEL_TYPE or defer its propagation to the
1901 moment TYPE gets a context. */
a5695aa2
EB
1902
1903void
1904add_parallel_type (tree type, tree parallel_type)
1905{
1906 tree decl = TYPE_STUB_DECL (type);
1907
1908 while (DECL_PARALLEL_TYPE (decl))
1909 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
1910
1911 SET_DECL_PARALLEL_TYPE (decl, parallel_type);
24d4b3d5
AC
1912
1913 /* If PARALLEL_TYPE already has a context, we are done. */
7c775aca 1914 if (TYPE_CONTEXT (parallel_type))
24d4b3d5
AC
1915 return;
1916
7c775aca
EB
1917 /* Otherwise, try to get one from TYPE's context. If so, simply propagate
1918 it to PARALLEL_TYPE. */
1919 if (TYPE_CONTEXT (type))
24d4b3d5
AC
1920 gnat_set_type_context (parallel_type, TYPE_CONTEXT (type));
1921
7c775aca
EB
1922 /* Otherwise TYPE has no context yet. We know it will have one thanks to
1923 gnat_pushdecl and then its context will be propagated to PARALLEL_TYPE,
1924 so we have nothing to do in this case. */
a5695aa2
EB
1925}
1926
1927/* Return true if TYPE has a parallel type. */
1928
1929static bool
1930has_parallel_type (tree type)
1931{
1932 tree decl = TYPE_STUB_DECL (type);
1933
1934 return DECL_PARALLEL_TYPE (decl) != NULL_TREE;
1935}
1936
afc737f0
EB
1937/* Wrap up compilation of RECORD_TYPE, i.e. output additional debug info
1938 associated with it. It need not be invoked directly in most cases as
1939 finish_record_type takes care of doing so. */
a1ab4c31
AC
1940
1941void
1942rest_of_record_type_compilation (tree record_type)
1943{
a1ab4c31 1944 bool var_size = false;
fb88e1dd 1945 tree field;
a1ab4c31 1946
fb88e1dd
EB
1947 /* If this is a padded type, the bulk of the debug info has already been
1948 generated for the field's type. */
1949 if (TYPE_IS_PADDING_P (record_type))
1950 return;
1951
a5695aa2
EB
1952 /* If the type already has a parallel type (XVS type), then we're done. */
1953 if (has_parallel_type (record_type))
1954 return;
1955
fb88e1dd 1956 for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
a1ab4c31
AC
1957 {
1958 /* We need to make an XVE/XVU record if any field has variable size,
1959 whether or not the record does. For example, if we have a union,
1960 it may be that all fields, rounded up to the alignment, have the
1961 same size, in which case we'll use that size. But the debug
1962 output routines (except Dwarf2) won't be able to output the fields,
1963 so we need to make the special record. */
1964 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
1965 /* If a field has a non-constant qualifier, the record will have
1966 variable size too. */
fb88e1dd 1967 || (TREE_CODE (record_type) == QUAL_UNION_TYPE
a1ab4c31
AC
1968 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
1969 {
1970 var_size = true;
1971 break;
1972 }
1973 }
1974
fb88e1dd
EB
1975 /* If this record type is of variable size, make a parallel record type that
1976 will tell the debugger how the former is laid out (see exp_dbug.ads). */
986ccd21 1977 if (var_size && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
a1ab4c31
AC
1978 {
1979 tree new_record_type
1980 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
1981 ? UNION_TYPE : TREE_CODE (record_type));
9dba4b55 1982 tree orig_name = TYPE_IDENTIFIER (record_type), new_name;
a1ab4c31 1983 tree last_pos = bitsize_zero_node;
0fb2335d 1984 tree old_field, prev_old_field = NULL_TREE;
a1ab4c31 1985
0fb2335d
EB
1986 new_name
1987 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
1988 ? "XVU" : "XVE");
1989 TYPE_NAME (new_record_type) = new_name;
fe37c7af 1990 SET_TYPE_ALIGN (new_record_type, BIGGEST_ALIGNMENT);
a1ab4c31 1991 TYPE_STUB_DECL (new_record_type)
0fb2335d 1992 = create_type_stub_decl (new_name, new_record_type);
a1ab4c31
AC
1993 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
1994 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
396a2ee2 1995 gnat_pushdecl (TYPE_STUB_DECL (new_record_type), Empty);
a1ab4c31
AC
1996 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
1997 TYPE_SIZE_UNIT (new_record_type)
1998 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
1999
5c475ba9
EB
2000 /* Now scan all the fields, replacing each field with a new field
2001 corresponding to the new encoding. */
a1ab4c31 2002 for (old_field = TYPE_FIELDS (record_type); old_field;
910ad8de 2003 old_field = DECL_CHAIN (old_field))
a1ab4c31
AC
2004 {
2005 tree field_type = TREE_TYPE (old_field);
2006 tree field_name = DECL_NAME (old_field);
a1ab4c31 2007 tree curpos = bit_position (old_field);
5c475ba9 2008 tree pos, new_field;
a1ab4c31
AC
2009 bool var = false;
2010 unsigned int align = 0;
a1ab4c31 2011
5c475ba9
EB
2012 /* We're going to do some pattern matching below so remove as many
2013 conversions as possible. */
2014 curpos = remove_conversions (curpos, true);
a1ab4c31 2015
5c475ba9 2016 /* See how the position was modified from the last position.
a1ab4c31 2017
5c475ba9
EB
2018 There are two basic cases we support: a value was added
2019 to the last position or the last position was rounded to
2020 a boundary and then something was added. Check for the
2021 first case first. If not, see if there is any evidence
2022 of rounding. If so, round the last position and retry.
a1ab4c31 2023
5c475ba9 2024 If this is a union, the position can be taken as zero. */
a1ab4c31 2025 if (TREE_CODE (new_record_type) == UNION_TYPE)
5c475ba9 2026 pos = bitsize_zero_node;
a1ab4c31
AC
2027 else
2028 pos = compute_related_constant (curpos, last_pos);
2029
5c475ba9
EB
2030 if (!pos
2031 && TREE_CODE (curpos) == MULT_EXPR
cc269bb6 2032 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1)))
a1ab4c31
AC
2033 {
2034 tree offset = TREE_OPERAND (curpos, 0);
ae7e9ddd 2035 align = tree_to_uhwi (TREE_OPERAND (curpos, 1));
5c475ba9
EB
2036 align = scale_by_factor_of (offset, align);
2037 last_pos = round_up (last_pos, align);
2038 pos = compute_related_constant (curpos, last_pos);
a1ab4c31 2039 }
5c475ba9
EB
2040 else if (!pos
2041 && TREE_CODE (curpos) == PLUS_EXPR
cc269bb6 2042 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1))
a1ab4c31 2043 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
5a36c51b
RS
2044 && tree_fits_uhwi_p
2045 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1)))
a1ab4c31 2046 {
5c475ba9
EB
2047 tree offset = TREE_OPERAND (TREE_OPERAND (curpos, 0), 0);
2048 unsigned HOST_WIDE_INT addend
ae7e9ddd 2049 = tree_to_uhwi (TREE_OPERAND (curpos, 1));
a1ab4c31 2050 align
ae7e9ddd 2051 = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1));
5c475ba9
EB
2052 align = scale_by_factor_of (offset, align);
2053 align = MIN (align, addend & -addend);
2054 last_pos = round_up (last_pos, align);
2055 pos = compute_related_constant (curpos, last_pos);
a1ab4c31 2056 }
5c475ba9 2057 else if (potential_alignment_gap (prev_old_field, old_field, pos))
a1ab4c31
AC
2058 {
2059 align = TYPE_ALIGN (field_type);
5c475ba9
EB
2060 last_pos = round_up (last_pos, align);
2061 pos = compute_related_constant (curpos, last_pos);
a1ab4c31
AC
2062 }
2063
2064 /* If we can't compute a position, set it to zero.
2065
5c475ba9
EB
2066 ??? We really should abort here, but it's too much work
2067 to get this correct for all cases. */
a1ab4c31
AC
2068 if (!pos)
2069 pos = bitsize_zero_node;
2070
2071 /* See if this type is variable-sized and make a pointer type
2072 and indicate the indirection if so. Beware that the debug
2073 back-end may adjust the position computed above according
2074 to the alignment of the field type, i.e. the pointer type
2075 in this case, if we don't preventively counter that. */
2076 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
2077 {
2078 field_type = build_pointer_type (field_type);
2079 if (align != 0 && TYPE_ALIGN (field_type) > align)
2080 {
afc737f0 2081 field_type = copy_type (field_type);
fe37c7af 2082 SET_TYPE_ALIGN (field_type, align);
a1ab4c31
AC
2083 }
2084 var = true;
2085 }
2086
2087 /* Make a new field name, if necessary. */
2088 if (var || align != 0)
2089 {
2090 char suffix[16];
2091
2092 if (align != 0)
2093 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
2094 align / BITS_PER_UNIT);
2095 else
2096 strcpy (suffix, "XVL");
2097
0fb2335d 2098 field_name = concat_name (field_name, suffix);
a1ab4c31
AC
2099 }
2100
da01bfee
EB
2101 new_field
2102 = create_field_decl (field_name, field_type, new_record_type,
2103 DECL_SIZE (old_field), pos, 0, 0);
910ad8de 2104 DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
a1ab4c31
AC
2105 TYPE_FIELDS (new_record_type) = new_field;
2106
2107 /* If old_field is a QUAL_UNION_TYPE, take its size as being
2108 zero. The only time it's not the last field of the record
2109 is when there are other components at fixed positions after
2110 it (meaning there was a rep clause for every field) and we
2111 want to be able to encode them. */
2112 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
2113 (TREE_CODE (TREE_TYPE (old_field))
2114 == QUAL_UNION_TYPE)
2115 ? bitsize_zero_node
2116 : DECL_SIZE (old_field));
2117 prev_old_field = old_field;
2118 }
2119
fb88e1dd 2120 TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));
a1ab4c31 2121
a5695aa2 2122 add_parallel_type (record_type, new_record_type);
a1ab4c31 2123 }
a1ab4c31
AC
2124}
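/* Illustrative example (not from the upstream sources): for a record whose
   size depends on a discriminant, the loop above generates a parallel type
   whose name carries the XVE suffix (XVU for a variant part) and in which
   a variable-sized field typically becomes a pointer field with the XVL
   suffix, so that the debugger can decode the layout as documented in
   exp_dbug.ads.  */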
2125
a1ab4c31 2126/* Utility function of above to merge LAST_SIZE, the previous size of a record
1e17ef87
EB
2127 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
2128 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
2129 replace a value of zero with the old size. If HAS_REP is true, we take the
2130 MAX of the end position of this field with LAST_SIZE. In all other cases,
2131 we use FIRST_BIT plus SIZE. Return an expression for the size. */
a1ab4c31
AC
2132
2133static tree
2134merge_sizes (tree last_size, tree first_bit, tree size, bool special,
2135 bool has_rep)
2136{
2137 tree type = TREE_TYPE (last_size);
c6bd4220 2138 tree new_size;
a1ab4c31
AC
2139
2140 if (!special || TREE_CODE (size) != COND_EXPR)
2141 {
c6bd4220 2142 new_size = size_binop (PLUS_EXPR, first_bit, size);
a1ab4c31 2143 if (has_rep)
c6bd4220 2144 new_size = size_binop (MAX_EXPR, last_size, new_size);
a1ab4c31
AC
2145 }
2146
2147 else
c6bd4220
EB
2148 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
2149 integer_zerop (TREE_OPERAND (size, 1))
2150 ? last_size : merge_sizes (last_size, first_bit,
2151 TREE_OPERAND (size, 1),
2152 1, has_rep),
2153 integer_zerop (TREE_OPERAND (size, 2))
2154 ? last_size : merge_sizes (last_size, first_bit,
2155 TREE_OPERAND (size, 2),
2156 1, has_rep));
a1ab4c31
AC
2157
2158 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
2159 when fed through substitute_in_expr) into thinking that a constant
2160 size is not constant. */
c6bd4220
EB
2161 while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
2162 new_size = TREE_OPERAND (new_size, 0);
a1ab4c31 2163
c6bd4220 2164 return new_size;
a1ab4c31
AC
2165}
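/* Worked example (illustrative, not from the upstream sources): for a
   field at FIRST_BIT = 32 with SIZE = 16, the new size is 32 + 16 = 48
   bits; if HAS_REP is true and LAST_SIZE is already 64 because the rep
   clause placed another field further, the MAX yields 64 instead.  */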
2166
2167/* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
2168 related by the addition of a constant. Return that constant if so. */
2169
2170static tree
2171compute_related_constant (tree op0, tree op1)
2172{
2173 tree op0_var, op1_var;
2174 tree op0_con = split_plus (op0, &op0_var);
2175 tree op1_con = split_plus (op1, &op1_var);
2176 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
2177
2178 if (operand_equal_p (op0_var, op1_var, 0))
2179 return result;
2180 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
2181 return result;
2182 else
2183 return 0;
2184}
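/* Worked example (illustrative, not from the upstream sources): if OP0 is
   N + 48 and OP1 is N + 16, both are split into the variable part N and
   the constants 48 and 16, so 32 is returned; if the variable parts
   differ, e.g. OP0 is M + 48, the function returns 0 to signal failure. */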
2185
2186/* Utility function of above to split a tree IN, which may be a sum, into a
2187 constant part, which is returned, and a variable part, which is stored
2188 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
2189 bitsizetype. */
2190
2191static tree
2192split_plus (tree in, tree *pvar)
2193{
722356ce
EB
2194 /* Strip conversions in order to ease the tree traversal and maximize the
2195 potential for constant or plus/minus discovery. We need to be careful
a1ab4c31
AC
2196 to always return and set *pvar to bitsizetype trees, but it's worth
2197 the effort. */
722356ce 2198 in = remove_conversions (in, false);
a1ab4c31
AC
2199
2200 *pvar = convert (bitsizetype, in);
2201
2202 if (TREE_CODE (in) == INTEGER_CST)
2203 {
2204 *pvar = bitsize_zero_node;
2205 return convert (bitsizetype, in);
2206 }
2207 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
2208 {
2209 tree lhs_var, rhs_var;
2210 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
2211 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
2212
2213 if (lhs_var == TREE_OPERAND (in, 0)
2214 && rhs_var == TREE_OPERAND (in, 1))
2215 return bitsize_zero_node;
2216
2217 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
2218 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
2219 }
2220 else
2221 return bitsize_zero_node;
2222}
2223\f
d47d0a8d
EB
2224/* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
2225 subprogram. If it is VOID_TYPE, then we are dealing with a procedure,
2226 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
2227 PARM_DECL nodes that are the subprogram parameters. CICO_LIST is the
2228 copy-in/copy-out list to be stored into the TYPE_CICO_LIST field.
2229 RETURN_UNCONSTRAINED_P is true if the function returns an unconstrained
2230 object. RETURN_BY_DIRECT_REF_P is true if the function returns by direct
2231 reference. RETURN_BY_INVISI_REF_P is true if the function returns by
2232 invisible reference. */
a1ab4c31
AC
2233
2234tree
2235create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
d47d0a8d
EB
2236 bool return_unconstrained_p, bool return_by_direct_ref_p,
2237 bool return_by_invisi_ref_p)
a1ab4c31 2238{
e5b00edf
NF
2239 /* A list of the data type nodes of the subprogram formal parameters.
2240 This list is generated by traversing the input list of PARM_DECL
2241 nodes. */
9771b263 2242 vec<tree, va_gc> *param_type_list = NULL;
d47d0a8d 2243 tree t, type;
a1ab4c31 2244
910ad8de 2245 for (t = param_decl_list; t; t = DECL_CHAIN (t))
9771b263 2246 vec_safe_push (param_type_list, TREE_TYPE (t));
a1ab4c31 2247
e5b00edf 2248 type = build_function_type_vec (return_type, param_type_list);
a1ab4c31 2249
d47d0a8d
EB
2250 /* TYPE may have been shared since GCC hashes types. If it has a different
2251 CICO_LIST, make a copy. Likewise for the various flags. */
523e82a7
EB
2252 if (!fntype_same_flags_p (type, cico_list, return_unconstrained_p,
2253 return_by_direct_ref_p, return_by_invisi_ref_p))
d47d0a8d
EB
2254 {
2255 type = copy_type (type);
2256 TYPE_CI_CO_LIST (type) = cico_list;
2257 TYPE_RETURN_UNCONSTRAINED_P (type) = return_unconstrained_p;
2258 TYPE_RETURN_BY_DIRECT_REF_P (type) = return_by_direct_ref_p;
2259 TREE_ADDRESSABLE (type) = return_by_invisi_ref_p;
2260 }
a1ab4c31 2261
a1ab4c31
AC
2262 return type;
2263}
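/* Illustrative sketch (hypothetical usage, not from the upstream sources):
   a procedure taking the PARM_DECLs in GNU_PARAM_LIST and using no
   copy-in/copy-out mechanism would be typed as:

     gnu_type = create_subprog_type (void_type_node, gnu_param_list,
                                     NULL_TREE, false, false, false);

   while a function returning an unconstrained array would pass true for
   RETURN_UNCONSTRAINED_P.  */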
2264\f
2265/* Return a copy of TYPE but safe to modify in any way. */
2266
2267tree
2268copy_type (tree type)
2269{
c6bd4220 2270 tree new_type = copy_node (type);
a1ab4c31 2271
90dcfecb
EB
2272 /* Unshare the language-specific data. */
2273 if (TYPE_LANG_SPECIFIC (type))
2274 {
2275 TYPE_LANG_SPECIFIC (new_type) = NULL;
2276 SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
2277 }
2278
2279 /* And the contents of the language-specific slot if needed. */
2280 if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
2281 && TYPE_RM_VALUES (type))
2282 {
2283 TYPE_RM_VALUES (new_type) = NULL_TREE;
2284 SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
2285 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
2286 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
2287 }
2288
a1ab4c31
AC
2289 /* copy_node clears this field instead of copying it, because it is
2290 aliased with TREE_CHAIN. */
c6bd4220 2291 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
a1ab4c31 2292
afc737f0
EB
2293 TYPE_POINTER_TO (new_type) = NULL_TREE;
2294 TYPE_REFERENCE_TO (new_type) = NULL_TREE;
c6bd4220 2295 TYPE_MAIN_VARIANT (new_type) = new_type;
afc737f0 2296 TYPE_NEXT_VARIANT (new_type) = NULL_TREE;
4b7bd260 2297 TYPE_CANONICAL (new_type) = new_type;
a1ab4c31 2298
c6bd4220 2299 return new_type;
a1ab4c31
AC
2300}
2301\f
c1abd261
EB
2302/* Return a subtype of sizetype with range MIN to MAX and whose
2303 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2304 of the associated TYPE_DECL. */
a1ab4c31
AC
2305
2306tree
2307create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
2308{
2309 /* First build a type for the desired range. */
523e82a7 2310 tree type = build_nonshared_range_type (sizetype, min, max);
a1ab4c31 2311
523e82a7 2312 /* Then set the index type. */
a1ab4c31 2313 SET_TYPE_INDEX_TYPE (type, index);
74746d49 2314 create_type_decl (NULL_TREE, type, true, false, gnat_node);
c1abd261 2315
a1ab4c31
AC
2316 return type;
2317}
84fb43a1
EB
2318
2319/* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2320 sizetype is used. */
2321
2322tree
2323create_range_type (tree type, tree min, tree max)
2324{
2325 tree range_type;
2326
7c775aca 2327 if (!type)
84fb43a1
EB
2328 type = sizetype;
2329
2330 /* First build a type with the base range. */
523e82a7
EB
2331 range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
2332 TYPE_MAX_VALUE (type));
84fb43a1
EB
2333
2334 /* Then set the actual range. */
1eb58520
AC
2335 SET_TYPE_RM_MIN_VALUE (range_type, min);
2336 SET_TYPE_RM_MAX_VALUE (range_type, max);
84fb43a1
EB
2337
2338 return range_type;
2339}
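/* Illustrative sketch (hypothetical usage, not from the upstream sources):
   the GENERIC type for an Ada declaration like "subtype S is T range
   1 .. 10" can be obtained with:

     gnu_type = create_range_type (gnu_base_type,
                                   build_int_cst (gnu_base_type, 1),
                                   build_int_cst (gnu_base_type, 10));

   The base range of the result is that of GNU_BASE_TYPE; only the RM
   bounds, which are the bounds relevant to the language, are 1 and 10.  */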
a1ab4c31 2340\f
6249559b
EB
2341/* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of TYPE.
2342 NAME gives the name of the type to be used in the declaration. */
10069d53
EB
2343
2344tree
6249559b 2345create_type_stub_decl (tree name, tree type)
10069d53 2346{
6249559b 2347 tree type_decl = build_decl (input_location, TYPE_DECL, name, type);
10069d53 2348 DECL_ARTIFICIAL (type_decl) = 1;
bc712852 2349 TYPE_ARTIFICIAL (type) = 1;
10069d53
EB
2350 return type_decl;
2351}
2352
6249559b
EB
2353/* Return a TYPE_DECL node for TYPE. NAME gives the name of the type to be
2354 used in the declaration. ARTIFICIAL_P is true if the declaration was
2355 generated by the compiler. DEBUG_INFO_P is true if we need to write
2356 debug information about this type. GNAT_NODE is used for the position
2357 of the decl. */
a1ab4c31
AC
2358
2359tree
6249559b
EB
2360create_type_decl (tree name, tree type, bool artificial_p, bool debug_info_p,
2361 Node_Id gnat_node)
a1ab4c31 2362{
a1ab4c31 2363 enum tree_code code = TREE_CODE (type);
6249559b
EB
2364 bool is_named
2365 = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
10069d53 2366 tree type_decl;
a1ab4c31 2367
10069d53
EB
2368 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
2369 gcc_assert (!TYPE_IS_DUMMY_P (type));
a1ab4c31 2370
10069d53
EB
2371 /* If the type hasn't been named yet, we're naming it; preserve an existing
2372 TYPE_STUB_DECL that has been attached to it for some purpose. */
6249559b 2373 if (!is_named && TYPE_STUB_DECL (type))
10069d53
EB
2374 {
2375 type_decl = TYPE_STUB_DECL (type);
6249559b 2376 DECL_NAME (type_decl) = name;
10069d53
EB
2377 }
2378 else
6249559b 2379 type_decl = build_decl (input_location, TYPE_DECL, name, type);
a1ab4c31 2380
10069d53 2381 DECL_ARTIFICIAL (type_decl) = artificial_p;
bc712852 2382 TYPE_ARTIFICIAL (type) = artificial_p;
58c8f770
EB
2383
2384 /* Add this decl to the current binding level. */
10069d53 2385 gnat_pushdecl (type_decl, gnat_node);
58c8f770 2386
aef308d0
PMR
2387 /* If we're naming the type, equate the TYPE_STUB_DECL to the name. This
2388 causes the name to be also viewed as a "tag" by the debug back-end, with
2389 the advantage that no DW_TAG_typedef is emitted for artificial "tagged"
2390 types in DWARF.
2391
2392 Note that if "type" is used as a DECL_ORIGINAL_TYPE, it may be referenced
2393 from multiple contexts, and "type_decl" references a copy of it: in such a
2394 case, do not mess with TYPE_STUB_DECL: we do not want to re-use the TYPE_DECL
2395 with the mechanism above. */
6249559b 2396 if (!is_named && type != DECL_ORIGINAL_TYPE (type_decl))
10069d53
EB
2397 TYPE_STUB_DECL (type) = type_decl;
2398
50741117
EB
2399 /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
2400 back-end doesn't support, and for others if we don't need to. */
a1ab4c31
AC
2401 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
2402 DECL_IGNORED_P (type_decl) = 1;
a1ab4c31
AC
2403
2404 return type_decl;
2405}
10069d53 2406\f
a1ab4c31
AC
2407/* Return a VAR_DECL or CONST_DECL node.
2408
6249559b
EB
2409 NAME gives the name of the variable. ASM_NAME is its assembler name
2410 (if provided). TYPE is its data type (a GCC ..._TYPE node). INIT is
a1ab4c31
AC
2411 the GCC tree for an optional initial expression; NULL_TREE if none.
2412
2413 CONST_FLAG is true if this variable is constant, in which case we might
2414 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
2415
2416 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
2417 definition to be made visible outside of the current compilation unit, for
2418 instance variable definitions in a package specification.
2419
1e17ef87 2420 EXTERN_FLAG is true when processing an external variable declaration (as
a1ab4c31
AC
2421 opposed to a definition: no storage is to be allocated for the variable).
2422
2056c5ed
EB
2423 STATIC_FLAG is only relevant when not at top level and indicates whether
2424 to always allocate storage to the variable.
2425
2426 VOLATILE_FLAG is true if this variable is declared as volatile.
a1ab4c31 2427
c1a569ef
EB
2428 ARTIFICIAL_P is true if the variable was generated by the compiler.
2429
2430 DEBUG_INFO_P is true if we need to write debug information for it.
2431
2ade427a
EB
2432 ATTR_LIST is the list of attributes to be attached to the variable.
2433
a1ab4c31
AC
2434 GNAT_NODE is used for the position of the decl. */
2435
2436tree
6249559b
EB
2437create_var_decl (tree name, tree asm_name, tree type, tree init,
2438 bool const_flag, bool public_flag, bool extern_flag,
2056c5ed
EB
2439 bool static_flag, bool volatile_flag, bool artificial_p,
2440 bool debug_info_p, struct attrib *attr_list,
2441 Node_Id gnat_node, bool const_decl_allowed_p)
a1ab4c31 2442{
5fe48b3d
EB
2443 /* Whether the object has static storage duration, either explicitly or by
2444 virtue of being declared at the global level. */
2445 const bool static_storage = static_flag || global_bindings_p ();
2446
2447 /* Whether the initializer is constant: for an external object or an object
2448 with static storage duration, we check that the initializer is a valid
2449 constant expression for initializing a static variable; otherwise, we
2450 only check that it is constant. */
2451 const bool init_const
6249559b
EB
2452 = (init
2453 && gnat_types_compatible_p (type, TREE_TYPE (init))
5fe48b3d 2454 && (extern_flag || static_storage
6249559b 2455 ? initializer_constant_valid_p (init, TREE_TYPE (init))
5fe48b3d 2456 != NULL_TREE
6249559b 2457 : TREE_CONSTANT (init)));
a1ab4c31
AC
2458
2459 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
5fe48b3d 2460 case the initializer may be used in lieu of the DECL node (as done in
a1ab4c31 2461 Identifier_to_gnu). This is useful to prevent the need of elaboration
5fe48b3d
EB
2462 code when an identifier for which such a DECL is made is in turn used
2463 as an initializer. We used to rely on CONST_DECL vs VAR_DECL for this,
2464 but extra constraints apply to this choice (see below) and they are not
2465 relevant to the distinction we wish to make. */
2466 const bool constant_p = const_flag && init_const;
a1ab4c31
AC
2467
2468 /* The actual DECL node. CONST_DECL was initially intended for enumerals
2469 and may be used for scalars in general but not for aggregates. */
2470 tree var_decl
c172df28
AH
2471 = build_decl (input_location,
2472 (constant_p && const_decl_allowed_p
a1ab4c31 2473 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
6249559b 2474 name, type);
a1ab4c31 2475
93e708f9
EB
2476 /* Detect constants created by the front-end to hold 'reference to function
2477 calls for stabilization purposes. This is needed for renaming. */
2478 if (const_flag && init && POINTER_TYPE_P (type))
2479 {
2480 tree inner = init;
2481 if (TREE_CODE (inner) == COMPOUND_EXPR)
2482 inner = TREE_OPERAND (inner, 1);
2483 inner = remove_conversions (inner, true);
2484 if (TREE_CODE (inner) == ADDR_EXPR
2485 && ((TREE_CODE (TREE_OPERAND (inner, 0)) == CALL_EXPR
2486 && !call_is_atomic_load (TREE_OPERAND (inner, 0)))
2487 || (TREE_CODE (TREE_OPERAND (inner, 0)) == VAR_DECL
2488 && DECL_RETURN_VALUE_P (TREE_OPERAND (inner, 0)))))
2489 DECL_RETURN_VALUE_P (var_decl) = 1;
2490 }
2491
a1ab4c31
AC
2492 /* If this is external, throw away any initializations (they will be done
2493 elsewhere) unless this is a constant for which we would like to remain
2494 able to get the initializer. If we are defining a global here, leave a
2495 constant initialization and save any variable elaborations for the
2496 elaboration routine. If we are just annotating types, throw away the
2497 initialization if it isn't a constant. */
2498 if ((extern_flag && !constant_p)
6249559b
EB
2499 || (type_annotate_only && init && !TREE_CONSTANT (init)))
2500 init = NULL_TREE;
a1ab4c31 2501
5fe48b3d
EB
2502 /* At the global level, a non-constant initializer generates elaboration
2503 statements. Check that such statements are allowed, that is to say,
2504 not violating a No_Elaboration_Code restriction. */
6249559b 2505 if (init && !init_const && global_bindings_p ())
a1ab4c31 2506 Check_Elaboration_Code_Allowed (gnat_node);
3b9e8343 2507
c1a569ef 2508 /* Attach the initializer, if any. */
6249559b 2509 DECL_INITIAL (var_decl) = init;
c1a569ef
EB
2510
2511 /* Directly set some flags. */
2512 DECL_ARTIFICIAL (var_decl) = artificial_p;
8b7b0c36 2513 DECL_EXTERNAL (var_decl) = extern_flag;
a1ab4c31 2514
ffe9a0a7
EB
2515 TREE_CONSTANT (var_decl) = constant_p;
2516 TREE_READONLY (var_decl) = const_flag;
2517
2518 /* The object is public if it is external or if it is declared public
2519 and has static storage duration. */
2520 TREE_PUBLIC (var_decl) = extern_flag || (public_flag && static_storage);
2521
2522 /* We need to allocate static storage for an object with static storage
2523 duration if it isn't external. */
2524 TREE_STATIC (var_decl) = !extern_flag && static_storage;
2525
2526 TREE_SIDE_EFFECTS (var_decl)
2527 = TREE_THIS_VOLATILE (var_decl)
2528 = TYPE_VOLATILE (type) | volatile_flag;
2529
2530 if (TREE_SIDE_EFFECTS (var_decl))
2531 TREE_ADDRESSABLE (var_decl) = 1;
2532
a1ab4c31
AC
2533 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
2534 try to fiddle with DECL_COMMON. However, on platforms that don't
2535 support global BSS sections, uninitialized global variables would
2536 go in DATA instead, thus increasing the size of the executable. */
2537 if (!flag_no_common
2538 && TREE_CODE (var_decl) == VAR_DECL
3b9e8343 2539 && TREE_PUBLIC (var_decl)
a1ab4c31
AC
2540 && !have_global_bss_p ())
2541 DECL_COMMON (var_decl) = 1;
a1ab4c31 2542
c1a569ef
EB
2543 /* Do not emit debug info for a CONST_DECL if optimization isn't enabled,
2544 since we will create an associated variable. Likewise for an external
2545 constant whose initializer is not absolute, because this would mean a
2546 global relocation in a read-only section which runs afoul of the PE-COFF
2547 run-time relocation mechanism. */
2548 if (!debug_info_p
2549 || (TREE_CODE (var_decl) == CONST_DECL && !optimize)
2550 || (extern_flag
2551 && constant_p
6249559b
EB
2552 && init
2553 && initializer_constant_valid_p (init, TREE_TYPE (init))
c1a569ef 2554 != null_pointer_node))
5225a138
EB
2555 DECL_IGNORED_P (var_decl) = 1;
2556
74746d49
EB
2557 /* ??? Some attributes cannot be applied to CONST_DECLs. */
2558 if (TREE_CODE (var_decl) == VAR_DECL)
2559 process_attributes (&var_decl, &attr_list, true, gnat_node);
2560
2561 /* Add this decl to the current binding level. */
2562 gnat_pushdecl (var_decl, gnat_node);
2563
a22b794d 2564 if (TREE_CODE (var_decl) == VAR_DECL && asm_name)
a1ab4c31 2565 {
a22b794d
EB
2566 /* Let the target mangle the name if this isn't a verbatim asm. */
2567 if (*IDENTIFIER_POINTER (asm_name) != '*')
2568 asm_name = targetm.mangle_decl_assembler_name (var_decl, asm_name);
74746d49 2569
a22b794d 2570 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
a1ab4c31 2571 }
a1ab4c31
AC
2572
2573 return var_decl;
2574}
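/* Illustrative sketch (hypothetical usage, not from the upstream sources):
   an artificial, non-volatile, locally allocated constant of type GNU_TYPE
   initialized to GNU_INIT might be declared as:

     gnu_decl = create_var_decl (gnu_name, NULL_TREE, gnu_type, gnu_init,
                                 true, false, false, false,
                                 false, true, true, NULL, gnat_node);

   where the booleans are, in order, CONST_FLAG, PUBLIC_FLAG, EXTERN_FLAG,
   STATIC_FLAG, VOLATILE_FLAG, ARTIFICIAL_P and DEBUG_INFO_P, and the
   trailing CONST_DECL_ALLOWED_P parameter is left to its default.  */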
2575\f
2576/* Return true if TYPE, an aggregate type, contains (or is) an array. */
2577
2578static bool
2579aggregate_type_contains_array_p (tree type)
2580{
2581 switch (TREE_CODE (type))
2582 {
2583 case RECORD_TYPE:
2584 case UNION_TYPE:
2585 case QUAL_UNION_TYPE:
2586 {
2587 tree field;
910ad8de 2588 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
a1ab4c31
AC
2589 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
2590 && aggregate_type_contains_array_p (TREE_TYPE (field)))
2591 return true;
2592 return false;
2593 }
2594
2595 case ARRAY_TYPE:
2596 return true;
2597
2598 default:
2599 gcc_unreachable ();
2600 }
2601}
2602
6249559b
EB
2603/* Return a FIELD_DECL node. NAME is the field's name, TYPE is its type and
2604 RECORD_TYPE is the type of the enclosing record. If SIZE is nonzero, it
2605 is the specified size of the field. If POS is nonzero, it is the bit
2606 position. PACKED is 1 if the enclosing record is packed, -1 if it has
2607 Component_Alignment of Storage_Unit. If ADDRESSABLE is nonzero, it
62f9f3ce
EB
2608 means we are allowed to take the address of the field; if it is negative,
2609 we should not make a bitfield, which is used by make_aligning_type. */
a1ab4c31
AC
2610
2611tree
6249559b
EB
2612create_field_decl (tree name, tree type, tree record_type, tree size, tree pos,
2613 int packed, int addressable)
a1ab4c31 2614{
6249559b 2615 tree field_decl = build_decl (input_location, FIELD_DECL, name, type);
a1ab4c31
AC
2616
2617 DECL_CONTEXT (field_decl) = record_type;
6249559b 2618 TREE_READONLY (field_decl) = TYPE_READONLY (type);
a1ab4c31
AC
2619
2620 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
2621 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
2622 Likewise for an aggregate without specified position that contains an
2623 array, because in this case slices of variable length of this array
2624 must be handled by GCC and variable-sized objects need to be aligned
2625 to at least a byte boundary. */
6249559b 2626 if (packed && (TYPE_MODE (type) == BLKmode
a1ab4c31 2627 || (!pos
6249559b
EB
2628 && AGGREGATE_TYPE_P (type)
2629 && aggregate_type_contains_array_p (type))))
fe37c7af 2630 SET_DECL_ALIGN (field_decl, BITS_PER_UNIT);
a1ab4c31
AC
2631
2632 /* If a size is specified, use it. Otherwise, if the record type is packed
2633 compute a size to use, which may differ from the object's natural size.
2634 We always set a size in this case to trigger the checks for bitfield
2635 creation below, which is typically required when no position has been
2636 specified. */
2637 if (size)
2638 size = convert (bitsizetype, size);
2639 else if (packed == 1)
2640 {
6249559b
EB
2641 size = rm_size (type);
2642 if (TYPE_MODE (type) == BLKmode)
62f9f3ce 2643 size = round_up (size, BITS_PER_UNIT);
a1ab4c31
AC
2644 }
2645
2646 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
2647 specified for two reasons: first if the size differs from the natural
2648 size. Second, if the alignment is insufficient. There are a number of
2649 ways the latter can be true.
2650
2651 We never make a bitfield if the type of the field has a nonconstant size,
2652 because no such entity requiring bitfield operations should reach here.
2653
2654 We do *preventively* make a bitfield when there might be the need for it
2655 but we don't have all the necessary information to decide, as is the case
2656 of a field with no specified position in a packed record.
2657
2658 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
2659 in layout_decl or finish_record_type to clear the bit_field indication if
2660 it is in fact not needed. */
2661 if (addressable >= 0
2662 && size
2663 && TREE_CODE (size) == INTEGER_CST
6249559b
EB
2664 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2665 && (!tree_int_cst_equal (size, TYPE_SIZE (type))
2666 || (pos && !value_factor_p (pos, TYPE_ALIGN (type)))
a1ab4c31
AC
2667 || packed
2668 || (TYPE_ALIGN (record_type) != 0
6249559b 2669 && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))))
a1ab4c31
AC
2670 {
2671 DECL_BIT_FIELD (field_decl) = 1;
2672 DECL_SIZE (field_decl) = size;
2673 if (!packed && !pos)
feec4372
EB
2674 {
2675 if (TYPE_ALIGN (record_type) != 0
6249559b 2676 && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))
fe37c7af 2677 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (record_type));
feec4372 2678 else
fe37c7af 2679 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
feec4372 2680 }
a1ab4c31
AC
2681 }
2682
2683 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
2684
2685 /* Bump the alignment if need be, either for bitfield/packing purposes or
2686 to satisfy the type requirements if no such consideration applies. When
2687 we get the alignment from the type, indicate if this is from an explicit
2688 user request, which prevents stor-layout from lowering it later on. */
2689 {
d9223014 2690 unsigned int bit_align
a1ab4c31 2691 = (DECL_BIT_FIELD (field_decl) ? 1
6249559b 2692 : packed && TYPE_MODE (type) != BLKmode ? BITS_PER_UNIT : 0);
a1ab4c31
AC
2693
2694 if (bit_align > DECL_ALIGN (field_decl))
fe37c7af 2695 SET_DECL_ALIGN (field_decl, bit_align);
6249559b 2696 else if (!bit_align && TYPE_ALIGN (type) > DECL_ALIGN (field_decl))
a1ab4c31 2697 {
fe37c7af 2698 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
6249559b 2699 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (type);
a1ab4c31
AC
2700 }
2701 }
2702
2703 if (pos)
2704 {
2705 /* We need to pass in the alignment the DECL is known to have.
2706 This is the lowest-order bit set in POS, but no more than
2707 the alignment of the record, if one is specified. Note
2708 that an alignment of 0 is taken as infinite. */
2709 unsigned int known_align;
2710
cc269bb6 2711 if (tree_fits_uhwi_p (pos))
ae7e9ddd 2712 known_align = tree_to_uhwi (pos) & - tree_to_uhwi (pos);
a1ab4c31
AC
2713 else
2714 known_align = BITS_PER_UNIT;
2715
2716 if (TYPE_ALIGN (record_type)
2717 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
2718 known_align = TYPE_ALIGN (record_type);
2719
2720 layout_decl (field_decl, known_align);
2721 SET_DECL_OFFSET_ALIGN (field_decl,
cc269bb6 2722 tree_fits_uhwi_p (pos) ? BIGGEST_ALIGNMENT
a1ab4c31
AC
2723 : BITS_PER_UNIT);
2724 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
2725 &DECL_FIELD_BIT_OFFSET (field_decl),
2726 DECL_OFFSET_ALIGN (field_decl), pos);
a1ab4c31
AC
2727 }
2728
2729 /* In addition to what our caller says, claim the field is addressable if we
2730 know that its type is not suitable.
2731
2732 The field may also be "technically" nonaddressable, meaning that even if
2733 we attempt to take the field's address we will actually get the address
2734 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
2735 value we have at this point is not accurate enough, so we don't account
2736 for this here and let finish_record_type decide. */
6249559b 2737 if (!addressable && !type_for_nonaliased_component_p (type))
a1ab4c31
AC
2738 addressable = 1;
2739
2740 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
2741
2742 return field_decl;
2743}
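/* Illustrative sketch (hypothetical usage, not from the upstream sources):
   an ordinary field of type GNU_FIELD_TYPE, with no component clause, in a
   record that is neither packed nor subject to Component_Alignment, might
   be created as:

     gnu_field = create_field_decl (gnu_name, gnu_field_type, gnu_record,
                                    NULL_TREE, NULL_TREE, 0, 0);

   A size and a bit position are passed instead when a component clause
   applies, as described above.  */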
2744\f
6249559b
EB
2745/* Return a PARM_DECL node. NAME is the name of the parameter and TYPE is
2746 its type. READONLY is true if the parameter is readonly (either an In
2747 parameter or an address of a pass-by-ref parameter). */
a1ab4c31
AC
2748
2749tree
6249559b 2750create_param_decl (tree name, tree type, bool readonly)
a1ab4c31 2751{
6249559b 2752 tree param_decl = build_decl (input_location, PARM_DECL, name, type);
a1ab4c31 2753
a8e05f92
EB
2754 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
2755 can lead to various ABI violations. */
2756 if (targetm.calls.promote_prototypes (NULL_TREE)
6249559b
EB
2757 && INTEGRAL_TYPE_P (type)
2758 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node))
a1ab4c31
AC
2759 {
2760 /* We have to be careful about biased types here. Make a subtype
2761 of integer_type_node with the proper biasing. */
6249559b
EB
2762 if (TREE_CODE (type) == INTEGER_TYPE
2763 && TYPE_BIASED_REPRESENTATION_P (type))
a1ab4c31 2764 {
84fb43a1
EB
2765 tree subtype
2766 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
c1abd261
EB
2767 TREE_TYPE (subtype) = integer_type_node;
2768 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
6249559b
EB
2769 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (type));
2770 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (type));
2771 type = subtype;
a1ab4c31
AC
2772 }
2773 else
6249559b 2774 type = integer_type_node;
a1ab4c31
AC
2775 }
2776
6249559b 2777 DECL_ARG_TYPE (param_decl) = type;
a1ab4c31
AC
2778 TREE_READONLY (param_decl) = readonly;
2779 return param_decl;
2780}
2781\f
74746d49
EB
2782/* Process the attributes in ATTR_LIST for NODE, which is either a DECL or
2783 a TYPE. If IN_PLACE is true, the tree pointed to by NODE should not be
2784 changed. GNAT_NODE is used for the position of error messages. */
a1ab4c31 2785
74746d49
EB
2786void
2787process_attributes (tree *node, struct attrib **attr_list, bool in_place,
2788 Node_Id gnat_node)
a1ab4c31 2789{
74746d49
EB
2790 struct attrib *attr;
2791
2792 for (attr = *attr_list; attr; attr = attr->next)
2793 switch (attr->type)
a1ab4c31
AC
2794 {
2795 case ATTR_MACHINE_ATTRIBUTE:
74746d49
EB
2796 Sloc_to_locus (Sloc (gnat_node), &input_location);
2797 decl_attributes (node, tree_cons (attr->name, attr->args, NULL_TREE),
2798 in_place ? ATTR_FLAG_TYPE_IN_PLACE : 0);
a1ab4c31
AC
2799 break;
2800
2801 case ATTR_LINK_ALIAS:
74746d49 2802 if (!DECL_EXTERNAL (*node))
a1ab4c31 2803 {
74746d49
EB
2804 TREE_STATIC (*node) = 1;
2805 assemble_alias (*node, attr->name);
a1ab4c31
AC
2806 }
2807 break;
2808
2809 case ATTR_WEAK_EXTERNAL:
2810 if (SUPPORTS_WEAK)
74746d49 2811 declare_weak (*node);
a1ab4c31
AC
2812 else
2813 post_error ("?weak declarations not supported on this target",
74746d49 2814 attr->error_point);
a1ab4c31
AC
2815 break;
2816
2817 case ATTR_LINK_SECTION:
677f3fa8 2818 if (targetm_common.have_named_sections)
a1ab4c31 2819 {
0ab75824 2820 set_decl_section_name (*node, IDENTIFIER_POINTER (attr->name));
74746d49 2821 DECL_COMMON (*node) = 0;
a1ab4c31
AC
2822 }
2823 else
2824 post_error ("?section attributes are not supported for this target",
74746d49 2825 attr->error_point);
a1ab4c31
AC
2826 break;
2827
2828 case ATTR_LINK_CONSTRUCTOR:
74746d49
EB
2829 DECL_STATIC_CONSTRUCTOR (*node) = 1;
2830 TREE_USED (*node) = 1;
a1ab4c31
AC
2831 break;
2832
2833 case ATTR_LINK_DESTRUCTOR:
74746d49
EB
2834 DECL_STATIC_DESTRUCTOR (*node) = 1;
2835 TREE_USED (*node) = 1;
a1ab4c31 2836 break;
40a14772
TG
2837
2838 case ATTR_THREAD_LOCAL_STORAGE:
56363ffd 2839 set_decl_tls_model (*node, decl_default_tls_model (*node));
74746d49 2840 DECL_COMMON (*node) = 0;
40a14772 2841 break;
a1ab4c31 2842 }
74746d49
EB
2843
2844 *attr_list = NULL;
a1ab4c31 2845}
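/* Illustrative sketch (not from the original source): attach a single
   machine attribute to *DECL by hand.  Real callers build the attrib list
   while translating pragmas; the "noinline" attribute name and the
   example_* identifiers are examples only.  */

static void
example_process_attributes (tree *decl, Node_Id gnat_node)
{
  struct attrib a;
  memset (&a, 0, sizeof (a));
  a.type = ATTR_MACHINE_ATTRIBUTE;
  a.name = get_identifier ("noinline");
  a.error_point = gnat_node;

  struct attrib *list = &a;
  process_attributes (decl, &list, false, gnat_node);
  /* On return, LIST has been reset to NULL by process_attributes.  */
}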
a1ab4c31
AC
2846
 2847/* Return true if VALUE is known to be a multiple of FACTOR, which must be
2848 a power of 2. */
2849
2850bool
2851value_factor_p (tree value, HOST_WIDE_INT factor)
2852{
cc269bb6 2853 if (tree_fits_uhwi_p (value))
ae7e9ddd 2854 return tree_to_uhwi (value) % factor == 0;
a1ab4c31
AC
2855
2856 if (TREE_CODE (value) == MULT_EXPR)
2857 return (value_factor_p (TREE_OPERAND (value, 0), factor)
2858 || value_factor_p (TREE_OPERAND (value, 1), factor));
2859
2860 return false;
2861}
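/* Illustrative sketch (not from the original source): value_factor_p looks
   through MULT_EXPR, so a size of the form NUNITS * 16 is recognized as a
   multiple of 8 even when NUNITS is not a compile-time constant.  NUNITS
   is assumed to be an expression of sizetype.  */

static void
example_value_factor_p (tree nunits)
{
  tree bytes = fold_build2 (MULT_EXPR, sizetype, nunits, size_int (16));
  gcc_checking_assert (value_factor_p (bytes, 8));
}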
2862
e8fa3dcd
PMR
2863/* Return whether GNAT_NODE is a defining identifier for a renaming that comes
2864 from the parameter association for the instantiation of a generic. We do
 2865 not want to emit source locations for them: the code generated for their

2866 initialization is likely to disturb debugging. */
2867
2868bool
2869renaming_from_generic_instantiation_p (Node_Id gnat_node)
2870{
2871 if (Nkind (gnat_node) != N_Defining_Identifier
2872 || !IN (Ekind (gnat_node), Object_Kind)
2873 || Comes_From_Source (gnat_node)
2874 || !Present (Renamed_Object (gnat_node)))
2875 return false;
2876
2877 /* Get the object declaration of the renamed object, if any and if the
2878 renamed object is a mere identifier. */
2879 gnat_node = Renamed_Object (gnat_node);
2880 if (Nkind (gnat_node) != N_Identifier)
2881 return false;
2882
2883 gnat_node = Entity (gnat_node);
2884 if (!Present (Parent (gnat_node)))
2885 return false;
2886
2887 gnat_node = Parent (gnat_node);
2888 return
2889 (Present (gnat_node)
2890 && Nkind (gnat_node) == N_Object_Declaration
2891 && Present (Corresponding_Generic_Association (gnat_node)));
2892}
2893
9a30c7c4
AC
2894/* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
2895 feed it with the elaboration of GNAT_SCOPE. */
2896
2897static struct deferred_decl_context_node *
2898add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global)
2899{
2900 struct deferred_decl_context_node *new_node;
2901
2902 new_node
2903 = (struct deferred_decl_context_node * ) xmalloc (sizeof (*new_node));
2904 new_node->decl = decl;
2905 new_node->gnat_scope = gnat_scope;
2906 new_node->force_global = force_global;
2907 new_node->types.create (1);
2908 new_node->next = deferred_decl_context_queue;
2909 deferred_decl_context_queue = new_node;
2910 return new_node;
2911}
2912
2913/* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
2914 feed it with the DECL_CONTEXT computed as part of N as soon as it is
2915 computed. */
2916
2917static void
2918add_deferred_type_context (struct deferred_decl_context_node *n, tree type)
2919{
2920 n->types.safe_push (type);
2921}
2922
2923/* Get the GENERIC node corresponding to GNAT_SCOPE, if available. Return
2924 NULL_TREE if it is not available. */
2925
2926static tree
2927compute_deferred_decl_context (Entity_Id gnat_scope)
2928{
2929 tree context;
2930
2931 if (present_gnu_tree (gnat_scope))
2932 context = get_gnu_tree (gnat_scope);
2933 else
2934 return NULL_TREE;
2935
2936 if (TREE_CODE (context) == TYPE_DECL)
2937 {
2938 const tree context_type = TREE_TYPE (context);
2939
2940 /* Skip dummy types: only the final ones can appear in the context
2941 chain. */
2942 if (TYPE_DUMMY_P (context_type))
2943 return NULL_TREE;
2944
2945 /* ..._TYPE nodes are more useful than TYPE_DECL nodes in the context
2946 chain. */
2947 else
2948 context = context_type;
2949 }
2950
2951 return context;
2952}
2953
2954/* Try to process all deferred nodes in the queue. Keep in the queue the ones
 2955 that cannot be processed yet and remove the other ones. If FORCE is true,
 2956 force the processing of all nodes and use the global context when nodes don't
2957 have a GNU translation. */
2958
2959void
2960process_deferred_decl_context (bool force)
2961{
2962 struct deferred_decl_context_node **it = &deferred_decl_context_queue;
2963 struct deferred_decl_context_node *node;
2964
2965 while (*it != NULL)
2966 {
2967 bool processed = false;
2968 tree context = NULL_TREE;
2969 Entity_Id gnat_scope;
2970
2971 node = *it;
2972
2973 /* If FORCE, get the innermost elaborated scope. Otherwise, just try to
2974 get the first scope. */
2975 gnat_scope = node->gnat_scope;
2976 while (Present (gnat_scope))
2977 {
2978 context = compute_deferred_decl_context (gnat_scope);
7c775aca 2979 if (!force || context)
9a30c7c4
AC
2980 break;
2981 gnat_scope = get_debug_scope (gnat_scope, NULL);
2982 }
2983
2984 /* Imported declarations must not be in a local context (i.e. not inside
2985 a function). */
7c775aca 2986 if (context && node->force_global > 0)
9a30c7c4
AC
2987 {
2988 tree ctx = context;
2989
7c775aca 2990 while (ctx)
9a30c7c4
AC
2991 {
2992 gcc_assert (TREE_CODE (ctx) != FUNCTION_DECL);
7c775aca 2993 ctx = DECL_P (ctx) ? DECL_CONTEXT (ctx) : TYPE_CONTEXT (ctx);
9a30c7c4
AC
2994 }
2995 }
2996
2997 /* If FORCE, we want to get rid of all nodes in the queue: in case there
2998 was no elaborated scope, use the global context. */
7c775aca 2999 if (force && !context)
9a30c7c4
AC
3000 context = get_global_context ();
3001
7c775aca 3002 if (context)
9a30c7c4
AC
3003 {
3004 tree t;
3005 int i;
3006
3007 DECL_CONTEXT (node->decl) = context;
3008
3009 /* Propagate it to the TYPE_CONTEXT attributes of the requested
3010 ..._TYPE nodes. */
3011 FOR_EACH_VEC_ELT (node->types, i, t)
3012 {
24d4b3d5 3013 gnat_set_type_context (t, context);
9a30c7c4
AC
3014 }
3015 processed = true;
3016 }
3017
 3018 /* If this node has been successfully processed, remove it from the
3019 queue. Then move to the next node. */
3020 if (processed)
3021 {
3022 *it = node->next;
3023 node->types.release ();
3024 free (node);
3025 }
3026 else
3027 it = &node->next;
3028 }
3029}
3030
3031
5c475ba9
EB
3032/* Return VALUE scaled by the biggest power-of-2 factor of EXPR. */
3033
3034static unsigned int
3035scale_by_factor_of (tree expr, unsigned int value)
3036{
3b5d86ec
PMR
3037 unsigned HOST_WIDE_INT addend = 0;
3038 unsigned HOST_WIDE_INT factor = 1;
3039
3040 /* Peel conversions around EXPR and try to extract bodies from function
3041 calls: it is possible to get the scale factor from size functions. */
5c475ba9 3042 expr = remove_conversions (expr, true);
3b5d86ec
PMR
3043 if (TREE_CODE (expr) == CALL_EXPR)
3044 expr = maybe_inline_call_in_expr (expr);
3045
3046 /* Sometimes we get PLUS_EXPR (BIT_AND_EXPR (..., X), Y), where Y is a
3047 multiple of the scale factor we are looking for. */
3048 if (TREE_CODE (expr) == PLUS_EXPR
3049 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
3050 && tree_fits_uhwi_p (TREE_OPERAND (expr, 1)))
3051 {
3052 addend = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
3053 expr = TREE_OPERAND (expr, 0);
3054 }
5c475ba9
EB
3055
3056 /* An expression which is a bitwise AND with a mask has a power-of-2 factor
3057 corresponding to the number of trailing zeros of the mask. */
3058 if (TREE_CODE (expr) == BIT_AND_EXPR
3059 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST)
3060 {
3061 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
3062 unsigned int i = 0;
3063
3064 while ((mask & 1) == 0 && i < HOST_BITS_PER_WIDE_INT)
3065 {
3066 mask >>= 1;
3b5d86ec 3067 factor *= 2;
5c475ba9
EB
3068 i++;
3069 }
3070 }
3071
3b5d86ec
PMR
3072 /* If the addend is not a multiple of the factor we found, give up. In
3073 theory we could find a smaller common factor but it's useless for our
3074 needs. This situation arises when dealing with a field F1 with no
3075 alignment requirement but that is following a field F2 with such
3076 requirements. As long as we have F2's offset, we don't need alignment
3077 information to compute F1's. */
3078 if (addend % factor != 0)
3079 factor = 1;
3080
3081 return factor * value;
5c475ba9
EB
3082}
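/* Illustrative sketch (not from the original source): a bit offset of the
   form OFFSET & -8 has at least 3 trailing zero bits, so an alignment of
   8 is scaled up to 64 here.  OFFSET is assumed to be a non-constant
   expression of sizetype, so that the BIT_AND_EXPR survives folding.  */

static unsigned int
example_scale_by_factor_of (tree offset)
{
  tree aligned = fold_build2 (BIT_AND_EXPR, sizetype, offset,
			      build_int_cst (sizetype, -8));
  return scale_by_factor_of (aligned, 8);  /* 8 * 8 = 64.  */
}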
3083
7d7fcb08 3084/* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
a1ab4c31
AC
3085 unless we can prove these 2 fields are laid out in such a way that no gap
 3086 exists between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
3087 is the distance in bits between the end of PREV_FIELD and the starting
3088 position of CURR_FIELD. It is ignored if null. */
3089
3090static bool
3091potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
3092{
 3093 /* If this is the first field of the record, there cannot be any gap. */
3094 if (!prev_field)
3095 return false;
3096
78df6221 3097 /* If the previous field is a union type, then return false: The only
a1ab4c31
AC
3098 time when such a field is not the last field of the record is when
3099 there are other components at fixed positions after it (meaning there
3100 was a rep clause for every field), in which case we don't want the
3101 alignment constraint to override them. */
3102 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
3103 return false;
3104
3105 /* If the distance between the end of prev_field and the beginning of
3106 curr_field is constant, then there is a gap if the value of this
3107 constant is not null. */
cc269bb6 3108 if (offset && tree_fits_uhwi_p (offset))
a1ab4c31
AC
3109 return !integer_zerop (offset);
3110
3111 /* If the size and position of the previous field are constant,
3112 then check the sum of this size and position. There will be a gap
 3113 iff it is not a multiple of the current field alignment. */
cc269bb6
RS
3114 if (tree_fits_uhwi_p (DECL_SIZE (prev_field))
3115 && tree_fits_uhwi_p (bit_position (prev_field)))
ae7e9ddd
RS
3116 return ((tree_to_uhwi (bit_position (prev_field))
3117 + tree_to_uhwi (DECL_SIZE (prev_field)))
a1ab4c31
AC
3118 % DECL_ALIGN (curr_field) != 0);
3119
3120 /* If both the position and size of the previous field are multiples
3121 of the current field alignment, there cannot be any gap. */
3122 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
3123 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
3124 return false;
3125
 3126 /* Fallback: return that there may be a potential gap. */
3127 return true;
3128}
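/* Illustrative sketch (not from the original source): walk the fields of a
   finished record type and report whether any consecutive pair may be
   separated by padding.  The example_* name is an example only; a null
   OFFSET is passed since it is optional.  */

static bool
example_record_may_have_gaps (tree record_type)
{
  tree prev = NULL_TREE;

  for (tree field = TYPE_FIELDS (record_type); field;
       field = DECL_CHAIN (field))
    {
      if (potential_alignment_gap (prev, field, NULL_TREE))
	return true;
      prev = field;
    }

  return false;
}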
3129
6249559b
EB
3130/* Return a LABEL_DECL with NAME. GNAT_NODE is used for the position of
3131 the decl. */
a1ab4c31
AC
3132
3133tree
6249559b 3134create_label_decl (tree name, Node_Id gnat_node)
a1ab4c31 3135{
88a94e2b 3136 tree label_decl
6249559b 3137 = build_decl (input_location, LABEL_DECL, name, void_type_node);
a1ab4c31 3138
88a94e2b
EB
3139 DECL_MODE (label_decl) = VOIDmode;
3140
3141 /* Add this decl to the current binding level. */
3142 gnat_pushdecl (label_decl, gnat_node);
a1ab4c31
AC
3143
3144 return label_decl;
3145}
3146\f
6249559b
EB
3147/* Return a FUNCTION_DECL node. NAME is the name of the subprogram, ASM_NAME
3148 its assembler name, TYPE its type (a FUNCTION_TYPE node), PARAM_DECL_LIST
3149 the list of its parameters (a list of PARM_DECL nodes chained through the
3150 DECL_CHAIN field).
a1ab4c31 3151
2ade427a
EB
3152 INLINE_STATUS describes the inline flags to be set on the FUNCTION_DECL.
3153
3154 CONST_FLAG, PUBLIC_FLAG, EXTERN_FLAG, VOLATILE_FLAG are used to set the
3155 appropriate flags on the FUNCTION_DECL.
c1a569ef
EB
3156
3157 ARTIFICIAL_P is true if the subprogram was generated by the compiler.
3158
3159 DEBUG_INFO_P is true if we need to write debug information for it.
3160
2ade427a
EB
3161 ATTR_LIST is the list of attributes to be attached to the subprogram.
3162
c1a569ef 3163 GNAT_NODE is used for the position of the decl. */
a1ab4c31
AC
3164
3165tree
6249559b 3166create_subprog_decl (tree name, tree asm_name, tree type, tree param_decl_list,
2056c5ed
EB
3167 enum inline_status_t inline_status, bool const_flag,
3168 bool public_flag, bool extern_flag, bool volatile_flag,
3169 bool artificial_p, bool debug_info_p,
6249559b 3170 struct attrib *attr_list, Node_Id gnat_node)
a1ab4c31 3171{
6249559b
EB
3172 tree subprog_decl = build_decl (input_location, FUNCTION_DECL, name, type);
3173 tree result_decl
3174 = build_decl (input_location, RESULT_DECL, NULL_TREE, TREE_TYPE (type));
7d7fcb08 3175 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
a1ab4c31 3176
c1a569ef 3177 DECL_ARTIFICIAL (subprog_decl) = artificial_p;
7d7fcb08 3178 DECL_EXTERNAL (subprog_decl) = extern_flag;
0e24192c
EB
3179
3180 switch (inline_status)
3181 {
3182 case is_suppressed:
3183 DECL_UNINLINABLE (subprog_decl) = 1;
3184 break;
3185
3186 case is_disabled:
3187 break;
3188
f087ea44
AC
3189 case is_required:
3190 if (Back_End_Inlining)
1eb58520
AC
3191 decl_attributes (&subprog_decl,
3192 tree_cons (get_identifier ("always_inline"),
3193 NULL_TREE, NULL_TREE),
3194 ATTR_FLAG_TYPE_IN_PLACE);
3195
f087ea44
AC
3196 /* ... fall through ... */
3197
0e24192c
EB
3198 case is_enabled:
3199 DECL_DECLARED_INLINE_P (subprog_decl) = 1;
c1a569ef 3200 DECL_NO_INLINE_WARNING_P (subprog_decl) = artificial_p;
0e24192c
EB
3201 break;
3202
3203 default:
3204 gcc_unreachable ();
3205 }
7d7fcb08 3206
c1a569ef
EB
3207 if (!debug_info_p)
3208 DECL_IGNORED_P (subprog_decl) = 1;
3209
2056c5ed 3210 TREE_READONLY (subprog_decl) = TYPE_READONLY (type) | const_flag;
7d7fcb08 3211 TREE_PUBLIC (subprog_decl) = public_flag;
2056c5ed
EB
3212 TREE_SIDE_EFFECTS (subprog_decl)
3213 = TREE_THIS_VOLATILE (subprog_decl)
3214 = TYPE_VOLATILE (type) | volatile_flag;
a1ab4c31 3215
d47d0a8d
EB
3216 DECL_ARTIFICIAL (result_decl) = 1;
3217 DECL_IGNORED_P (result_decl) = 1;
6249559b 3218 DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (type);
d47d0a8d 3219 DECL_RESULT (subprog_decl) = result_decl;
a1ab4c31 3220
2b50232a
EB
3221 process_attributes (&subprog_decl, &attr_list, true, gnat_node);
3222
3223 /* Add this decl to the current binding level. */
3224 gnat_pushdecl (subprog_decl, gnat_node);
3225
a1ab4c31
AC
3226 if (asm_name)
3227 {
2b50232a
EB
3228 /* Let the target mangle the name if this isn't a verbatim asm. */
3229 if (*IDENTIFIER_POINTER (asm_name) != '*')
3230 asm_name = targetm.mangle_decl_assembler_name (subprog_decl, asm_name);
3231
a1ab4c31
AC
3232 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
3233
3234 /* The expand_main_function circuitry expects "main_identifier_node" to
3235 designate the DECL_NAME of the 'main' entry point, in turn expected
3236 to be declared as the "main" function literally by default. Ada
3237 program entry points are typically declared with a different name
3238 within the binder generated file, exported as 'main' to satisfy the
cfbb663c 3239 system expectations. Force main_identifier_node in this case. */
a1ab4c31 3240 if (asm_name == main_identifier_node)
cfbb663c 3241 DECL_NAME (subprog_decl) = main_identifier_node;
a1ab4c31
AC
3242 }
3243
a1ab4c31
AC
3244 /* Output the assembler code and/or RTL for the declaration. */
3245 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
3246
3247 return subprog_decl;
3248}
3249\f
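/* Illustrative sketch (not from the original source): declare an imported
   parameterless function returning Integer, roughly what would be done for
   a simple imported subprogram.  The name "f" and the example_* identifier
   are examples only.  */

static tree
example_create_subprog_decl (void)
{
  tree ftype = build_function_type_list (integer_type_node, NULL_TREE);

  return create_subprog_decl (get_identifier ("f"), NULL_TREE, ftype,
			      NULL_TREE /* param_decl_list */,
			      is_disabled /* inline_status */,
			      false /* const_flag */,
			      true /* public_flag */,
			      true /* extern_flag */,
			      false /* volatile_flag */,
			      false /* artificial_p */,
			      true /* debug_info_p */,
			      NULL /* attr_list */, Empty);
}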
3250/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
3251 body. This routine needs to be invoked before processing the declarations
3252 appearing in the subprogram. */
3253
3254void
3255begin_subprog_body (tree subprog_decl)
3256{
3257 tree param_decl;
3258
a1ab4c31
AC
3259 announce_function (subprog_decl);
3260
0ae44446
JR
3261 /* This function is being defined. */
3262 TREE_STATIC (subprog_decl) = 1;
3263
e2d13a4a
EB
3264 /* The failure of this assertion will likely come from a wrong context for
3265 the subprogram body, e.g. another procedure for a procedure declared at
3266 library level. */
3267 gcc_assert (current_function_decl == decl_function_context (subprog_decl));
3268
58c8f770
EB
3269 current_function_decl = subprog_decl;
3270
a1ab4c31
AC
3271 /* Enter a new binding level and show that all the parameters belong to
3272 this function. */
3273 gnat_pushlevel ();
a09d56d8 3274
a1ab4c31 3275 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
910ad8de 3276 param_decl = DECL_CHAIN (param_decl))
a1ab4c31
AC
3277 DECL_CONTEXT (param_decl) = subprog_decl;
3278
3279 make_decl_rtl (subprog_decl);
a1ab4c31
AC
3280}
3281
71196d4e 3282/* Finish translating the current subprogram and set its BODY. */
a1ab4c31
AC
3283
3284void
a406865a 3285end_subprog_body (tree body)
a1ab4c31
AC
3286{
3287 tree fndecl = current_function_decl;
3288
bd9c7fb9 3289 /* Attach the BLOCK for this level to the function and pop the level. */
a1ab4c31
AC
3290 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
3291 DECL_INITIAL (fndecl) = current_binding_level->block;
3292 gnat_poplevel ();
3293
a1ab4c31
AC
3294 /* Mark the RESULT_DECL as being in this subprogram. */
3295 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
3296
a963da4d
EB
3297 /* The body should be a BIND_EXPR whose BLOCK is the top-level one. */
3298 if (TREE_CODE (body) == BIND_EXPR)
3299 {
3300 BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
3301 DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
3302 }
3303
a1ab4c31
AC
3304 DECL_SAVED_TREE (fndecl) = body;
3305
228ee426 3306 current_function_decl = decl_function_context (fndecl);
71196d4e
EB
3307}
3308
3309/* Wrap up compilation of SUBPROG_DECL, a subprogram body. */
a1ab4c31 3310
71196d4e
EB
3311void
3312rest_of_subprog_body_compilation (tree subprog_decl)
3313{
a1ab4c31
AC
3314 /* We cannot track the location of errors past this point. */
3315 error_gnat_node = Empty;
3316
3317 /* If we're only annotating types, don't actually compile this function. */
3318 if (type_annotate_only)
3319 return;
3320
a406865a 3321 /* Dump functions before gimplification. */
71196d4e 3322 dump_function (TDI_original, subprog_decl);
a406865a 3323
228ee426 3324 if (!decl_function_context (subprog_decl))
3dafb85c 3325 cgraph_node::finalize_function (subprog_decl, false);
a1ab4c31
AC
3326 else
3327 /* Register this function with cgraph just far enough to get it
3328 added to our parent's nested function list. */
037e5573 3329 (void) cgraph_node::get_create (subprog_decl);
a1ab4c31
AC
3330}
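/* Illustrative sketch (not from the original source): the expected calling
   sequence around a subprogram body.  In the real translator the BODY
   statements are generated between the first two calls; the example_*
   name is an example only.  */

static void
example_emit_subprog_body (tree subprog_decl, tree body)
{
  begin_subprog_body (subprog_decl);
  /* ... translate declarations and statements into BODY here ...  */
  end_subprog_body (body);
  rest_of_subprog_body_compilation (subprog_decl);
}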
3331
a1ab4c31
AC
3332tree
3333gnat_builtin_function (tree decl)
3334{
3335 gnat_pushdecl (decl, Empty);
3336 return decl;
3337}
3338
3339/* Return an integer type with the number of bits of precision given by
3340 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
3341 it is a signed type. */
3342
3343tree
3344gnat_type_for_size (unsigned precision, int unsignedp)
3345{
3346 tree t;
3347 char type_name[20];
3348
3349 if (precision <= 2 * MAX_BITS_PER_WORD
3350 && signed_and_unsigned_types[precision][unsignedp])
3351 return signed_and_unsigned_types[precision][unsignedp];
3352
3353 if (unsignedp)
3354 t = make_unsigned_type (precision);
3355 else
3356 t = make_signed_type (precision);
3357
3358 if (precision <= 2 * MAX_BITS_PER_WORD)
3359 signed_and_unsigned_types[precision][unsignedp] = t;
3360
3361 if (!TYPE_NAME (t))
3362 {
c3d79c60 3363 sprintf (type_name, "%sSIGNED_%u", unsignedp ? "UN" : "", precision);
a1ab4c31
AC
3364 TYPE_NAME (t) = get_identifier (type_name);
3365 }
3366
3367 return t;
3368}
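/* Illustrative sketch (not from the original source): results are cached
   per precision and signedness, so repeated queries return the same node
   (named e.g. "UNSIGNED_8" or "SIGNED_32").  The example_* name is an
   example only.  */

static void
example_gnat_type_for_size (void)
{
  tree u8 = gnat_type_for_size (8, 1);
  tree s32 = gnat_type_for_size (32, 0);

  gcc_checking_assert (u8 == gnat_type_for_size (8, 1));
  gcc_checking_assert (TYPE_PRECISION (s32) == 32);
  gcc_checking_assert (TYPE_UNSIGNED (u8) && !TYPE_UNSIGNED (s32));
}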
3369
3370/* Likewise for floating-point types. */
3371
3372static tree
ef4bddc2 3373float_type_for_precision (int precision, machine_mode mode)
a1ab4c31
AC
3374{
3375 tree t;
3376 char type_name[20];
3377
3378 if (float_types[(int) mode])
3379 return float_types[(int) mode];
3380
3381 float_types[(int) mode] = t = make_node (REAL_TYPE);
3382 TYPE_PRECISION (t) = precision;
3383 layout_type (t);
3384
3385 gcc_assert (TYPE_MODE (t) == mode);
3386 if (!TYPE_NAME (t))
3387 {
3388 sprintf (type_name, "FLOAT_%d", precision);
3389 TYPE_NAME (t) = get_identifier (type_name);
3390 }
3391
3392 return t;
3393}
3394
3395/* Return a data type that has machine mode MODE. UNSIGNEDP selects
3396 an unsigned type; otherwise a signed type is returned. */
3397
3398tree
ef4bddc2 3399gnat_type_for_mode (machine_mode mode, int unsignedp)
a1ab4c31
AC
3400{
3401 if (mode == BLKmode)
3402 return NULL_TREE;
2799d18c
EB
3403
3404 if (mode == VOIDmode)
a1ab4c31 3405 return void_type_node;
2799d18c
EB
3406
3407 if (COMPLEX_MODE_P (mode))
a1ab4c31 3408 return NULL_TREE;
2799d18c
EB
3409
3410 if (SCALAR_FLOAT_MODE_P (mode))
a1ab4c31 3411 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2799d18c
EB
3412
3413 if (SCALAR_INT_MODE_P (mode))
a1ab4c31 3414 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2799d18c
EB
3415
3416 if (VECTOR_MODE_P (mode))
3417 {
ef4bddc2 3418 machine_mode inner_mode = GET_MODE_INNER (mode);
2799d18c
EB
3419 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
3420 if (inner_type)
3421 return build_vector_type_for_mode (inner_type, mode);
3422 }
3423
3424 return NULL_TREE;
a1ab4c31
AC
3425}
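/* Illustrative sketch (not from the original source): scalar integer modes
   map onto the types returned by gnat_type_for_size, while BLKmode and
   complex modes yield NULL_TREE.  The example_* name is an example only.  */

static void
example_gnat_type_for_mode (void)
{
  gcc_checking_assert (gnat_type_for_mode (SImode, 1)
		       == gnat_type_for_size (GET_MODE_BITSIZE (SImode), 1));
  gcc_checking_assert (!gnat_type_for_mode (BLKmode, 0));
}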
3426
9a1bdc31
EB
3427/* Return the signed or unsigned version of TYPE_NODE, a scalar type, the
3428 signedness being specified by UNSIGNEDP. */
a1ab4c31
AC
3429
3430tree
9a1bdc31 3431gnat_signed_or_unsigned_type_for (int unsignedp, tree type_node)
a1ab4c31 3432{
825da0d2
EB
3433 if (type_node == char_type_node)
3434 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
3435
9a1bdc31 3436 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), unsignedp);
a1ab4c31
AC
3437
3438 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
3439 {
afc737f0 3440 type = copy_type (type);
a1ab4c31
AC
3441 TREE_TYPE (type) = type_node;
3442 }
3443 else if (TREE_TYPE (type_node)
3444 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
3445 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
3446 {
afc737f0 3447 type = copy_type (type);
a1ab4c31
AC
3448 TREE_TYPE (type) = TREE_TYPE (type_node);
3449 }
3450
3451 return type;
3452}
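/* Illustrative sketch (not from the original source): char_type_node maps
   to the explicitly signed/unsigned char types, as handled above.  The
   example_* name is an example only.  */

static void
example_signed_or_unsigned_type_for (void)
{
  gcc_checking_assert (gnat_signed_or_unsigned_type_for (1, char_type_node)
		       == unsigned_char_type_node);
  gcc_checking_assert (gnat_signed_or_unsigned_type_for (0, char_type_node)
		       == signed_char_type_node);
}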
3453
3454/* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
3455 transparently converted to each other. */
3456
3457int
3458gnat_types_compatible_p (tree t1, tree t2)
3459{
3460 enum tree_code code;
3461
3462 /* This is the default criterion. */
3463 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
3464 return 1;
3465
3466 /* We only check structural equivalence here. */
3467 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
3468 return 0;
3469
7948ae37
OH
3470 /* Vector types are also compatible if they have the same number of subparts
3471 and the same form of (scalar) element type. */
3472 if (code == VECTOR_TYPE
3473 && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
3474 && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
3475 && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
3476 return 1;
3477
cfa0bd19 3478 /* Array types are also compatible if they are constrained and have the same
ee45a32d 3479 domain(s), the same component type and the same scalar storage order. */
a1ab4c31 3480 if (code == ARRAY_TYPE
0adef32b
JJ
3481 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
3482 || (TYPE_DOMAIN (t1)
b4680ca1 3483 && TYPE_DOMAIN (t2)
0adef32b
JJ
3484 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
3485 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
3486 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
cfa0bd19 3487 TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
96bba5e6
EB
3488 && (TREE_TYPE (t1) == TREE_TYPE (t2)
3489 || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
ee45a32d
EB
3490 && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))))
3491 && TYPE_REVERSE_STORAGE_ORDER (t1) == TYPE_REVERSE_STORAGE_ORDER (t2))
a1ab4c31
AC
3492 return 1;
3493
a1ab4c31
AC
3494 return 0;
3495}
523e82a7 3496
71196d4e
EB
3497/* Return true if EXPR is a useless type conversion. */
3498
3499bool
3500gnat_useless_type_conversion (tree expr)
3501{
3502 if (CONVERT_EXPR_P (expr)
3503 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
3504 || TREE_CODE (expr) == NON_LVALUE_EXPR)
3505 return gnat_types_compatible_p (TREE_TYPE (expr),
3506 TREE_TYPE (TREE_OPERAND (expr, 0)));
3507
3508 return false;
3509}
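/* Illustrative sketch (not from the original source): a conversion to the
   main variant of an expression's own type is detected as useless.  EXPR
   is assumed to have a scalar type so that NOP_EXPR is a valid conversion;
   the example_* name is an example only.  */

static void
example_useless_conversion (tree expr)
{
  tree variant = TYPE_MAIN_VARIANT (TREE_TYPE (expr));
  tree conv = build1 (NOP_EXPR, variant, expr);

  gcc_checking_assert (gnat_useless_type_conversion (conv));
}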
3510
523e82a7
EB
3511/* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
3512
3513bool
3514fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
3515 bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
3516{
3517 return TYPE_CI_CO_LIST (t) == cico_list
3518 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
3519 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
3520 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
3521}
a1ab4c31
AC
3522\f
3523/* EXP is an expression for the size of an object. If this size contains
3524 discriminant references, replace them with the maximum (if MAX_P) or
3525 minimum (if !MAX_P) possible value of the discriminant. */
3526
3527tree
3528max_size (tree exp, bool max_p)
3529{
3530 enum tree_code code = TREE_CODE (exp);
3531 tree type = TREE_TYPE (exp);
3532
3533 switch (TREE_CODE_CLASS (code))
3534 {
3535 case tcc_declaration:
3536 case tcc_constant:
3537 return exp;
3538
3539 case tcc_vl_exp:
3540 if (code == CALL_EXPR)
3541 {
f82a627c
EB
3542 tree t, *argarray;
3543 int n, i;
3544
3545 t = maybe_inline_call_in_expr (exp);
3546 if (t)
3547 return max_size (t, max_p);
a1ab4c31 3548
f82a627c
EB
3549 n = call_expr_nargs (exp);
3550 gcc_assert (n > 0);
2bb1fc26 3551 argarray = XALLOCAVEC (tree, n);
a1ab4c31
AC
3552 for (i = 0; i < n; i++)
3553 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
3554 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
3555 }
3556 break;
3557
3558 case tcc_reference:
3559 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
3560 modify. Otherwise, we treat it like a variable. */
1eb58520
AC
3561 if (CONTAINS_PLACEHOLDER_P (exp))
3562 {
3563 tree val_type = TREE_TYPE (TREE_OPERAND (exp, 1));
3564 tree val = (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type));
3565 return max_size (convert (get_base_type (val_type), val), true);
3566 }
a1ab4c31 3567
1eb58520 3568 return exp;
a1ab4c31
AC
3569
3570 case tcc_comparison:
3571 return max_p ? size_one_node : size_zero_node;
3572
3573 case tcc_unary:
ce3da0d0
EB
3574 if (code == NON_LVALUE_EXPR)
3575 return max_size (TREE_OPERAND (exp, 0), max_p);
6625d7bc 3576
ce3da0d0
EB
3577 return fold_build1 (code, type,
3578 max_size (TREE_OPERAND (exp, 0),
3579 code == NEGATE_EXPR ? !max_p : max_p));
3580
a1ab4c31 3581 case tcc_binary:
ce3da0d0
EB
3582 {
3583 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
3584 tree rhs = max_size (TREE_OPERAND (exp, 1),
3585 code == MINUS_EXPR ? !max_p : max_p);
3586
3587 /* Special-case wanting the maximum value of a MIN_EXPR.
3588 In that case, if one side overflows, return the other. */
3589 if (max_p && code == MIN_EXPR)
3590 {
3591 if (TREE_CODE (rhs) == INTEGER_CST && TREE_OVERFLOW (rhs))
3592 return lhs;
3593
3594 if (TREE_CODE (lhs) == INTEGER_CST && TREE_OVERFLOW (lhs))
3595 return rhs;
3596 }
3597
3598 /* Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
3599 overflowing and the RHS a variable. */
3600 if ((code == MINUS_EXPR || code == PLUS_EXPR)
3601 && TREE_CODE (lhs) == INTEGER_CST
3602 && TREE_OVERFLOW (lhs)
396e67d2 3603 && TREE_CODE (rhs) != INTEGER_CST)
ce3da0d0
EB
3604 return lhs;
3605
396e67d2
EB
3606 /* If we are going to subtract a "negative" value in an unsigned type,
3607 do the operation as an addition of the negated value, in order to
3608 avoid creating a spurious overflow below. */
3609 if (code == MINUS_EXPR
3610 && TYPE_UNSIGNED (type)
3611 && TREE_CODE (rhs) == INTEGER_CST
3612 && !TREE_OVERFLOW (rhs)
3613 && tree_int_cst_sign_bit (rhs) != 0)
3614 {
3615 rhs = fold_build1 (NEGATE_EXPR, type, rhs);
3616 code = PLUS_EXPR;
3617 }
3618
3619 /* We need to detect overflows so we call size_binop here. */
ce3da0d0
EB
3620 return size_binop (code, lhs, rhs);
3621 }
3622
a1ab4c31
AC
3623 case tcc_expression:
3624 switch (TREE_CODE_LENGTH (code))
3625 {
3626 case 1:
722356ce
EB
3627 if (code == SAVE_EXPR)
3628 return exp;
ce3da0d0
EB
3629
3630 return fold_build1 (code, type,
3631 max_size (TREE_OPERAND (exp, 0), max_p));
a1ab4c31
AC
3632
3633 case 2:
3634 if (code == COMPOUND_EXPR)
3635 return max_size (TREE_OPERAND (exp, 1), max_p);
3636
ce3da0d0
EB
3637 return fold_build2 (code, type,
3638 max_size (TREE_OPERAND (exp, 0), max_p),
3639 max_size (TREE_OPERAND (exp, 1), max_p));
a1ab4c31
AC
3640
3641 case 3:
722356ce 3642 if (code == COND_EXPR)
a1ab4c31
AC
3643 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
3644 max_size (TREE_OPERAND (exp, 1), max_p),
3645 max_size (TREE_OPERAND (exp, 2), max_p));
ce3da0d0
EB
3646
3647 default:
3648 break;
a1ab4c31
AC
3649 }
3650
3651 /* Other tree classes cannot happen. */
3652 default:
3653 break;
3654 }
3655
3656 gcc_unreachable ();
3657}
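/* Illustrative sketch (not from the original source): for a conditional
   size, the maximum is the MAX_EXPR of the maxima of the two arms, which
   is how discriminant-dependent sizes get a safe upper bound.  COND is
   assumed to be a boolean expression and the sizes to be of sizetype;
   the example_* name is an example only.  */

static tree
example_max_size (tree cond, tree size_true, tree size_false)
{
  tree size = fold_build3 (COND_EXPR, sizetype, cond, size_true, size_false);
  return max_size (size, true);
}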
3658\f
3659/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
3660 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
3661 Return a constructor for the template. */
3662
3663tree
3664build_template (tree template_type, tree array_type, tree expr)
3665{
9771b263 3666 vec<constructor_elt, va_gc> *template_elts = NULL;
a1ab4c31
AC
3667 tree bound_list = NULL_TREE;
3668 tree field;
3669
3670 while (TREE_CODE (array_type) == RECORD_TYPE
315cff15 3671 && (TYPE_PADDING_P (array_type)
a1ab4c31
AC
3672 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
3673 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
3674
3675 if (TREE_CODE (array_type) == ARRAY_TYPE
3676 || (TREE_CODE (array_type) == INTEGER_TYPE
3677 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
3678 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
3679
3680 /* First make the list for a CONSTRUCTOR for the template. Go down the
3681 field list of the template instead of the type chain because this
3682 array might be an Ada array of arrays and we can't tell where the
3683 nested arrays stop being the underlying object. */
3684
3685 for (field = TYPE_FIELDS (template_type); field;
3686 (bound_list
3687 ? (bound_list = TREE_CHAIN (bound_list))
3688 : (array_type = TREE_TYPE (array_type))),
910ad8de 3689 field = DECL_CHAIN (DECL_CHAIN (field)))
a1ab4c31
AC
3690 {
3691 tree bounds, min, max;
3692
3693 /* If we have a bound list, get the bounds from there. Likewise
3694 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
3695 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
3696 This will give us a maximum range. */
3697 if (bound_list)
3698 bounds = TREE_VALUE (bound_list);
3699 else if (TREE_CODE (array_type) == ARRAY_TYPE)
3700 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
3701 else if (expr && TREE_CODE (expr) == PARM_DECL
3702 && DECL_BY_COMPONENT_PTR_P (expr))
3703 bounds = TREE_TYPE (field);
3704 else
3705 gcc_unreachable ();
3706
3707 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
910ad8de 3708 max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));
a1ab4c31
AC
3709
3710 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
3711 substitute it from OBJECT. */
3712 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
3713 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
3714
0e228dd9 3715 CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
910ad8de 3716 CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
a1ab4c31
AC
3717 }
3718
0e228dd9 3719 return gnat_build_constructor (template_type, template_elts);
a1ab4c31
AC
3720}
3721\f
e63b36bd
EB
3722/* Return true if TYPE is suitable for the element type of a vector. */
3723
3724static bool
3725type_for_vector_element_p (tree type)
3726{
ef4bddc2 3727 machine_mode mode;
e63b36bd
EB
3728
3729 if (!INTEGRAL_TYPE_P (type)
3730 && !SCALAR_FLOAT_TYPE_P (type)
3731 && !FIXED_POINT_TYPE_P (type))
3732 return false;
3733
3734 mode = TYPE_MODE (type);
3735 if (GET_MODE_CLASS (mode) != MODE_INT
3736 && !SCALAR_FLOAT_MODE_P (mode)
3737 && !ALL_SCALAR_FIXED_POINT_MODE_P (mode))
3738 return false;
3739
3740 return true;
3741}
3742
3743/* Return a vector type given the SIZE and the INNER_TYPE, or NULL_TREE if
3744 this is not possible. If ATTRIBUTE is non-zero, we are processing the
3745 attribute declaration and want to issue error messages on failure. */
3746
3747static tree
3748build_vector_type_for_size (tree inner_type, tree size, tree attribute)
3749{
3750 unsigned HOST_WIDE_INT size_int, inner_size_int;
3751 int nunits;
3752
3753 /* Silently punt on variable sizes. We can't make vector types for them,
 3754 we need to ignore them on front-end generated subtypes of unconstrained

3755 base types, and this attribute is for binding implementors, not end
3756 users, so we should never get there from legitimate explicit uses. */
3757 if (!tree_fits_uhwi_p (size))
3758 return NULL_TREE;
3759 size_int = tree_to_uhwi (size);
3760
3761 if (!type_for_vector_element_p (inner_type))
3762 {
3763 if (attribute)
3764 error ("invalid element type for attribute %qs",
3765 IDENTIFIER_POINTER (attribute));
3766 return NULL_TREE;
3767 }
3768 inner_size_int = tree_to_uhwi (TYPE_SIZE_UNIT (inner_type));
3769
3770 if (size_int % inner_size_int)
3771 {
3772 if (attribute)
3773 error ("vector size not an integral multiple of component size");
3774 return NULL_TREE;
3775 }
3776
3777 if (size_int == 0)
3778 {
3779 if (attribute)
3780 error ("zero vector size");
3781 return NULL_TREE;
3782 }
3783
3784 nunits = size_int / inner_size_int;
3785 if (nunits & (nunits - 1))
3786 {
3787 if (attribute)
3788 error ("number of components of vector not a power of two");
3789 return NULL_TREE;
3790 }
3791
3792 return build_vector_type (inner_type, nunits);
3793}
3794
3795/* Return a vector type whose representative array type is ARRAY_TYPE, or
3796 NULL_TREE if this is not possible. If ATTRIBUTE is non-zero, we are
3797 processing the attribute and want to issue error messages on failure. */
3798
3799static tree
3800build_vector_type_for_array (tree array_type, tree attribute)
3801{
3802 tree vector_type = build_vector_type_for_size (TREE_TYPE (array_type),
3803 TYPE_SIZE_UNIT (array_type),
3804 attribute);
3805 if (!vector_type)
3806 return NULL_TREE;
3807
3808 TYPE_REPRESENTATIVE_ARRAY (vector_type) = array_type;
3809 return vector_type;
3810}
3811\f
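/* Illustrative sketch (not from the original source): a 16-byte size with
   a 32-bit integer element type yields a 4-element vector type.  The
   example_* name is an example only; NULL_TREE is passed for ATTRIBUTE so
   no diagnostics are emitted on failure.  */

static void
example_build_vector_type_for_size (void)
{
  tree elt_type = gnat_type_for_size (32, 1);
  tree vec_type
    = build_vector_type_for_size (elt_type, size_int (16), NULL_TREE);

  gcc_checking_assert (vec_type && TYPE_VECTOR_SUBPARTS (vec_type) == 4);
}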
928dfa4b
EB
3812/* Build a type to be used to represent an aliased object whose nominal type
3813 is an unconstrained array. This consists of a RECORD_TYPE containing a
3814 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
3815 If ARRAY_TYPE is that of an unconstrained array, this is used to represent
3816 an arbitrary unconstrained object. Use NAME as the name of the record.
3817 DEBUG_INFO_P is true if we need to write debug information for the type. */
a1ab4c31
AC
3818
3819tree
928dfa4b
EB
3820build_unc_object_type (tree template_type, tree object_type, tree name,
3821 bool debug_info_p)
a1ab4c31 3822{
24d4b3d5 3823 tree decl;
a1ab4c31 3824 tree type = make_node (RECORD_TYPE);
da01bfee
EB
3825 tree template_field
3826 = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
3827 NULL_TREE, NULL_TREE, 0, 1);
3828 tree array_field
3829 = create_field_decl (get_identifier ("ARRAY"), object_type, type,
3830 NULL_TREE, NULL_TREE, 0, 1);
a1ab4c31
AC
3831
3832 TYPE_NAME (type) = name;
3833 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
910ad8de 3834 DECL_CHAIN (template_field) = array_field;
928dfa4b
EB
3835 finish_record_type (type, template_field, 0, true);
3836
3837 /* Declare it now since it will never be declared otherwise. This is
3838 necessary to ensure that its subtrees are properly marked. */
24d4b3d5
AC
3839 decl = create_type_decl (name, type, true, debug_info_p, Empty);
3840
3841 /* template_type will not be used elsewhere than here, so to keep the debug
3842 info clean and in order to avoid scoping issues, make decl its
3843 context. */
3844 gnat_set_type_context (template_type, decl);
a1ab4c31
AC
3845
3846 return type;
3847}
3848
3849/* Same, taking a thin or fat pointer type instead of a template type. */
3850
3851tree
3852build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
928dfa4b 3853 tree name, bool debug_info_p)
a1ab4c31
AC
3854{
3855 tree template_type;
3856
315cff15 3857 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
a1ab4c31
AC
3858
3859 template_type
315cff15 3860 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
910ad8de 3861 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
a1ab4c31 3862 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
928dfa4b
EB
3863
3864 return
3865 build_unc_object_type (template_type, object_type, name, debug_info_p);
a1ab4c31 3866}
a1ab4c31 3867\f
229077b0
EB
3868/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
3869 In the normal case this is just two adjustments, but we have more to
3870 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
a1ab4c31
AC
3871
3872void
3873update_pointer_to (tree old_type, tree new_type)
3874{
3875 tree ptr = TYPE_POINTER_TO (old_type);
3876 tree ref = TYPE_REFERENCE_TO (old_type);
aeecf17c 3877 tree t;
a1ab4c31
AC
3878
3879 /* If this is the main variant, process all the other variants first. */
3880 if (TYPE_MAIN_VARIANT (old_type) == old_type)
aeecf17c
EB
3881 for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
3882 update_pointer_to (t, new_type);
a1ab4c31 3883
229077b0 3884 /* If no pointers and no references, we are done. */
a1ab4c31
AC
3885 if (!ptr && !ref)
3886 return;
3887
3888 /* Merge the old type qualifiers in the new type.
3889
3890 Each old variant has qualifiers for specific reasons, and the new
229077b0 3891 designated type as well. Each set of qualifiers represents useful
a1ab4c31
AC
3892 information grabbed at some point, and merging the two simply unifies
3893 these inputs into the final type description.
3894
3895 Consider for instance a volatile type frozen after an access to constant
229077b0
EB
3896 type designating it; after the designated type's freeze, we get here with
3897 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
3898 when the access type was processed. We will make a volatile and readonly
a1ab4c31
AC
3899 designated type, because that's what it really is.
3900
229077b0
EB
3901 We might also get here for a non-dummy OLD_TYPE variant with different
3902 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
a1ab4c31 3903 to private record type elaboration (see the comments around the call to
229077b0
EB
3904 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
3905 the qualifiers in those cases too, to avoid accidentally discarding the
3906 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
3907 new_type
3908 = build_qualified_type (new_type,
3909 TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
3910
3911 /* If old type and new type are identical, there is nothing to do. */
a1ab4c31
AC
3912 if (old_type == new_type)
3913 return;
3914
3915 /* Otherwise, first handle the simple case. */
3916 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
3917 {
aeecf17c
EB
3918 tree new_ptr, new_ref;
3919
3920 /* If pointer or reference already points to new type, nothing to do.
3921 This can happen as update_pointer_to can be invoked multiple times
3922 on the same couple of types because of the type variants. */
3923 if ((ptr && TREE_TYPE (ptr) == new_type)
3924 || (ref && TREE_TYPE (ref) == new_type))
3925 return;
3926
3927 /* Chain PTR and its variants at the end. */
3928 new_ptr = TYPE_POINTER_TO (new_type);
3929 if (new_ptr)
3930 {
3931 while (TYPE_NEXT_PTR_TO (new_ptr))
3932 new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
3933 TYPE_NEXT_PTR_TO (new_ptr) = ptr;
3934 }
3935 else
3936 TYPE_POINTER_TO (new_type) = ptr;
a1ab4c31 3937
aeecf17c 3938 /* Now adjust them. */
a1ab4c31 3939 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
aeecf17c 3940 for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
50179d58
EB
3941 {
3942 TREE_TYPE (t) = new_type;
3943 if (TYPE_NULL_BOUNDS (t))
3944 TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
3945 }
de9528f0 3946
aeecf17c
EB
3947 /* Chain REF and its variants at the end. */
3948 new_ref = TYPE_REFERENCE_TO (new_type);
3949 if (new_ref)
3950 {
3951 while (TYPE_NEXT_REF_TO (new_ref))
3952 new_ref = TYPE_NEXT_REF_TO (new_ref);
3953 TYPE_NEXT_REF_TO (new_ref) = ref;
3954 }
3955 else
3956 TYPE_REFERENCE_TO (new_type) = ref;
3957
3958 /* Now adjust them. */
a1ab4c31 3959 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
aeecf17c
EB
3960 for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
3961 TREE_TYPE (t) = new_type;
de9528f0
EB
3962
3963 TYPE_POINTER_TO (old_type) = NULL_TREE;
3bd6ca3f 3964 TYPE_REFERENCE_TO (old_type) = NULL_TREE;
a1ab4c31
AC
3965 }
3966
aeecf17c
EB
3967 /* Now deal with the unconstrained array case. In this case the pointer
3968 is actually a record where both fields are pointers to dummy nodes.
e3edbd56
EB
3969 Turn them into pointers to the correct types using update_pointer_to.
3970 Likewise for the pointer to the object record (thin pointer). */
a1ab4c31
AC
3971 else
3972 {
e3edbd56 3973 tree new_ptr = TYPE_POINTER_TO (new_type);
aeecf17c
EB
3974
3975 gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));
3976
e3edbd56 3977 /* If PTR already points to NEW_TYPE, nothing to do. This can happen
aeecf17c
EB
3978 since update_pointer_to can be invoked multiple times on the same
3979 couple of types because of the type variants. */
3980 if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
3981 return;
3982
a1ab4c31 3983 update_pointer_to
e3edbd56
EB
3984 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
3985 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));
a1ab4c31 3986
a1ab4c31 3987 update_pointer_to
e3edbd56
EB
3988 (TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
3989 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));
aeecf17c 3990
e3edbd56
EB
3991 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
3992 TYPE_OBJECT_RECORD_TYPE (new_type));
a1ab4c31 3993
e3edbd56 3994 TYPE_POINTER_TO (old_type) = NULL_TREE;
a1ab4c31
AC
3995 }
3996}
3997\f
8df2e902
EB
3998/* Convert EXPR, a pointer to a constrained array, into a pointer to an
3999 unconstrained one. This involves making or finding a template. */
a1ab4c31
AC
4000
4001static tree
4002convert_to_fat_pointer (tree type, tree expr)
4003{
910ad8de 4004 tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
8df2e902 4005 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
a1ab4c31 4006 tree etype = TREE_TYPE (expr);
88293f03 4007 tree template_addr;
9771b263
DN
4008 vec<constructor_elt, va_gc> *v;
4009 vec_alloc (v, 2);
a1ab4c31 4010
50179d58
EB
4011 /* If EXPR is null, make a fat pointer that contains a null pointer to the
4012 array (compare_fat_pointers ensures that this is the full discriminant)
4013 and a valid pointer to the bounds. This latter property is necessary
4014 since the compiler can hoist the load of the bounds done through it. */
a1ab4c31 4015 if (integer_zerop (expr))
0e228dd9 4016 {
50179d58
EB
4017 tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4018 tree null_bounds, t;
4019
4020 if (TYPE_NULL_BOUNDS (ptr_template_type))
4021 null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
4022 else
4023 {
4024 /* The template type can still be dummy at this point so we build an
4025 empty constructor. The middle-end will fill it in with zeros. */
90b4c164 4026 t = build_constructor (template_type, NULL);
50179d58
EB
4027 TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
4028 null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
4029 SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
4030 }
4031
0e228dd9 4032 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
50179d58
EB
4033 fold_convert (p_array_type, null_pointer_node));
4034 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
4035 t = build_constructor (type, v);
4036 /* Do not set TREE_CONSTANT so as to force T to static memory. */
4037 TREE_CONSTANT (t) = 0;
4038 TREE_STATIC (t) = 1;
4039
4040 return t;
0e228dd9 4041 }
a1ab4c31 4042
0d7de0e1
EB
4043 /* If EXPR is a thin pointer, make template and data from the record. */
4044 if (TYPE_IS_THIN_POINTER_P (etype))
a1ab4c31 4045 {
0d7de0e1 4046 tree field = TYPE_FIELDS (TREE_TYPE (etype));
a1ab4c31 4047
7d7a1fe8 4048 expr = gnat_protect_expr (expr);
88293f03
EB
4049
4050 /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
4051 the thin pointer value has been shifted so we shift it back to get
4052 the template address. */
4053 if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)))
2b45154d 4054 {
88293f03
EB
4055 template_addr
4056 = build_binary_op (POINTER_PLUS_EXPR, etype, expr,
4057 fold_build1 (NEGATE_EXPR, sizetype,
4058 byte_position
4059 (DECL_CHAIN (field))));
4060 template_addr
4061 = fold_convert (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))),
4062 template_addr);
2b45154d 4063 }
a1ab4c31 4064
88293f03
EB
4065 /* Otherwise we explicitly take the address of the fields. */
4066 else
4067 {
4068 expr = build_unary_op (INDIRECT_REF, NULL_TREE, expr);
4069 template_addr
4070 = build_unary_op (ADDR_EXPR, NULL_TREE,
64235766 4071 build_component_ref (expr, field, false));
88293f03 4072 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
64235766 4073 build_component_ref (expr, DECL_CHAIN (field),
88293f03
EB
4074 false));
4075 }
a1ab4c31 4076 }
8df2e902
EB
4077
4078 /* Otherwise, build the constructor for the template. */
a1ab4c31 4079 else
88293f03
EB
4080 template_addr
4081 = build_unary_op (ADDR_EXPR, NULL_TREE,
4082 build_template (template_type, TREE_TYPE (etype),
4083 expr));
a1ab4c31 4084
8df2e902 4085 /* The final result is a constructor for the fat pointer.
a1ab4c31 4086
8df2e902
EB
4087 If EXPR is an argument of a foreign convention subprogram, the type it
4088 points to is directly the component type. In this case, the expression
a1ab4c31 4089 type may not match the corresponding FIELD_DECL type at this point, so we
8df2e902 4090 call "convert" here to fix that up if necessary. This type consistency is
a1ab4c31 4091 required, for instance because it ensures that possible later folding of
8df2e902 4092 COMPONENT_REFs against this constructor always yields something of the
a1ab4c31
AC
4093 same type as the initial reference.
4094
8df2e902
EB
4095 Note that the call to "build_template" above is still fine because it
4096 will only refer to the provided TEMPLATE_TYPE in this case. */
88293f03
EB
4097 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), convert (p_array_type, expr));
4098 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), template_addr);
0e228dd9 4099 return gnat_build_constructor (type, v);
a1ab4c31
AC
4100}
4101\f
a1ab4c31
AC
4102/* Create an expression whose value is that of EXPR,
4103 converted to type TYPE. The TREE_TYPE of the value
4104 is always TYPE. This function implements all reasonable
4105 conversions; callers should filter out those that are
4106 not permitted by the language being compiled. */
4107
4108tree
4109convert (tree type, tree expr)
4110{
a1ab4c31
AC
4111 tree etype = TREE_TYPE (expr);
4112 enum tree_code ecode = TREE_CODE (etype);
c34f3839 4113 enum tree_code code = TREE_CODE (type);
a1ab4c31 4114
c34f3839
EB
4115 /* If the expression is already of the right type, we are done. */
4116 if (etype == type)
a1ab4c31
AC
4117 return expr;
4118
4119 /* If both input and output have padding and are of variable size, do this
4120 as an unchecked conversion. Likewise if one is a mere variant of the
4121 other, so we avoid a pointless unpad/repad sequence. */
4122 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
315cff15 4123 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
a1ab4c31
AC
4124 && (!TREE_CONSTANT (TYPE_SIZE (type))
4125 || !TREE_CONSTANT (TYPE_SIZE (etype))
842d4ee2 4126 || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
a1ab4c31
AC
4127 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
4128 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
4129 ;
4130
f88facfe
EB
4131 /* If the output type has padding, convert to the inner type and make a
4132 constructor to build the record, unless a variable size is involved. */
315cff15 4133 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
a1ab4c31 4134 {
9771b263 4135 vec<constructor_elt, va_gc> *v;
0e228dd9 4136
a1ab4c31
AC
4137 /* If we previously converted from another type and our type is
4138 of variable size, remove the conversion to avoid the need for
f88facfe 4139 variable-sized temporaries. Likewise for a conversion between
a1ab4c31
AC
4140 original and packable version. */
4141 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4142 && (!TREE_CONSTANT (TYPE_SIZE (type))
4143 || (ecode == RECORD_TYPE
4144 && TYPE_NAME (etype)
4145 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
4146 expr = TREE_OPERAND (expr, 0);
4147
4148 /* If we are just removing the padding from expr, convert the original
4149 object if we have variable size in order to avoid the need for some
f88facfe 4150 variable-sized temporaries. Likewise if the padding is a variant
a1ab4c31
AC
4151 of the other, so we avoid a pointless unpad/repad sequence. */
4152 if (TREE_CODE (expr) == COMPONENT_REF
a1ab4c31
AC
4153 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
4154 && (!TREE_CONSTANT (TYPE_SIZE (type))
842d4ee2
EB
4155 || TYPE_MAIN_VARIANT (type)
4156 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))
a1ab4c31
AC
4157 || (ecode == RECORD_TYPE
4158 && TYPE_NAME (etype)
4159 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
4160 return convert (type, TREE_OPERAND (expr, 0));
4161
431cfac1
EB
4162 /* If the inner type is of self-referential size and the expression type
4163 is a record, do this as an unchecked conversion. But first pad the
4164 expression if possible to have the same size on both sides. */
c34f3839 4165 if (ecode == RECORD_TYPE
f88facfe 4166 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
431cfac1 4167 {
980a0501 4168 if (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST)
431cfac1 4169 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
980a0501
EB
4170 false, false, false, true),
4171 expr);
431cfac1
EB
4172 return unchecked_convert (type, expr, false);
4173 }
a1ab4c31 4174
f88facfe
EB
4175 /* If we are converting between array types with variable size, do the
4176 final conversion as an unchecked conversion, again to avoid the need
4177 for some variable-sized temporaries. If valid, this conversion is
4178 very likely purely technical and without real effects. */
c34f3839 4179 if (ecode == ARRAY_TYPE
f88facfe
EB
4180 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
4181 && !TREE_CONSTANT (TYPE_SIZE (etype))
4182 && !TREE_CONSTANT (TYPE_SIZE (type)))
4183 return unchecked_convert (type,
4184 convert (TREE_TYPE (TYPE_FIELDS (type)),
4185 expr),
4186 false);
4187
9771b263 4188 vec_alloc (v, 1);
0e228dd9
NF
4189 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4190 convert (TREE_TYPE (TYPE_FIELDS (type)), expr));
4191 return gnat_build_constructor (type, v);
a1ab4c31
AC
4192 }
4193
4194 /* If the input type has padding, remove it and convert to the output type.
4195 The conditions ordering is arranged to ensure that the output type is not
4196 a padding type here, as it is not clear whether the conversion would
4197 always be correct if this was to happen. */
315cff15 4198 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
a1ab4c31
AC
4199 {
4200 tree unpadded;
4201
4202 /* If we have just converted to this padded type, just get the
4203 inner expression. */
2117b9bb
EB
4204 if (TREE_CODE (expr) == CONSTRUCTOR)
4205 unpadded = CONSTRUCTOR_ELT (expr, 0)->value;
a1ab4c31
AC
4206
4207 /* Otherwise, build an explicit component reference. */
4208 else
64235766 4209 unpadded = build_component_ref (expr, TYPE_FIELDS (etype), false);
a1ab4c31
AC
4210
4211 return convert (type, unpadded);
4212 }
4213
4214 /* If the input is a biased type, adjust first. */
4215 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4216 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
1eb58520 4217 fold_convert (TREE_TYPE (etype), expr),
a1ab4c31 4218 fold_convert (TREE_TYPE (etype),
1eb58520 4219 TYPE_MIN_VALUE (etype))));
a1ab4c31
AC
4220
4221 /* If the input is a justified modular type, we need to extract the actual
4222 object before converting it to any other type with the exceptions of an
4223 unconstrained array or of a mere type variant. It is useful to avoid the
4224 extraction and conversion in the type variant case because it could end
4225 up replacing a VAR_DECL expr by a constructor and we might be about the
4226 take the address of the result. */
4227 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
4228 && code != UNCONSTRAINED_ARRAY_TYPE
4229 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
64235766
EB
4230 return
4231 convert (type, build_component_ref (expr, TYPE_FIELDS (etype), false));
a1ab4c31
AC
4232
4233 /* If converting to a type that contains a template, convert to the data
4234 type and then build the template. */
4235 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
4236 {
910ad8de 4237 tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
9771b263
DN
4238 vec<constructor_elt, va_gc> *v;
4239 vec_alloc (v, 2);
a1ab4c31
AC
4240
4241 /* If the source already has a template, get a reference to the
4242 associated array only, as we are going to rebuild a template
4243 for the target type anyway. */
4244 expr = maybe_unconstrained_array (expr);
4245
0e228dd9
NF
4246 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4247 build_template (TREE_TYPE (TYPE_FIELDS (type)),
4248 obj_type, NULL_TREE));
73a1a803
EB
4249 if (expr)
4250 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
4251 convert (obj_type, expr));
0e228dd9 4252 return gnat_build_constructor (type, v);
a1ab4c31
AC
4253 }
4254
a1c7d797 4255 /* There are some cases of expressions that we process specially. */
a1ab4c31
AC
4256 switch (TREE_CODE (expr))
4257 {
4258 case ERROR_MARK:
4259 return expr;
4260
4261 case NULL_EXPR:
4262 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4263 conversion in gnat_expand_expr. NULL_EXPR does not represent
 4264 an actual value, so no conversion is needed. */
4265 expr = copy_node (expr);
4266 TREE_TYPE (expr) = type;
4267 return expr;
4268
4269 case STRING_CST:
4270 /* If we are converting a STRING_CST to another constrained array type,
4271 just make a new one in the proper type. */
4272 if (code == ecode && AGGREGATE_TYPE_P (etype)
4273 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
4274 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
4275 {
4276 expr = copy_node (expr);
4277 TREE_TYPE (expr) = type;
4278 return expr;
4279 }
4280 break;
4281
7948ae37 4282 case VECTOR_CST:
44e9e3ec 4283 /* If we are converting a VECTOR_CST to a mere type variant, just make
7948ae37
OH
4284 a new one in the proper type. */
4285 if (code == ecode && gnat_types_compatible_p (type, etype))
4286 {
4287 expr = copy_node (expr);
4288 TREE_TYPE (expr) = type;
4289 return expr;
4290 }
4291
a1ab4c31 4292 case CONSTRUCTOR:
44e9e3ec
EB
4293 /* If we are converting a CONSTRUCTOR to a mere type variant, or to
4294 another padding type around the same type, just make a new one in
4295 the proper type. */
4296 if (code == ecode
4297 && (gnat_types_compatible_p (type, etype)
4298 || (code == RECORD_TYPE
4299 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4300 && TREE_TYPE (TYPE_FIELDS (type))
4301 == TREE_TYPE (TYPE_FIELDS (etype)))))
a1ab4c31
AC
4302 {
4303 expr = copy_node (expr);
4304 TREE_TYPE (expr) = type;
9771b263 4305 CONSTRUCTOR_ELTS (expr) = vec_safe_copy (CONSTRUCTOR_ELTS (expr));
a1ab4c31
AC
4306 return expr;
4307 }
4308
cb3d597d
EB
4309 /* Likewise for a conversion between original and packable version, or
4310 conversion between types of the same size and with the same list of
4311 fields, but we have to work harder to preserve type consistency. */
a1ab4c31
AC
4312 if (code == ecode
4313 && code == RECORD_TYPE
cb3d597d
EB
4314 && (TYPE_NAME (type) == TYPE_NAME (etype)
4315 || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
4316
a1ab4c31 4317 {
9771b263
DN
4318 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4319 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4320 vec<constructor_elt, va_gc> *v;
4321 vec_alloc (v, len);
a1ab4c31
AC
4322 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4323 unsigned HOST_WIDE_INT idx;
4324 tree index, value;
4325
db868e1e
OH
4326 /* Whether we need to clear TREE_CONSTANT et al. on the output
4327 constructor when we convert in place. */
4328 bool clear_constant = false;
4329
a1ab4c31
AC
4330 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
4331 {
44e9e3ec
EB
4332 /* Skip the missing fields in the CONSTRUCTOR. */
4333 while (efield && field && !SAME_FIELD_P (efield, index))
4334 {
4335 efield = DECL_CHAIN (efield);
4336 field = DECL_CHAIN (field);
4337 }
cb3d597d 4338 /* The field must be the same. */
44e9e3ec 4339 if (!(efield && field && SAME_FIELD_P (efield, field)))
a1ab4c31 4340 break;
44e9e3ec
EB
4341 constructor_elt elt
4342 = {field, convert (TREE_TYPE (field), value)};
9771b263 4343 v->quick_push (elt);
db868e1e
OH
4344
4345 /* If packing has made this field a bitfield and the input
4346 value couldn't be emitted statically any more, we need to
4347 clear TREE_CONSTANT on our output. */
ced57283
EB
4348 if (!clear_constant
4349 && TREE_CONSTANT (expr)
db868e1e
OH
4350 && !CONSTRUCTOR_BITFIELD_P (efield)
4351 && CONSTRUCTOR_BITFIELD_P (field)
4352 && !initializer_constant_valid_for_bitfield_p (value))
4353 clear_constant = true;
4354
910ad8de
NF
4355 efield = DECL_CHAIN (efield);
4356 field = DECL_CHAIN (field);
a1ab4c31
AC
4357 }
4358
db868e1e
OH
4359 /* If we have been able to match and convert all the input fields
4360 to their output type, convert in place now. We'll fall back to a
4361 view conversion downstream otherwise. */
a1ab4c31
AC
4362 if (idx == len)
4363 {
4364 expr = copy_node (expr);
4365 TREE_TYPE (expr) = type;
4366 CONSTRUCTOR_ELTS (expr) = v;
db868e1e 4367 if (clear_constant)
ced57283 4368 TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
a1ab4c31
AC
4369 return expr;
4370 }
4371 }
7948ae37
OH
4372
4373 /* Likewise for a conversion between array type and vector type with a
4374 compatible representative array. */
4375 else if (code == VECTOR_TYPE
4376 && ecode == ARRAY_TYPE
4377 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4378 etype))
4379 {
9771b263
DN
4380 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4381 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4382 vec<constructor_elt, va_gc> *v;
7948ae37
OH
4383 unsigned HOST_WIDE_INT ix;
4384 tree value;
4385
4386 /* Build a VECTOR_CST from a *constant* array constructor. */
4387 if (TREE_CONSTANT (expr))
4388 {
4389 bool constant_p = true;
4390
4391 /* Iterate through elements and check if all constructor
4392 elements are *_CSTs. */
4393 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4394 if (!CONSTANT_CLASS_P (value))
4395 {
4396 constant_p = false;
4397 break;
4398 }
4399
4400 if (constant_p)
4401 return build_vector_from_ctor (type,
4402 CONSTRUCTOR_ELTS (expr));
4403 }
4404
4405 /* Otherwise, build a regular vector constructor. */
9771b263 4406 vec_alloc (v, len);
7948ae37
OH
4407 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4408 {
f32682ca 4409 constructor_elt elt = {NULL_TREE, value};
9771b263 4410 v->quick_push (elt);
7948ae37
OH
4411 }
4412 expr = copy_node (expr);
4413 TREE_TYPE (expr) = type;
4414 CONSTRUCTOR_ELTS (expr) = v;
4415 return expr;
4416 }
a1ab4c31
AC
4417 break;
4418
4419 case UNCONSTRAINED_ARRAY_REF:
a1c7d797
EB
4420 /* First retrieve the underlying array. */
4421 expr = maybe_unconstrained_array (expr);
4422 etype = TREE_TYPE (expr);
4423 ecode = TREE_CODE (etype);
4424 break;
a1ab4c31
AC
4425
4426 case VIEW_CONVERT_EXPR:
4427 {
4428 /* GCC 4.x is very sensitive to type consistency overall, and view
4429 conversions thus are very frequent. Even though just "convert"ing
4430 the inner operand to the output type is fine in most cases, it
4431 might expose unexpected input/output type mismatches in special
4432 circumstances so we avoid such recursive calls when we can. */
4433 tree op0 = TREE_OPERAND (expr, 0);
4434
4435 /* If we are converting back to the original type, we can just
4436 lift the input conversion. This is a common occurrence with
4437 switches back-and-forth amongst type variants. */
4438 if (type == TREE_TYPE (op0))
4439 return op0;
4440
7948ae37
OH
4441 /* Otherwise, if we're converting between two aggregate or vector
4442 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4443 target type in place or to just convert the inner expression. */
4444 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4445 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
a1ab4c31
AC
4446 {
4447 /* If we are converting between mere variants, we can just
4448 substitute the VIEW_CONVERT_EXPR in place. */
4449 if (gnat_types_compatible_p (type, etype))
4450 return build1 (VIEW_CONVERT_EXPR, type, op0);
4451
4452 /* Otherwise, we may just bypass the input view conversion unless
4453 one of the types is a fat pointer, which is handled by
4454 specialized code below which relies on exact type matching. */
315cff15
EB
4455 else if (!TYPE_IS_FAT_POINTER_P (type)
4456 && !TYPE_IS_FAT_POINTER_P (etype))
a1ab4c31
AC
4457 return convert (type, op0);
4458 }
ad1d36ba
EB
4459
4460 break;
a1ab4c31 4461 }
a1ab4c31 4462
a1ab4c31
AC
4463 default:
4464 break;
4465 }
4466
4467 /* Check for converting to a pointer to an unconstrained array. */
315cff15 4468 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
a1ab4c31
AC
4469 return convert_to_fat_pointer (type, expr);
4470
7948ae37
OH
4471 /* If we are converting between two aggregate or vector types that are mere
4472 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4473 to a vector type from its representative array type. */
4474 else if ((code == ecode
4475 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4476 && gnat_types_compatible_p (type, etype))
4477 || (code == VECTOR_TYPE
4478 && ecode == ARRAY_TYPE
4479 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4480 etype)))
a1ab4c31
AC
4481 return build1 (VIEW_CONVERT_EXPR, type, expr);
4482
76af763d
EB
4483 /* If we are converting between tagged types, try to upcast properly. */
4484 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4485 && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type))
4486 {
4487 tree child_etype = etype;
4488 do {
4489 tree field = TYPE_FIELDS (child_etype);
4490 if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
64235766 4491 return build_component_ref (expr, field, false);
76af763d
EB
4492 child_etype = TREE_TYPE (field);
4493 } while (TREE_CODE (child_etype) == RECORD_TYPE);
4494 }
4495
bb1f7929
EB
4496 /* If we are converting from a smaller form of record type back to it, just
4497 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4498 size on both sides. */
4499 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4500 && smaller_form_type_p (etype, type))
4501 {
4502 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4503 false, false, false, true),
4504 expr);
4505 return build1 (VIEW_CONVERT_EXPR, type, expr);
4506 }
4507
a1ab4c31 4508 /* In all other cases of related types, make a NOP_EXPR. */
86060344 4509 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
a1ab4c31
AC
4510 return fold_convert (type, expr);
4511
4512 switch (code)
4513 {
4514 case VOID_TYPE:
4515 return fold_build1 (CONVERT_EXPR, type, expr);
4516
a1ab4c31
AC
4517 case INTEGER_TYPE:
4518 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4519 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4520 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4521 return unchecked_convert (type, expr, false);
4522 else if (TYPE_BIASED_REPRESENTATION_P (type))
4523 return fold_convert (type,
4524 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4525 convert (TREE_TYPE (type), expr),
1eb58520
AC
4526 convert (TREE_TYPE (type),
4527 TYPE_MIN_VALUE (type))));
a1ab4c31
AC
4528
4529 /* ... fall through ... */
4530
4531 case ENUMERAL_TYPE:
01ddebf2 4532 case BOOLEAN_TYPE:
a1ab4c31
AC
4533 /* If we are converting an additive expression to an integer type
4534 with lower precision, be wary of the optimization that can be
4535 applied by convert_to_integer. There are 2 problematic cases:
4536 - if the first operand was originally of a biased type,
4537 because we could be recursively called to convert it
4538 to an intermediate type and thus rematerialize the
4539 additive operator endlessly,
4540 - if the expression contains a placeholder, because an
4541 intermediate conversion that changes the sign could
4542 be inserted and thus introduce an artificial overflow
4543 at compile time when the placeholder is substituted. */
4544 if (code == INTEGER_TYPE
4545 && ecode == INTEGER_TYPE
4546 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4547 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4548 {
4549 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4550
4551 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4552 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4553 || CONTAINS_PLACEHOLDER_P (expr))
4554 return build1 (NOP_EXPR, type, expr);
4555 }
4556
4557 return fold (convert_to_integer (type, expr));
4558
4559 case POINTER_TYPE:
4560 case REFERENCE_TYPE:
0d7de0e1 4561 /* If converting between two thin pointers, adjust if needed to account
2b45154d
EB
4562 for differing offsets from the base pointer, depending on whether
4563 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type. */
315cff15 4564 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
a1ab4c31 4565 {
2b45154d 4566 tree etype_pos
7c775aca 4567 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype))
2b45154d
EB
4568 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype))))
4569 : size_zero_node;
4570 tree type_pos
7c775aca 4571 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))
2b45154d
EB
4572 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type))))
4573 : size_zero_node;
4574 tree byte_diff = size_diffop (type_pos, etype_pos);
0d7de0e1 4575
a1ab4c31 4576 expr = build1 (NOP_EXPR, type, expr);
a1ab4c31
AC
4577 if (integer_zerop (byte_diff))
4578 return expr;
4579
4580 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
0d7de0e1 4581 fold_convert (sizetype, byte_diff));
a1ab4c31
AC
4582 }
4583
0d7de0e1
EB
4584 /* If converting fat pointer to normal or thin pointer, get the pointer
4585 to the array and then convert it. */
4586 if (TYPE_IS_FAT_POINTER_P (etype))
64235766 4587 expr = build_component_ref (expr, TYPE_FIELDS (etype), false);
a1ab4c31
AC
4588
4589 return fold (convert_to_pointer (type, expr));
4590
4591 case REAL_TYPE:
4592 return fold (convert_to_real (type, expr));
4593
4594 case RECORD_TYPE:
4595 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
0e228dd9 4596 {
9771b263
DN
4597 vec<constructor_elt, va_gc> *v;
4598 vec_alloc (v, 1);
0e228dd9
NF
4599
4600 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4601 convert (TREE_TYPE (TYPE_FIELDS (type)),
4602 expr));
4603 return gnat_build_constructor (type, v);
4604 }
a1ab4c31
AC
4605
4606 /* ... fall through ... */
4607
4608 case ARRAY_TYPE:
4609 /* In these cases, assume the front-end has validated the conversion.
4610 If the conversion is valid, it will be a bit-wise conversion, so
4611 it can be viewed as an unchecked conversion. */
4612 return unchecked_convert (type, expr, false);
4613
4614 case UNION_TYPE:
4615 /* This is either a conversion between a tagged type and some
4616 subtype, which we have to mark as a UNION_TYPE because of
4617 overlapping fields or a conversion of an Unchecked_Union. */
4618 return unchecked_convert (type, expr, false);
4619
4620 case UNCONSTRAINED_ARRAY_TYPE:
7948ae37
OH
4621 /* If the input is a VECTOR_TYPE, convert to the representative
4622 array type first. */
4623 if (ecode == VECTOR_TYPE)
4624 {
4625 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
4626 etype = TREE_TYPE (expr);
4627 ecode = TREE_CODE (etype);
4628 }
4629
a1ab4c31
AC
4630 /* If EXPR is a constrained array, take its address, convert it to a
4631 fat pointer, and then dereference it. Likewise if EXPR is a
4632 record containing both a template and a constrained array.
4633 Note that a record representing a justified modular type
4634 always represents a packed constrained array. */
4635 if (ecode == ARRAY_TYPE
4636 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4637 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4638 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4639 return
4640 build_unary_op
4641 (INDIRECT_REF, NULL_TREE,
4642 convert_to_fat_pointer (TREE_TYPE (type),
4643 build_unary_op (ADDR_EXPR,
4644 NULL_TREE, expr)));
4645
4646 /* Do something very similar for converting one unconstrained
4647 array to another. */
4648 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4649 return
4650 build_unary_op (INDIRECT_REF, NULL_TREE,
4651 convert (TREE_TYPE (type),
4652 build_unary_op (ADDR_EXPR,
4653 NULL_TREE, expr)));
4654 else
4655 gcc_unreachable ();
4656
4657 case COMPLEX_TYPE:
4658 return fold (convert_to_complex (type, expr));
4659
4660 default:
4661 gcc_unreachable ();
4662 }
4663}
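
/* As an illustration of the fat and thin pointer cases handled in convert
   above, here are the two shapes sketched in plain C.  All names below are
   made up for the sketch and are not part of gigi; the real layouts are
   built elsewhere.  A fat pointer carries a pointer to the data and a
   pointer to the bounds template, whereas a thin pointer designates a
   single object that lays out the template and the data back to back,
   which is why converting between two thin pointer types may have to add
   the byte offset computed above.  */

struct toy_bounds
{
  long first, last;                  /* The 'First/'Last template.  */
};

struct toy_fat_pointer
{
  int *p_array;                      /* Pointer to the elements.  */
  struct toy_bounds *p_bounds;       /* Pointer to the bounds.  */
};

struct toy_thin_object
{
  struct toy_bounds bounds;          /* Template first ...  */
  int data[4];                       /* ... then the elements.  */
};
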
15bf7d19
EB
4664
4665/* Create an expression whose value is that of EXPR converted to the common
4666 index type, which is sizetype. EXPR is supposed to be in the base type
4667 of the GNAT index type. Calling it is equivalent to doing
4668
4669 convert (sizetype, expr)
4670
4671 but we try to distribute the type conversion with the knowledge that EXPR
4672 cannot overflow in its type. This is a best-effort approach and we fall
4673 back to the above expression as soon as difficulties are encountered.
4674
4675 This is necessary to overcome issues that arise when the GNAT base index
4676 type and the GCC common index type (sizetype) don't have the same size,
4677 which is quite frequent on 64-bit architectures. In this case, and if
4678 the GNAT base index type is signed but the iteration type of the loop has
4679 been forced to unsigned, the loop scalar evolution engine cannot compute
4680 a simple evolution for the general induction variables associated with the
4681 array indices, because it will preserve the wrap-around semantics in the
4682 unsigned type of their "inner" part. As a result, many loop optimizations
4683 are blocked.
4684
4685 The solution is to use a special (basic) induction variable that is at
4686 least as large as sizetype, and to express the aforementioned general
4687 induction variables in terms of this induction variable, eliminating
4688 the problematic intermediate truncation to the GNAT base index type.
4689 This is possible as long as the original expression doesn't overflow
4690 and if the middle-end hasn't introduced artificial overflows in the
4691 course of the various simplifications it can make to the expression. */
4692
4693tree
4694convert_to_index_type (tree expr)
4695{
4696 enum tree_code code = TREE_CODE (expr);
4697 tree type = TREE_TYPE (expr);
4698
4699 /* If the type is unsigned, overflow is allowed so we cannot be sure that
4700 EXPR doesn't overflow. Keep it simple if optimization is disabled. */
4701 if (TYPE_UNSIGNED (type) || !optimize)
4702 return convert (sizetype, expr);
4703
4704 switch (code)
4705 {
4706 case VAR_DECL:
4707 /* The main effect of the function: replace a loop parameter with its
4708 associated special induction variable. */
4709 if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
4710 expr = DECL_INDUCTION_VAR (expr);
4711 break;
4712
4713 CASE_CONVERT:
4714 {
4715 tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
4716 /* Bail out as soon as we suspect some sort of type frobbing. */
4717 if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
4718 || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
4719 break;
4720 }
4721
4722 /* ... fall through ... */
4723
4724 case NON_LVALUE_EXPR:
4725 return fold_build1 (code, sizetype,
4726 convert_to_index_type (TREE_OPERAND (expr, 0)));
4727
4728 case PLUS_EXPR:
4729 case MINUS_EXPR:
4730 case MULT_EXPR:
4731 return fold_build2 (code, sizetype,
4732 convert_to_index_type (TREE_OPERAND (expr, 0)),
4733 convert_to_index_type (TREE_OPERAND (expr, 1)));
4734
4735 case COMPOUND_EXPR:
4736 return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
4737 convert_to_index_type (TREE_OPERAND (expr, 1)));
4738
4739 case COND_EXPR:
4740 return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
4741 convert_to_index_type (TREE_OPERAND (expr, 1)),
4742 convert_to_index_type (TREE_OPERAND (expr, 2)));
4743
4744 default:
4745 break;
4746 }
4747
4748 return convert (sizetype, expr);
4749}
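
/* What the distribution above buys, shown as a stand-alone C sketch (the
   variable names are made up and the program is not part of gigi).
   Converting the already-computed narrow expression to the wide index type
   gives the same value as converting the operands first, provided the
   narrow computation does not overflow, and the distributed form is the
   one the scalar evolution engine can analyze.  */

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

int
main (void)
{
  int32_t i = 1000;

  /* convert (sizetype, i * 4 + 8): widen the narrow result.  */
  int64_t narrow_then_widen = (int64_t) (i * 4 + 8);

  /* Distributed form: widen the operands, compute in the wide type.  */
  int64_t widened_operands = (int64_t) i * 4 + 8;

  /* Both lines print 4008.  */
  printf ("%" PRId64 "\n%" PRId64 "\n", narrow_then_widen, widened_operands);
  return 0;
}
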
a1ab4c31
AC
4750\f
4751/* Remove all conversions that are done in EXP. This includes converting
4752 from a padded type or to a justified modular type. If TRUE_ADDRESS
4753 is true, always return the address of the containing object even if
4754 the address is not bit-aligned. */
4755
4756tree
4757remove_conversions (tree exp, bool true_address)
4758{
4759 switch (TREE_CODE (exp))
4760 {
4761 case CONSTRUCTOR:
4762 if (true_address
4763 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4764 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4765 return
2117b9bb 4766 remove_conversions (CONSTRUCTOR_ELT (exp, 0)->value, true);
a1ab4c31
AC
4767 break;
4768
4769 case COMPONENT_REF:
315cff15 4770 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
a1ab4c31
AC
4771 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4772 break;
4773
a1ab4c31 4774 CASE_CONVERT:
722356ce
EB
4775 case VIEW_CONVERT_EXPR:
4776 case NON_LVALUE_EXPR:
a1ab4c31
AC
4777 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4778
4779 default:
4780 break;
4781 }
4782
4783 return exp;
4784}
4785\f
4786/* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
86060344 4787 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
a1ab4c31
AC
4788 likewise return an expression pointing to the underlying array. */
4789
4790tree
4791maybe_unconstrained_array (tree exp)
4792{
4793 enum tree_code code = TREE_CODE (exp);
1aa291f7 4794 tree type = TREE_TYPE (exp);
a1ab4c31 4795
1aa291f7 4796 switch (TREE_CODE (type))
a1ab4c31
AC
4797 {
4798 case UNCONSTRAINED_ARRAY_TYPE:
4799 if (code == UNCONSTRAINED_ARRAY_REF)
4800 {
7e169899 4801 const bool read_only = TREE_READONLY (exp);
a1c7d797
EB
4802 const bool no_trap = TREE_THIS_NOTRAP (exp);
4803
7e169899 4804 exp = TREE_OPERAND (exp, 0);
1aa291f7
EB
4805 type = TREE_TYPE (exp);
4806
7e169899
EB
4807 if (TREE_CODE (exp) == COND_EXPR)
4808 {
4809 tree op1
4810 = build_unary_op (INDIRECT_REF, NULL_TREE,
4811 build_component_ref (TREE_OPERAND (exp, 1),
1aa291f7 4812 TYPE_FIELDS (type),
7e169899
EB
4813 false));
4814 tree op2
4815 = build_unary_op (INDIRECT_REF, NULL_TREE,
4816 build_component_ref (TREE_OPERAND (exp, 2),
1aa291f7 4817 TYPE_FIELDS (type),
7e169899
EB
4818 false));
4819
4820 exp = build3 (COND_EXPR,
1aa291f7 4821 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
7e169899
EB
4822 TREE_OPERAND (exp, 0), op1, op2);
4823 }
4824 else
a1c7d797
EB
4825 {
4826 exp = build_unary_op (INDIRECT_REF, NULL_TREE,
64235766
EB
4827 build_component_ref (exp,
4828 TYPE_FIELDS (type),
a1c7d797
EB
4829 false));
4830 TREE_READONLY (exp) = read_only;
4831 TREE_THIS_NOTRAP (exp) = no_trap;
4832 }
a1ab4c31
AC
4833 }
4834
4835 else if (code == NULL_EXPR)
1aa291f7
EB
4836 exp = build1 (NULL_EXPR,
4837 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
4838 TREE_OPERAND (exp, 0));
4839 break;
a1ab4c31
AC
4840
4841 case RECORD_TYPE:
1aa291f7
EB
4842 /* If this is a padded type and it contains a template, convert to the
4843 unpadded type first. */
4844 if (TYPE_PADDING_P (type)
4845 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
4846 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
a1ab4c31 4847 {
1aa291f7 4848 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
64235766 4849 code = TREE_CODE (exp);
1aa291f7
EB
4850 type = TREE_TYPE (exp);
4851 }
4852
4853 if (TYPE_CONTAINS_TEMPLATE_P (type))
4854 {
64235766
EB
4855 /* If the array initializer is a box, return NULL_TREE. */
4856 if (code == CONSTRUCTOR && CONSTRUCTOR_NELTS (exp) < 2)
4857 return NULL_TREE;
4858
4859 exp = build_component_ref (exp, DECL_CHAIN (TYPE_FIELDS (type)),
4860 false);
4861 type = TREE_TYPE (exp);
1aa291f7
EB
4862
4863 /* If the array type is padded, convert to the unpadded type. */
64235766
EB
4864 if (TYPE_IS_PADDING_P (type))
4865 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
a1ab4c31 4866 }
a1ab4c31
AC
4867 break;
4868
4869 default:
4870 break;
4871 }
4872
4873 return exp;
4874}
4875\f
afcea859 4876/* Return true if EXPR is an expression that can be folded as an operand
84fb43a1 4877 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
afcea859
EB
4878
4879static bool
4880can_fold_for_view_convert_p (tree expr)
4881{
4882 tree t1, t2;
4883
4884 /* The folder will fold NOP_EXPRs between integral types with the same
4885 precision (in the middle-end's sense). We cannot allow it if the
4886 types don't have the same precision in the Ada sense as well. */
4887 if (TREE_CODE (expr) != NOP_EXPR)
4888 return true;
4889
4890 t1 = TREE_TYPE (expr);
4891 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
4892
4893 /* Defer to the folder for non-integral conversions. */
4894 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
4895 return true;
4896
4897 /* Only fold conversions that preserve both precisions. */
4898 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
4899 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
4900 return true;
4901
4902 return false;
4903}
4904
a1ab4c31 4905/* Return an expression that does an unchecked conversion of EXPR to TYPE.
afcea859
EB
4906 If NOTRUNC_P is true, truncation operations should be suppressed.
4907
4908 Special care is required with (source or target) integral types whose
4909 precision is not equal to their size, to make sure we fetch or assign
4910 the value bits whose location might depend on the endianness, e.g.
4911
4912 Rmsize : constant := 8;
4913 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
4914
4915 type Bit_Array is array (1 .. Rmsize) of Boolean;
4916 pragma Pack (Bit_Array);
4917
4918 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
4919
4920 Value : Int := 2#1000_0001#;
4921 Vbits : Bit_Array := To_Bit_Array (Value);
4922
4923 we expect the 8 bits at Vbits'Address to always contain Value, while
4924 their original location depends on the endianness, at Value'Address
84fb43a1 4925 on a little-endian architecture but not on a big-endian one. */
a1ab4c31
AC
4926
4927tree
4928unchecked_convert (tree type, tree expr, bool notrunc_p)
4929{
4930 tree etype = TREE_TYPE (expr);
c34f3839
EB
4931 enum tree_code ecode = TREE_CODE (etype);
4932 enum tree_code code = TREE_CODE (type);
e63b36bd 4933 tree tem;
980a0501 4934 int c;
a1ab4c31 4935
c34f3839 4936 /* If the expression is already of the right type, we are done. */
a1ab4c31
AC
4937 if (etype == type)
4938 return expr;
4939
026c3cfd 4940 /* If both types are integral just do a normal conversion.
a1ab4c31 4941 Likewise for a conversion to an unconstrained array. */
1eb58520 4942 if (((INTEGRAL_TYPE_P (type)
0d7de0e1 4943 || (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
c34f3839 4944 || (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
1eb58520 4945 && (INTEGRAL_TYPE_P (etype)
315cff15 4946 || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
c34f3839
EB
4947 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
4948 || code == UNCONSTRAINED_ARRAY_TYPE)
a1ab4c31 4949 {
c34f3839 4950 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
a1ab4c31
AC
4951 {
4952 tree ntype = copy_type (etype);
a1ab4c31
AC
4953 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
4954 TYPE_MAIN_VARIANT (ntype) = ntype;
4955 expr = build1 (NOP_EXPR, ntype, expr);
4956 }
4957
c34f3839 4958 if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
a1ab4c31 4959 {
afcea859 4960 tree rtype = copy_type (type);
a1ab4c31
AC
4961 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
4962 TYPE_MAIN_VARIANT (rtype) = rtype;
afcea859
EB
4963 expr = convert (rtype, expr);
4964 expr = build1 (NOP_EXPR, type, expr);
a1ab4c31 4965 }
afcea859
EB
4966 else
4967 expr = convert (type, expr);
a1ab4c31
AC
4968 }
4969
afcea859 4970 /* If we are converting to an integral type whose precision is not equal
ee45a32d
EB
4971 to its size, first unchecked convert to a record type that contains a
4972 field of the given precision. Then extract the result from the field.
4973
4974 There is a subtlety if the source type is an aggregate type with reverse
4975 storage order because its representation is not contiguous in the native
4976 storage order, i.e. a direct unchecked conversion to an integral type
4977 with N bits of precision cannot read the first N bits of the aggregate
4978 type. To overcome it, we do an unchecked conversion to an integral type
4979 with reverse storage order and return the resulting value. This also
4980 ensures that the result of the unchecked conversion doesn't depend on
4981 the endianness of the target machine, but only on the storage order of
4982 the aggregate type.
4983
4984 Finally, for the sake of consistency, we do the unchecked conversion
4985 to an integral type with reverse storage order as soon as the source
4986 type is an aggregate type with reverse storage order, even if there
4987 are no considerations of precision or size involved. */
980a0501
EB
4988 else if (INTEGRAL_TYPE_P (type)
4989 && TYPE_RM_SIZE (type)
9a1bdc31
EB
4990 && (tree_int_cst_compare (TYPE_RM_SIZE (type),
4991 TYPE_SIZE (type)) < 0
ee45a32d
EB
4992 || (AGGREGATE_TYPE_P (etype)
4993 && TYPE_REVERSE_STORAGE_ORDER (etype))))
a1ab4c31
AC
4994 {
4995 tree rec_type = make_node (RECORD_TYPE);
416de7d5
EB
4996 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
4997 tree field_type, field;
4998
ee45a32d
EB
4999 if (AGGREGATE_TYPE_P (etype))
5000 TYPE_REVERSE_STORAGE_ORDER (rec_type)
5001 = TYPE_REVERSE_STORAGE_ORDER (etype);
5002
416de7d5
EB
5003 if (TYPE_UNSIGNED (type))
5004 field_type = make_unsigned_type (prec);
5005 else
5006 field_type = make_signed_type (prec);
5007 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));
5008
5009 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
44e9e3ec 5010 NULL_TREE, bitsize_zero_node, 1, 0);
a1ab4c31 5011
44e9e3ec 5012 finish_record_type (rec_type, field, 1, false);
a1ab4c31
AC
5013
5014 expr = unchecked_convert (rec_type, expr, notrunc_p);
64235766 5015 expr = build_component_ref (expr, field, false);
416de7d5 5016 expr = fold_build1 (NOP_EXPR, type, expr);
a1ab4c31
AC
5017 }
5018
416de7d5
EB
5019 /* Similarly if we are converting from an integral type whose precision is
5020 not equal to its size, first copy into a field of the given precision
ee45a32d
EB
5021 and unchecked convert the record type.
5022
5023 The same considerations as above apply if the target type is an aggregate
5024 type with reverse storage order and we also proceed similarly. */
980a0501
EB
5025 else if (INTEGRAL_TYPE_P (etype)
5026 && TYPE_RM_SIZE (etype)
9a1bdc31
EB
5027 && (tree_int_cst_compare (TYPE_RM_SIZE (etype),
5028 TYPE_SIZE (etype)) < 0
ee45a32d
EB
5029 || (AGGREGATE_TYPE_P (type)
5030 && TYPE_REVERSE_STORAGE_ORDER (type))))
a1ab4c31
AC
5031 {
5032 tree rec_type = make_node (RECORD_TYPE);
416de7d5 5033 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
9771b263
DN
5034 vec<constructor_elt, va_gc> *v;
5035 vec_alloc (v, 1);
416de7d5
EB
5036 tree field_type, field;
5037
ee45a32d
EB
5038 if (AGGREGATE_TYPE_P (type))
5039 TYPE_REVERSE_STORAGE_ORDER (rec_type)
5040 = TYPE_REVERSE_STORAGE_ORDER (type);
5041
416de7d5
EB
5042 if (TYPE_UNSIGNED (etype))
5043 field_type = make_unsigned_type (prec);
5044 else
5045 field_type = make_signed_type (prec);
5046 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));
5047
5048 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
44e9e3ec 5049 NULL_TREE, bitsize_zero_node, 1, 0);
a1ab4c31 5050
44e9e3ec 5051 finish_record_type (rec_type, field, 1, false);
a1ab4c31 5052
416de7d5 5053 expr = fold_build1 (NOP_EXPR, field_type, expr);
0e228dd9
NF
5054 CONSTRUCTOR_APPEND_ELT (v, field, expr);
5055 expr = gnat_build_constructor (rec_type, v);
a1ab4c31
AC
5056 expr = unchecked_convert (type, expr, notrunc_p);
5057 }
5058
980a0501
EB
5059 /* If we are converting from a scalar type to a type with a different size,
5060 we need to pad to have the same size on both sides.
5061
5062 ??? We cannot do it unconditionally because unchecked conversions are
5063 used liberally by the front-end to implement polymorphism, e.g. in:
5064
5065 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
5066 return p___size__4 (p__object!(S191s.all));
5067
5068 so we skip all expressions that are references. */
5069 else if (!REFERENCE_CLASS_P (expr)
5070 && !AGGREGATE_TYPE_P (etype)
5071 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5072 && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
5073 {
5074 if (c < 0)
5075 {
5076 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
5077 false, false, false, true),
5078 expr);
5079 expr = unchecked_convert (type, expr, notrunc_p);
5080 }
5081 else
5082 {
5083 tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
5084 false, false, false, true);
5085 expr = unchecked_convert (rec_type, expr, notrunc_p);
64235766 5086 expr = build_component_ref (expr, TYPE_FIELDS (rec_type), false);
980a0501
EB
5087 }
5088 }
5089
7948ae37
OH
5090 /* We have a special case when we are converting between two unconstrained
5091 array types. In that case, take the address, convert the fat pointer
5092 types, and dereference. */
c34f3839 5093 else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
a1ab4c31
AC
5094 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
5095 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
5096 build_unary_op (ADDR_EXPR, NULL_TREE,
5097 expr)));
7948ae37
OH
5098
5099 /* Another special case is when we are converting to a vector type from its
5100 representative array type; this is a regular conversion. */
c34f3839
EB
5101 else if (code == VECTOR_TYPE
5102 && ecode == ARRAY_TYPE
7948ae37
OH
5103 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
5104 etype))
5105 expr = convert (type, expr);
5106
e63b36bd
EB
5107 /* And, if the array type is not the representative, we try to build an
5108 intermediate vector type of which the array type is the representative
5109 and to do the unchecked conversion between the vector types, in order
5110 to enable further simplifications in the middle-end. */
5111 else if (code == VECTOR_TYPE
5112 && ecode == ARRAY_TYPE
5113 && (tem = build_vector_type_for_array (etype, NULL_TREE)))
5114 {
5115 expr = convert (tem, expr);
5116 return unchecked_convert (type, expr, notrunc_p);
5117 }
5118
44e9e3ec
EB
5119 /* If we are converting a CONSTRUCTOR to a more aligned RECORD_TYPE, bump
5120 the alignment of the CONSTRUCTOR to speed up the copy operation. */
5121 else if (TREE_CODE (expr) == CONSTRUCTOR
5122 && code == RECORD_TYPE
5123 && TYPE_ALIGN (etype) < TYPE_ALIGN (type))
5124 {
5125 expr = convert (maybe_pad_type (etype, NULL_TREE, TYPE_ALIGN (type),
5126 Empty, false, false, false, true),
5127 expr);
5128 return unchecked_convert (type, expr, notrunc_p);
5129 }
5130
5131 /* Otherwise, just build a VIEW_CONVERT_EXPR of the expression. */
a1ab4c31
AC
5132 else
5133 {
5134 expr = maybe_unconstrained_array (expr);
5135 etype = TREE_TYPE (expr);
c34f3839 5136 ecode = TREE_CODE (etype);
afcea859
EB
5137 if (can_fold_for_view_convert_p (expr))
5138 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
5139 else
5140 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
a1ab4c31
AC
5141 }
5142
afcea859
EB
5143 /* If the result is an integral type whose precision is not equal to its
5144 size, sign- or zero-extend the result. We need not do this if the input
5145 is an integral type of the same precision and signedness or if the output
a1ab4c31
AC
5146 is a biased type or if both the input and output are unsigned. */
5147 if (!notrunc_p
9a1bdc31
EB
5148 && INTEGRAL_TYPE_P (type)
5149 && TYPE_RM_SIZE (type)
5150 && tree_int_cst_compare (TYPE_RM_SIZE (type), TYPE_SIZE (type)) < 0
a1ab4c31
AC
5151 && !(INTEGRAL_TYPE_P (etype)
5152 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
9a1bdc31
EB
5153 && tree_int_cst_compare (TYPE_RM_SIZE (type),
5154 TYPE_RM_SIZE (etype)
5155 ? TYPE_RM_SIZE (etype)
5156 : TYPE_SIZE (etype)) == 0)
5157 && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
a1ab4c31
AC
5158 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
5159 {
c34f3839 5160 tree base_type
9a1bdc31
EB
5161 = gnat_type_for_size (TREE_INT_CST_LOW (TYPE_SIZE (type)),
5162 TYPE_UNSIGNED (type));
a1ab4c31
AC
5163 tree shift_expr
5164 = convert (base_type,
5165 size_binop (MINUS_EXPR,
9a1bdc31 5166 TYPE_SIZE (type), TYPE_RM_SIZE (type)));
a1ab4c31
AC
5167 expr
5168 = convert (type,
5169 build_binary_op (RSHIFT_EXPR, base_type,
5170 build_binary_op (LSHIFT_EXPR, base_type,
5171 convert (base_type, expr),
5172 shift_expr),
5173 shift_expr));
5174 }
5175
5176 /* An unchecked conversion should never raise Constraint_Error. The code
5177 below assumes that GCC's conversion routines overflow the same way that
5178 the underlying hardware does. This is probably true. In the rare case
5179 when it is false, we can rely on the fact that such conversions are
5180 erroneous anyway. */
5181 if (TREE_CODE (expr) == INTEGER_CST)
5182 TREE_OVERFLOW (expr) = 0;
5183
5184 /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
5185 mark the result as no longer constant. */
5186 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
5187 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
5188 OEP_ONLY_CONST))
5189 TREE_CONSTANT (expr) = 0;
5190
5191 return expr;
5192}
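
/* The final sign- or zero-extension performed above, isolated as a
   stand-alone C sketch.  The helper name is made up; the point is only to
   show how the LSHIFT/RSHIFT pair brings the top bit of an N-bit field
   into the full word.  Like the generated code, this relies on signed
   right shifts being arithmetic and on modulo conversion from unsigned to
   signed, both of which GCC defines.  */

#include <stdint.h>
#include <stdio.h>

static int32_t
toy_sign_extend (int32_t x, unsigned prec)
{
  const unsigned shift = 32 - prec;   /* TYPE_SIZE minus TYPE_RM_SIZE.  */

  /* Shift left through an unsigned type to avoid undefined behavior, then
     shift right in the signed type to replicate the sign bit.  */
  return (int32_t) ((uint32_t) x << shift) >> shift;
}

int
main (void)
{
  /* The low 3 bits 2#101# denote -3 in a signed 3-bit subtype.  */
  printf ("%d\n", toy_sign_extend (5, 3));   /* Prints -3.  */
  return 0;
}
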
5193\f
feec4372 5194/* Return the appropriate GCC tree code for the specified GNAT_TYPE,
a1ab4c31
AC
5195 the latter being a record type as predicated by Is_Record_Type. */
5196
5197enum tree_code
5198tree_code_for_record_type (Entity_Id gnat_type)
5199{
b1a785fb 5200 Node_Id component_list, component;
a1ab4c31 5201
b1a785fb
EB
5202 /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5203 fields are all in the variant part. Otherwise, return RECORD_TYPE. */
a1ab4c31
AC
5204 if (!Is_Unchecked_Union (gnat_type))
5205 return RECORD_TYPE;
5206
b1a785fb
EB
5207 gnat_type = Implementation_Base_Type (gnat_type);
5208 component_list
5209 = Component_List (Type_Definition (Declaration_Node (gnat_type)));
5210
a1ab4c31
AC
5211 for (component = First_Non_Pragma (Component_Items (component_list));
5212 Present (component);
5213 component = Next_Non_Pragma (component))
5214 if (Ekind (Defining_Entity (component)) == E_Component)
5215 return RECORD_TYPE;
5216
5217 return UNION_TYPE;
5218}
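
/* The distinction made above, sketched in C terms (the types below are
   illustrative only).  An Unchecked_Union whose non-discriminant
   components all live in the variant part has no common stored field, so
   its GCC layout is that of a plain union; a component outside the
   variant part still forces a RECORD_TYPE.  */

union toy_unchecked_union            /* -> UNION_TYPE */
{
  int as_int;
  float as_float;
};

struct toy_mixed_record              /* -> RECORD_TYPE */
{
  int common_component;              /* Outside the variant part.  */
  union { int a; float b; } variant_part;
};
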
5219
caa9d12a
EB
5220/* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5221 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5222 according to the presence of an alignment clause on the type or, if it
5223 is an array, on the component type. */
5224
5225bool
5226is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
5227{
5228 gnat_type = Underlying_Type (gnat_type);
5229
5230 *align_clause = Present (Alignment_Clause (gnat_type));
5231
5232 if (Is_Array_Type (gnat_type))
5233 {
5234 gnat_type = Underlying_Type (Component_Type (gnat_type));
5235 if (Present (Alignment_Clause (gnat_type)))
5236 *align_clause = true;
5237 }
5238
5239 if (!Is_Floating_Point_Type (gnat_type))
5240 return false;
5241
5242 if (UI_To_Int (Esize (gnat_type)) != 64)
5243 return false;
5244
5245 return true;
5246}
5247
5248/* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5249 size is greater or equal to 64 bits, or an array of such a type. Set
5250 ALIGN_CLAUSE according to the presence of an alignment clause on the
5251 type or, if it is an array, on the component type. */
5252
5253bool
5254is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
5255{
5256 gnat_type = Underlying_Type (gnat_type);
5257
5258 *align_clause = Present (Alignment_Clause (gnat_type));
5259
5260 if (Is_Array_Type (gnat_type))
5261 {
5262 gnat_type = Underlying_Type (Component_Type (gnat_type));
5263 if (Present (Alignment_Clause (gnat_type)))
5264 *align_clause = true;
5265 }
5266
5267 if (!Is_Scalar_Type (gnat_type))
5268 return false;
5269
5270 if (UI_To_Int (Esize (gnat_type)) < 64)
5271 return false;
5272
5273 return true;
5274}
5275
a1ab4c31
AC
5276/* Return true if GNU_TYPE is suitable as the type of a non-aliased
5277 component of an aggregate type. */
5278
5279bool
5280type_for_nonaliased_component_p (tree gnu_type)
5281{
5282 /* If the type is passed by reference, we may have pointers to the
5283 component so it cannot be made non-aliased. */
5284 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
5285 return false;
5286
5287 /* We used to say that any component of aggregate type is aliased
5288 because the front-end may take 'Reference of it. The front-end
5289 has been enhanced in the meantime so as to use a renaming instead
5290 in most cases, but the back-end can probably take the address of
5291 such a component too so we go for the conservative stance.
5292
5293 For instance, we might need the address of any array type, even
5294 if normally passed by copy, to construct a fat pointer if the
5295 component is used as an actual for an unconstrained formal.
5296
5297 Likewise for record types: even if a specific record subtype is
5298 passed by copy, the parent type might be passed by ref (e.g. if
5299 it's of variable size) and we might take the address of a child
5300 component to pass to a parent formal. We have no way to check
5301 for such conditions here. */
5302 if (AGGREGATE_TYPE_P (gnu_type))
5303 return false;
5304
5305 return true;
5306}
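
/* The conservative stance above, illustrated by a stand-alone C sketch
   (names are made up).  Even when the enclosing record is passed by copy,
   the address of an embedded aggregate component may still be needed,
   e.g. to build a fat pointer for an unconstrained formal, so such
   components cannot be treated as non-aliased.  */

#include <stdio.h>

struct toy_rec { int data[4]; };

static long
toy_sum (const int *p, int n)        /* Stands in for the formal.  */
{
  long s = 0;
  for (int i = 0; i < n; i++)
    s += p[i];
  return s;
}

int
main (void)
{
  struct toy_rec r = { { 1, 2, 3, 4 } };
  /* Taking the address of the component is what rules out making it
     non-aliased.  */
  printf ("%ld\n", toy_sum (r.data, 4));   /* Prints 10.  */
  return 0;
}
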
5307
bb1f7929
EB
5308/* Return true if TYPE is a smaller form of ORIG_TYPE. */
5309
5310bool
5311smaller_form_type_p (tree type, tree orig_type)
5312{
5313 tree size, osize;
5314
5315 /* We're not interested in variants here. */
5316 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
5317 return false;
5318
5319 /* Like a variant, a packable version keeps the original TYPE_NAME. */
5320 if (TYPE_NAME (type) != TYPE_NAME (orig_type))
5321 return false;
5322
5323 size = TYPE_SIZE (type);
5324 osize = TYPE_SIZE (orig_type);
5325
5326 if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
5327 return false;
5328
5329 return tree_int_cst_lt (size, osize) != 0;
5330}
5331
a22b794d 5332/* Perform final processing on global declarations. */
d7438551 5333
0f50b3db
EB
5334static GTY (()) tree dummy_global;
5335
a1ab4c31 5336void
a22b794d 5337gnat_write_global_declarations (void)
a1ab4c31 5338{
10e4d056
EB
5339 unsigned int i;
5340 tree iter;
5341
65444786 5342 /* If we have declared types as used at the global level, insert them in
755c71fa
EB
5343 the global hash table. We use a dummy variable for this purpose, but
5344 we need to build it unconditionally to avoid -fcompare-debug issues. */
5345 if (first_global_object_name)
65444786 5346 {
35e8bcf4 5347 struct varpool_node *node;
d3c268ab
EB
5348 char *label;
5349
5350 ASM_FORMAT_PRIVATE_NAME (label, first_global_object_name, 0);
65444786 5351 dummy_global
d3c268ab
EB
5352 = build_decl (BUILTINS_LOCATION, VAR_DECL, get_identifier (label),
5353 void_type_node);
dd25fe0a 5354 DECL_HARD_REGISTER (dummy_global) = 1;
65444786 5355 TREE_STATIC (dummy_global) = 1;
037e5573 5356 node = varpool_node::get_create (dummy_global);
dd25fe0a 5357 node->definition = 1;
67348ccc 5358 node->force_output = 1;
65444786 5359
755c71fa
EB
5360 if (types_used_by_cur_var_decl)
5361 while (!types_used_by_cur_var_decl->is_empty ())
5362 {
5363 tree t = types_used_by_cur_var_decl->pop ();
5364 types_used_by_var_decl_insert (t, dummy_global);
5365 }
65444786
EB
5366 }
5367
a22b794d
EB
5368 /* Output debug information for all global type declarations first. This
5369 ensures that global types whose compilation hasn't been finalized yet,
5370 for example pointers to Taft amendment types, have their compilation
5371 finalized in the right context. */
5372 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5373 if (TREE_CODE (iter) == TYPE_DECL && !DECL_IGNORED_P (iter))
1f0e2688 5374 debug_hooks->type_decl (iter, false);
a22b794d
EB
5375
5376 /* Then output the global variables. We need to do that after the debug
8afaddaa 5377 information for global types is emitted so that they are finalized. */
a22b794d
EB
5378 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5379 if (TREE_CODE (iter) == VAR_DECL)
5380 rest_of_decl_compilation (iter, true, 0);
caadda8e
PMR
5381
5382 /* Output the imported modules/declarations. In GNAT, these only
5383 materialize subprograms. */
5384 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5385 if (TREE_CODE (iter) == IMPORTED_DECL && !DECL_IGNORED_P (iter))
5386 debug_hooks->imported_module_or_decl (iter, DECL_NAME (iter),
5387 DECL_CONTEXT (iter), 0);
a1ab4c31
AC
5388}
5389
5390/* ************************************************************************
5391 * * GCC builtins support *
5392 * ************************************************************************ */
5393
5394/* The general scheme is fairly simple:
5395
5396 For each builtin function/type to be declared, gnat_install_builtins calls
aef308d0 5397 internal facilities which eventually get to gnat_pushdecl, which in turn
a1ab4c31
AC
5398 tracks the builtin function decls so declared in the 'builtin_decls' global
5399 data structure. When an Intrinsic subprogram declaration is processed, we
5400 search this global data structure to retrieve the associated BUILT_IN DECL
5401 node. */
5402
5403/* Search the chain of currently available builtin declarations for a node
5404 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
5405 found, if any, or NULL_TREE otherwise. */
5406tree
5407builtin_decl_for (tree name)
5408{
5409 unsigned i;
5410 tree decl;
5411
9771b263 5412 FOR_EACH_VEC_SAFE_ELT (builtin_decls, i, decl)
a1ab4c31
AC
5413 if (DECL_NAME (decl) == name)
5414 return decl;
5415
5416 return NULL_TREE;
5417}
5418
5419/* The code below eventually exposes gnat_install_builtins, which declares
5420 the builtin types and functions we might need, either internally or as
5421 user accessible facilities.
5422
5423 ??? This is a first implementation shot, still in rough shape. It is
5424 heavily inspired by the "C" family implementation, with chunks copied
5425 verbatim from there.
5426
ba464315 5427 Two obvious improvement candidates are:
a1ab4c31
AC
5428 o Use a more efficient name/decl mapping scheme
5429 o Devise a middle-end infrastructure to avoid having to copy
5430 pieces between front-ends. */
5431
5432/* ----------------------------------------------------------------------- *
5433 * BUILTIN ELEMENTARY TYPES *
5434 * ----------------------------------------------------------------------- */
5435
5436/* Standard data types to be used in builtin argument declarations. */
5437
5438enum c_tree_index
5439{
5440 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
5441 CTI_STRING_TYPE,
5442 CTI_CONST_STRING_TYPE,
5443
5444 CTI_MAX
5445};
5446
5447static tree c_global_trees[CTI_MAX];
5448
5449#define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
5450#define string_type_node c_global_trees[CTI_STRING_TYPE]
5451#define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
5452
5453/* ??? In addition to some attribute handlers, we currently don't support a
5454 (small) number of builtin-types, which in turn inhibits support for a
5455 number of builtin functions. */
5456#define wint_type_node void_type_node
5457#define intmax_type_node void_type_node
5458#define uintmax_type_node void_type_node
5459
5460/* Build the void_list_node (void_type_node having been created). */
5461
5462static tree
5463build_void_list_node (void)
5464{
5465 tree t = build_tree_list (NULL_TREE, void_type_node);
5466 return t;
5467}
5468
5469/* Used to help initialize the builtin-types.def table. When a type of
5470 the correct size doesn't exist, use error_mark_node instead of NULL.
5471 The latter results in segfaults even when a decl using the type doesn't
5472 get invoked. */
5473
5474static tree
5475builtin_type_for_size (int size, bool unsignedp)
5476{
ced57283 5477 tree type = gnat_type_for_size (size, unsignedp);
a1ab4c31
AC
5478 return type ? type : error_mark_node;
5479}
5480
5481/* Build/push the elementary type decls that builtin functions/types
5482 will need. */
5483
5484static void
5485install_builtin_elementary_types (void)
5486{
9a1bdc31 5487 signed_size_type_node = gnat_signed_type_for (size_type_node);
a1ab4c31
AC
5488 pid_type_node = integer_type_node;
5489 void_list_node = build_void_list_node ();
5490
5491 string_type_node = build_pointer_type (char_type_node);
5492 const_string_type_node
5493 = build_pointer_type (build_qualified_type
5494 (char_type_node, TYPE_QUAL_CONST));
5495}
5496
5497/* ----------------------------------------------------------------------- *
5498 * BUILTIN FUNCTION TYPES *
5499 * ----------------------------------------------------------------------- */
5500
5501/* Now, builtin function types per se. */
5502
5503enum c_builtin_type
5504{
5505#define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
5506#define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
5507#define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
5508#define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
5509#define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5510#define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5511#define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
f6a7cffc
TS
5512#define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5513 ARG6) NAME,
5514#define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5515 ARG6, ARG7) NAME,
5516#define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5517 ARG6, ARG7, ARG8) NAME,
d9a6bd32
JJ
5518#define DEF_FUNCTION_TYPE_9(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5519 ARG6, ARG7, ARG8, ARG9) NAME,
5520#define DEF_FUNCTION_TYPE_10(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5521 ARG6, ARG7, ARG8, ARG9, ARG10) NAME,
5522#define DEF_FUNCTION_TYPE_11(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5523 ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) NAME,
a1ab4c31
AC
5524#define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
5525#define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
5526#define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
5527#define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5528#define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
f6a7cffc 5529#define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
56a9f6bc 5530 NAME,
3e32ee19
NS
5531#define DEF_FUNCTION_TYPE_VAR_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5532 ARG6) NAME,
56a9f6bc
TS
5533#define DEF_FUNCTION_TYPE_VAR_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5534 ARG6, ARG7) NAME,
a1ab4c31
AC
5535#define DEF_POINTER_TYPE(NAME, TYPE) NAME,
5536#include "builtin-types.def"
5537#undef DEF_PRIMITIVE_TYPE
5538#undef DEF_FUNCTION_TYPE_0
5539#undef DEF_FUNCTION_TYPE_1
5540#undef DEF_FUNCTION_TYPE_2
5541#undef DEF_FUNCTION_TYPE_3
5542#undef DEF_FUNCTION_TYPE_4
5543#undef DEF_FUNCTION_TYPE_5
5544#undef DEF_FUNCTION_TYPE_6
5545#undef DEF_FUNCTION_TYPE_7
acf0174b 5546#undef DEF_FUNCTION_TYPE_8
d9a6bd32
JJ
5547#undef DEF_FUNCTION_TYPE_9
5548#undef DEF_FUNCTION_TYPE_10
5549#undef DEF_FUNCTION_TYPE_11
a1ab4c31
AC
5550#undef DEF_FUNCTION_TYPE_VAR_0
5551#undef DEF_FUNCTION_TYPE_VAR_1
5552#undef DEF_FUNCTION_TYPE_VAR_2
5553#undef DEF_FUNCTION_TYPE_VAR_3
5554#undef DEF_FUNCTION_TYPE_VAR_4
5555#undef DEF_FUNCTION_TYPE_VAR_5
3e32ee19 5556#undef DEF_FUNCTION_TYPE_VAR_6
56a9f6bc 5557#undef DEF_FUNCTION_TYPE_VAR_7
a1ab4c31
AC
5558#undef DEF_POINTER_TYPE
5559 BT_LAST
5560};
5561
5562typedef enum c_builtin_type builtin_type;
5563
5564/* A temporary array used in communication with def_fn_type. */
5565static GTY(()) tree builtin_types[(int) BT_LAST + 1];
5566
5567/* A helper function for install_builtin_function_types. Build function type
5568 for DEF with return type RET and N arguments. If VAR is true, then the
5569 function should be variadic after those N arguments.
5570
5571 Takes special care not to ICE if any of the types involved are
5572 error_mark_node, which indicates that said type is not in fact available
5573 (see builtin_type_for_size), in which case the function type as a whole
5574 should be error_mark_node. */
5575
5576static void
5577def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
5578{
e5b00edf
NF
5579 tree t;
5580 tree *args = XALLOCAVEC (tree, n);
a1ab4c31
AC
5581 va_list list;
5582 int i;
5583
5584 va_start (list, n);
5585 for (i = 0; i < n; ++i)
5586 {
c6bd4220 5587 builtin_type a = (builtin_type) va_arg (list, int);
a1ab4c31
AC
5588 t = builtin_types[a];
5589 if (t == error_mark_node)
5590 goto egress;
e5b00edf 5591 args[i] = t;
a1ab4c31 5592 }
a1ab4c31 5593
a1ab4c31
AC
5594 t = builtin_types[ret];
5595 if (t == error_mark_node)
5596 goto egress;
e5b00edf
NF
5597 if (var)
5598 t = build_varargs_function_type_array (t, n, args);
5599 else
5600 t = build_function_type_array (t, n, args);
a1ab4c31
AC
5601
5602 egress:
5603 builtin_types[def] = t;
0edf1bb2 5604 va_end (list);
a1ab4c31
AC
5605}
5606
5607/* Build the builtin function types and install them in the builtin_types
5608 array for later use in builtin function decls. */
5609
5610static void
5611install_builtin_function_types (void)
5612{
5613 tree va_list_ref_type_node;
5614 tree va_list_arg_type_node;
5615
5616 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
5617 {
5618 va_list_arg_type_node = va_list_ref_type_node =
5619 build_pointer_type (TREE_TYPE (va_list_type_node));
5620 }
5621 else
5622 {
5623 va_list_arg_type_node = va_list_type_node;
5624 va_list_ref_type_node = build_reference_type (va_list_type_node);
5625 }
5626
5627#define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
5628 builtin_types[ENUM] = VALUE;
5629#define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
5630 def_fn_type (ENUM, RETURN, 0, 0);
5631#define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
5632 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
5633#define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
5634 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
5635#define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5636 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
5637#define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5638 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
5639#define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5640 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5641#define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5642 ARG6) \
5643 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5644#define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5645 ARG6, ARG7) \
5646 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
acf0174b
JJ
5647#define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5648 ARG6, ARG7, ARG8) \
5649 def_fn_type (ENUM, RETURN, 0, 8, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5650 ARG7, ARG8);
d9a6bd32
JJ
5651#define DEF_FUNCTION_TYPE_9(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5652 ARG6, ARG7, ARG8, ARG9) \
5653 def_fn_type (ENUM, RETURN, 0, 9, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5654 ARG7, ARG8, ARG9);
5655#define DEF_FUNCTION_TYPE_10(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
5656 ARG6, ARG7, ARG8, ARG9, ARG10) \
5657 def_fn_type (ENUM, RETURN, 0, 10, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5658 ARG7, ARG8, ARG9, ARG10);
5659#define DEF_FUNCTION_TYPE_11(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
5660 ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) \
5661 def_fn_type (ENUM, RETURN, 0, 11, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5662 ARG7, ARG8, ARG9, ARG10, ARG11);
a1ab4c31
AC
5663#define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
5664 def_fn_type (ENUM, RETURN, 1, 0);
5665#define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
5666 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
5667#define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
5668 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
5669#define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5670 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
5671#define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5672 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
5673#define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5674 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
3e32ee19
NS
5675#define DEF_FUNCTION_TYPE_VAR_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5676 ARG6) \
5677 def_fn_type (ENUM, RETURN, 1, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
56a9f6bc
TS
5678#define DEF_FUNCTION_TYPE_VAR_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5679 ARG6, ARG7) \
5680 def_fn_type (ENUM, RETURN, 1, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
a1ab4c31
AC
5681#define DEF_POINTER_TYPE(ENUM, TYPE) \
5682 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
5683
5684#include "builtin-types.def"
5685
5686#undef DEF_PRIMITIVE_TYPE
f6a7cffc 5687#undef DEF_FUNCTION_TYPE_0
a1ab4c31
AC
5688#undef DEF_FUNCTION_TYPE_1
5689#undef DEF_FUNCTION_TYPE_2
5690#undef DEF_FUNCTION_TYPE_3
5691#undef DEF_FUNCTION_TYPE_4
5692#undef DEF_FUNCTION_TYPE_5
5693#undef DEF_FUNCTION_TYPE_6
f6a7cffc
TS
5694#undef DEF_FUNCTION_TYPE_7
5695#undef DEF_FUNCTION_TYPE_8
d9a6bd32
JJ
5696#undef DEF_FUNCTION_TYPE_9
5697#undef DEF_FUNCTION_TYPE_10
5698#undef DEF_FUNCTION_TYPE_11
a1ab4c31
AC
5699#undef DEF_FUNCTION_TYPE_VAR_0
5700#undef DEF_FUNCTION_TYPE_VAR_1
5701#undef DEF_FUNCTION_TYPE_VAR_2
5702#undef DEF_FUNCTION_TYPE_VAR_3
5703#undef DEF_FUNCTION_TYPE_VAR_4
5704#undef DEF_FUNCTION_TYPE_VAR_5
3e32ee19 5705#undef DEF_FUNCTION_TYPE_VAR_6
56a9f6bc 5706#undef DEF_FUNCTION_TYPE_VAR_7
a1ab4c31
AC
5707#undef DEF_POINTER_TYPE
5708 builtin_types[(int) BT_LAST] = NULL_TREE;
5709}
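
/* The construction above is the classic "X macro" technique: the same
   builtin-types.def list is expanded twice with different definitions of
   the DEF_* macros, once to build the enum and once to fill the
   builtin_types array.  A self-contained miniature of the pattern, with
   an inline list standing in for the .def file (all names are made up):  */

#include <stdio.h>

#define TOY_TYPE_LIST(X) \
  X (TOY_INT, "int")     \
  X (TOY_FLOAT, "float") \
  X (TOY_BOOL, "boolean")

/* First expansion: the enumerators.  */
enum toy_type
{
#define X(ENUM, NAME) ENUM,
  TOY_TYPE_LIST (X)
#undef X
  TOY_LAST
};

/* Second expansion: a parallel table of names.  */
static const char *const toy_type_name[] =
{
#define X(ENUM, NAME) NAME,
  TOY_TYPE_LIST (X)
#undef X
};

int
main (void)
{
  printf ("%s\n", toy_type_name[TOY_FLOAT]);   /* Prints "float".  */
  return 0;
}
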
5710
5711/* ----------------------------------------------------------------------- *
5712 * BUILTIN ATTRIBUTES *
5713 * ----------------------------------------------------------------------- */
5714
5715enum built_in_attribute
5716{
5717#define DEF_ATTR_NULL_TREE(ENUM) ENUM,
5718#define DEF_ATTR_INT(ENUM, VALUE) ENUM,
e384e6b5 5719#define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
a1ab4c31
AC
5720#define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
5721#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
5722#include "builtin-attrs.def"
5723#undef DEF_ATTR_NULL_TREE
5724#undef DEF_ATTR_INT
e384e6b5 5725#undef DEF_ATTR_STRING
a1ab4c31
AC
5726#undef DEF_ATTR_IDENT
5727#undef DEF_ATTR_TREE_LIST
5728 ATTR_LAST
5729};
5730
5731static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
5732
5733static void
5734install_builtin_attributes (void)
5735{
5736 /* Fill in the built_in_attributes array. */
5737#define DEF_ATTR_NULL_TREE(ENUM) \
5738 built_in_attributes[(int) ENUM] = NULL_TREE;
5739#define DEF_ATTR_INT(ENUM, VALUE) \
5740 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
e384e6b5
BS
5741#define DEF_ATTR_STRING(ENUM, VALUE) \
5742 built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
a1ab4c31
AC
5743#define DEF_ATTR_IDENT(ENUM, STRING) \
5744 built_in_attributes[(int) ENUM] = get_identifier (STRING);
5745#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
5746 built_in_attributes[(int) ENUM] \
5747 = tree_cons (built_in_attributes[(int) PURPOSE], \
5748 built_in_attributes[(int) VALUE], \
5749 built_in_attributes[(int) CHAIN]);
5750#include "builtin-attrs.def"
5751#undef DEF_ATTR_NULL_TREE
5752#undef DEF_ATTR_INT
e384e6b5 5753#undef DEF_ATTR_STRING
a1ab4c31
AC
5754#undef DEF_ATTR_IDENT
5755#undef DEF_ATTR_TREE_LIST
5756}
5757
5758/* Handle a "const" attribute; arguments as in
5759 struct attribute_spec.handler. */
5760
5761static tree
5762handle_const_attribute (tree *node, tree ARG_UNUSED (name),
5763 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5764 bool *no_add_attrs)
5765{
5766 if (TREE_CODE (*node) == FUNCTION_DECL)
5767 TREE_READONLY (*node) = 1;
5768 else
5769 *no_add_attrs = true;
5770
5771 return NULL_TREE;
5772}
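/* For instance, a declaration along the lines of

     extern int square (int) __attribute__ ((const));

   (the function name is purely illustrative) ends up here, and the handler
   records the property by setting TREE_READONLY on the FUNCTION_DECL.  */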
5773
5774/* Handle a "nothrow" attribute; arguments as in
5775 struct attribute_spec.handler. */
5776
5777static tree
5778handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
5779 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5780 bool *no_add_attrs)
5781{
5782 if (TREE_CODE (*node) == FUNCTION_DECL)
5783 TREE_NOTHROW (*node) = 1;
5784 else
5785 *no_add_attrs = true;
5786
5787 return NULL_TREE;
5788}
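/* For instance, a declaration along the lines of

     extern void release_lock (void) __attribute__ ((nothrow));

   (illustrative name) gets TREE_NOTHROW set, telling the exception machinery
   that calls to it cannot propagate exceptions.  */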
5789
5790/* Handle a "pure" attribute; arguments as in
5791 struct attribute_spec.handler. */
5792
5793static tree
5794handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5795 int ARG_UNUSED (flags), bool *no_add_attrs)
5796{
5797 if (TREE_CODE (*node) == FUNCTION_DECL)
5798 DECL_PURE_P (*node) = 1;
ba464315 5799 /* TODO: support types. */
a1ab4c31
AC
5800 else
5801 {
7948ae37
OH
5802 warning (OPT_Wattributes, "%qs attribute ignored",
5803 IDENTIFIER_POINTER (name));
a1ab4c31
AC
5804 *no_add_attrs = true;
5805 }
5806
5807 return NULL_TREE;
5808}
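/* For instance, a declaration along the lines of

     extern int list_length (const struct node *) __attribute__ ((pure));

   (illustrative names) gets DECL_PURE_P set: the function may read global
   memory but has no side effects, so redundant calls with the same arguments
   may be optimized away.  */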
5809
5810/* Handle a "no vops" attribute; arguments as in
5811 struct attribute_spec.handler. */
5812
5813static tree
5814handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
5815 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5816 bool *ARG_UNUSED (no_add_attrs))
5817{
5818 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
5819 DECL_IS_NOVOPS (*node) = 1;
5820 return NULL_TREE;
5821}
5822
5823/* Helper for nonnull attribute handling; fetch the operand number
5824 from the attribute argument list. */
5825
5826static bool
5827get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
5828{
5829 /* Verify the arg number is a constant. */
807e902e 5830 if (!tree_fits_uhwi_p (arg_num_expr))
a1ab4c31
AC
5831 return false;
5832
5833 *valp = TREE_INT_CST_LOW (arg_num_expr);
5834 return true;
5835}
5836
5837/* Handle the "nonnull" attribute. */
5838static tree
5839handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
5840 tree args, int ARG_UNUSED (flags),
5841 bool *no_add_attrs)
5842{
5843 tree type = *node;
5844 unsigned HOST_WIDE_INT attr_arg_num;
5845
5846 /* If no arguments are specified, all pointer arguments should be
5847 non-null. Verify a full prototype is given so that the arguments
5848 will have the correct types when we actually check them later. */
5849 if (!args)
5850 {
f4da8dce 5851 if (!prototype_p (type))
a1ab4c31
AC
5852 {
5853 error ("nonnull attribute without arguments on a non-prototype");
5854 *no_add_attrs = true;
5855 }
5856 return NULL_TREE;
5857 }
5858
5859 /* Argument list specified. Verify that each argument number references
5860 a pointer argument. */
5861 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
5862 {
a1ab4c31
AC
5863 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
5864
5865 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
5866 {
5867 error ("nonnull argument has invalid operand number (argument %lu)",
5868 (unsigned long) attr_arg_num);
5869 *no_add_attrs = true;
5870 return NULL_TREE;
5871 }
5872
d7d058c5 5873 if (prototype_p (type))
a1ab4c31 5874 {
d7d058c5
NF
5875 function_args_iterator iter;
5876 tree argument;
5877
5878 function_args_iter_init (&iter, type);
5879 for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
a1ab4c31 5880 {
d7d058c5 5881 argument = function_args_iter_cond (&iter);
a1ab4c31
AC
5882 if (!argument || ck_num == arg_num)
5883 break;
a1ab4c31
AC
5884 }
5885
5886 if (!argument
d7d058c5 5887 || TREE_CODE (argument) == VOID_TYPE)
a1ab4c31 5888 {
58c8f770
EB
5889 error ("nonnull argument with out-of-range operand number "
5890 "(argument %lu, operand %lu)",
a1ab4c31
AC
5891 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5892 *no_add_attrs = true;
5893 return NULL_TREE;
5894 }
5895
d7d058c5 5896 if (TREE_CODE (argument) != POINTER_TYPE)
a1ab4c31 5897 {
58c8f770
EB
5898 error ("nonnull argument references non-pointer operand "
5899 "(argument %lu, operand %lu)",
a1ab4c31
AC
5900 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5901 *no_add_attrs = true;
5902 return NULL_TREE;
5903 }
5904 }
5905 }
5906
5907 return NULL_TREE;
5908}
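/* For instance, a declaration along the lines of

     extern void *copy_buf (void *dst, const void *src, __SIZE_TYPE__ n)
       __attribute__ ((nonnull (1, 2)));

   (illustrative name) passes the checks above because operands 1 and 2
   designate pointer parameters.  With no arguments, as in
   __attribute__ ((nonnull)), all pointer parameters are implied, which is
   why a full prototype is then required.  */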
5909
5910/* Handle a "sentinel" attribute. */
5911
5912static tree
5913handle_sentinel_attribute (tree *node, tree name, tree args,
5914 int ARG_UNUSED (flags), bool *no_add_attrs)
5915{
f4da8dce 5916 if (!prototype_p (*node))
a1ab4c31
AC
5917 {
5918 warning (OPT_Wattributes,
7948ae37
OH
5919 "%qs attribute requires prototypes with named arguments",
5920 IDENTIFIER_POINTER (name));
a1ab4c31
AC
5921 *no_add_attrs = true;
5922 }
5923 else
5924 {
dcf0c47e 5925 if (!stdarg_p (*node))
a1ab4c31
AC
5926 {
5927 warning (OPT_Wattributes,
7948ae37
OH
5928 "%qs attribute only applies to variadic functions",
5929 IDENTIFIER_POINTER (name));
a1ab4c31
AC
5930 *no_add_attrs = true;
5931 }
5932 }
5933
5934 if (args)
5935 {
5936 tree position = TREE_VALUE (args);
5937
5938 if (TREE_CODE (position) != INTEGER_CST)
5939 {
5940 warning (0, "requested position is not an integer constant");
5941 *no_add_attrs = true;
5942 }
5943 else
5944 {
5945 if (tree_int_cst_lt (position, integer_zero_node))
5946 {
5947 warning (0, "requested position is less than zero");
5948 *no_add_attrs = true;
5949 }
5950 }
5951 }
5952
5953 return NULL_TREE;
5954}
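/* For instance, a declaration along the lines of

     extern void build_list (const char *first, ...)
       __attribute__ ((sentinel));

   (illustrative name) requires the last argument of every call to be a null
   pointer; an explicit position, e.g. sentinel (1), moves that requirement
   one argument back from the end, hence the check that the position is a
   non-negative integer constant.  */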
5955
5956/* Handle a "noreturn" attribute; arguments as in
5957 struct attribute_spec.handler. */
5958
5959static tree
5960handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5961 int ARG_UNUSED (flags), bool *no_add_attrs)
5962{
5963 tree type = TREE_TYPE (*node);
5964
5965 /* See FIXME comment in c_common_attribute_table. */
5966 if (TREE_CODE (*node) == FUNCTION_DECL)
5967 TREE_THIS_VOLATILE (*node) = 1;
5968 else if (TREE_CODE (type) == POINTER_TYPE
5969 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
5970 TREE_TYPE (*node)
5971 = build_pointer_type
5972 (build_type_variant (TREE_TYPE (type),
5973 TYPE_READONLY (TREE_TYPE (type)), 1));
5974 else
5975 {
7948ae37
OH
5976 warning (OPT_Wattributes, "%qs attribute ignored",
5977 IDENTIFIER_POINTER (name));
a1ab4c31
AC
5978 *no_add_attrs = true;
5979 }
5980
5981 return NULL_TREE;
5982}
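/* For instance, a declaration along the lines of

     extern void fatal_error_exit (const char *) __attribute__ ((noreturn));

   (illustrative name) has TREE_THIS_VOLATILE set, which is how "does not
   return" is encoded on a FUNCTION_DECL; the second branch above handles the
   same attribute applied to a pointer-to-function type.  */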
5983
0d6e14fd
JH
5984/* Handle a "leaf" attribute; arguments as in
5985 struct attribute_spec.handler. */
5986
5987static tree
f087ea44 5988handle_leaf_attribute (tree *node, tree name, tree ARG_UNUSED (args),
0d6e14fd
JH
5989 int ARG_UNUSED (flags), bool *no_add_attrs)
5990{
5991 if (TREE_CODE (*node) != FUNCTION_DECL)
5992 {
5993 warning (OPT_Wattributes, "%qE attribute ignored", name);
5994 *no_add_attrs = true;
5995 }
5996 if (!TREE_PUBLIC (*node))
5997 {
32a5388a 5998 warning (OPT_Wattributes, "%qE attribute has no effect", name);
0d6e14fd 5999 *no_add_attrs = true;
f087ea44
AC
6000 }
6001
6002 return NULL_TREE;
6003}
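/* For instance, a declaration along the lines of

     extern int log_message (const char *) __attribute__ ((leaf));

   (illustrative name) promises that control only comes back to the current
   translation unit by returning (or by an exception), never through a
   callback; as the handler warns, this is only meaningful for external
   (TREE_PUBLIC) functions.  */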
6004
6005/* Handle a "always_inline" attribute; arguments as in
6006 struct attribute_spec.handler. */
6007
6008static tree
6009handle_always_inline_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6010 int ARG_UNUSED (flags), bool *no_add_attrs)
6011{
6012 if (TREE_CODE (*node) == FUNCTION_DECL)
6013 {
6014 /* Set the attribute and mark it for disregarding inline limits. */
6015 DECL_DISREGARD_INLINE_LIMITS (*node) = 1;
6016 }
6017 else
6018 {
6019 warning (OPT_Wattributes, "%qE attribute ignored", name);
6020 *no_add_attrs = true;
0d6e14fd
JH
6021 }
6022
6023 return NULL_TREE;
6024}
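/* For instance, a declaration along the lines of

     static inline int add_one (int) __attribute__ ((always_inline));

   (illustrative name) has DECL_DISREGARD_INLINE_LIMITS set, so the inliner
   will inline calls to it even when its size heuristics would normally
   reject the candidate.  */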
6025
a1ab4c31
AC
6026/* Handle a "malloc" attribute; arguments as in
6027 struct attribute_spec.handler. */
6028
6029static tree
6030handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6031 int ARG_UNUSED (flags), bool *no_add_attrs)
6032{
6033 if (TREE_CODE (*node) == FUNCTION_DECL
6034 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
6035 DECL_IS_MALLOC (*node) = 1;
6036 else
6037 {
7948ae37
OH
6038 warning (OPT_Wattributes, "%qs attribute ignored",
6039 IDENTIFIER_POINTER (name));
a1ab4c31
AC
6040 *no_add_attrs = true;
6041 }
6042
6043 return NULL_TREE;
6044}
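/* For instance, a declaration along the lines of

     extern void *pool_alloc (__SIZE_TYPE__) __attribute__ ((malloc));

   (illustrative name) has DECL_IS_MALLOC set, provided it returns a pointer;
   this tells the alias machinery that the returned pointer cannot alias any
   other object.  */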
6045
6046/* Fake handler for attributes we don't properly support. */
6047
6048tree
6049fake_attribute_handler (tree * ARG_UNUSED (node),
6050 tree ARG_UNUSED (name),
6051 tree ARG_UNUSED (args),
6052 int ARG_UNUSED (flags),
6053 bool * ARG_UNUSED (no_add_attrs))
6054{
6055 return NULL_TREE;
6056}
6057
6058/* Handle a "type_generic" attribute. */
6059
6060static tree
6061handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
6062 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6063 bool * ARG_UNUSED (no_add_attrs))
6064{
a1ab4c31
AC
6065 /* Ensure we have a function type. */
6066 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
b4680ca1 6067
a1ab4c31 6068 /* Ensure we have a variadic function. */
dcf0c47e 6069 gcc_assert (!prototype_p (*node) || stdarg_p (*node));
a1ab4c31
AC
6070
6071 return NULL_TREE;
6072}
6073
2724e58f
OH
6074/* Handle a "vector_size" attribute; arguments as in
6075 struct attribute_spec.handler. */
6076
6077static tree
6078handle_vector_size_attribute (tree *node, tree name, tree args,
e63b36bd 6079 int ARG_UNUSED (flags), bool *no_add_attrs)
2724e58f 6080{
e63b36bd
EB
6081 tree type = *node;
6082 tree vector_type;
2724e58f
OH
6083
6084 *no_add_attrs = true;
6085
2724e58f
OH
6086 /* We need to provide for vector pointers, vector arrays, and
6087 functions returning vectors. For example:
6088
6089 __attribute__((vector_size(16))) short *foo;
6090
6091 In this case, the mode is SI, but the type being modified is
6092 HI, so we need to look further. */
2724e58f
OH
6093 while (POINTER_TYPE_P (type)
6094 || TREE_CODE (type) == FUNCTION_TYPE
132a5459 6095 || TREE_CODE (type) == ARRAY_TYPE)
2724e58f
OH
6096 type = TREE_TYPE (type);
6097
e63b36bd
EB
6098 vector_type = build_vector_type_for_size (type, TREE_VALUE (args), name);
6099 if (!vector_type)
6100 return NULL_TREE;
2724e58f
OH
6101
6102 /* Build back pointers if needed. */
e63b36bd 6103 *node = reconstruct_complex_type (*node, vector_type);
2724e58f
OH
6104
6105 return NULL_TREE;
6106}
6107
7948ae37
OH
6108/* Handle a "vector_type" attribute; arguments as in
6109 struct attribute_spec.handler. */
6110
6111static tree
6112handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
e63b36bd 6113 int ARG_UNUSED (flags), bool *no_add_attrs)
7948ae37 6114{
e63b36bd
EB
6115 tree type = *node;
6116 tree vector_type;
7948ae37
OH
6117
6118 *no_add_attrs = true;
6119
e63b36bd 6120 if (TREE_CODE (type) != ARRAY_TYPE)
7948ae37
OH
6121 {
6122 error ("attribute %qs applies to array types only",
6123 IDENTIFIER_POINTER (name));
6124 return NULL_TREE;
6125 }
6126
e63b36bd
EB
6127 vector_type = build_vector_type_for_array (type, name);
6128 if (!vector_type)
7948ae37
OH
6129 return NULL_TREE;
6130
e63b36bd
EB
6131 TYPE_REPRESENTATIVE_ARRAY (vector_type) = type;
6132 *node = vector_type;
7948ae37
OH
6133
6134 return NULL_TREE;
6135}
6136
a1ab4c31
AC
6137/* ----------------------------------------------------------------------- *
6138 * BUILTIN FUNCTIONS *
6139 * ----------------------------------------------------------------------- */
6140
6141/* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
6142 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
6143 if nonansi_p and flag_no_nonansi_builtin. */
6144
6145static void
6146def_builtin_1 (enum built_in_function fncode,
6147 const char *name,
6148 enum built_in_class fnclass,
6149 tree fntype, tree libtype,
6150 bool both_p, bool fallback_p,
6151 bool nonansi_p ATTRIBUTE_UNUSED,
6152 tree fnattrs, bool implicit_p)
6153{
6154 tree decl;
6155 const char *libname;
6156
6157 /* Preserve an already installed decl. It was most likely set up in advance
6158 (e.g. as part of the internal builtins) for specific reasons. */
7c775aca 6159 if (builtin_decl_explicit (fncode))
a1ab4c31
AC
6160 return;
6161
6162 gcc_assert ((!both_p && !fallback_p)
6163 || !strncmp (name, "__builtin_",
6164 strlen ("__builtin_")));
6165
6166 libname = name + strlen ("__builtin_");
6167 decl = add_builtin_function (name, fntype, fncode, fnclass,
6168 (fallback_p ? libname : NULL),
6169 fnattrs);
6170 if (both_p)
6171 /* ??? This is normally further controlled by command-line options
6172 like -fno-builtin, but we don't have them for Ada. */
6173 add_builtin_function (libname, libtype, fncode, fnclass,
6174 NULL, fnattrs);
6175
e79983f4 6176 set_builtin_decl (fncode, decl, implicit_p);
a1ab4c31
AC
6177}
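/* As a sketch of the effect (the names below are illustrative rather than
   the exact spellings used by builtins.def), the DEF_BUILTIN expansion in
   install_builtin_functions below results in calls like

     def_builtin_1 (BUILT_IN_MEMCPY, "__builtin_memcpy", BUILT_IN_NORMAL,
                    builtin_types[(int) BT_FN_PTR_PTR_CONST_PTR_SIZE],
                    builtin_types[(int) BT_FN_PTR_PTR_CONST_PTR_SIZE],
                    true, true, false, NULL_TREE, true);

   With BOTH_P true, both "__builtin_memcpy" and plain "memcpy" are declared,
   the latter name being obtained by stripping the "__builtin_" prefix.  */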
6178
6179static int flag_isoc94 = 0;
6180static int flag_isoc99 = 0;
22869a37 6181static int flag_isoc11 = 0;
a1ab4c31
AC
6182
6183/* Install what the common builtins.def offers. */
6184
6185static void
6186install_builtin_functions (void)
6187{
6188#define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
6189 NONANSI_P, ATTRS, IMPLICIT, COND) \
6190 if (NAME && COND) \
6191 def_builtin_1 (ENUM, NAME, CLASS, \
6192 builtin_types[(int) TYPE], \
6193 builtin_types[(int) LIBTYPE], \
6194 BOTH_P, FALLBACK_P, NONANSI_P, \
6195 built_in_attributes[(int) ATTRS], IMPLICIT);
6196#include "builtins.def"
a1ab4c31
AC
6197}
6198
6199/* ----------------------------------------------------------------------- *
6200 * BUILTIN FUNCTIONS *
6201 * ----------------------------------------------------------------------- */
6202
6203/* Install the builtin functions we might need. */
6204
6205void
6206gnat_install_builtins (void)
6207{
6208 install_builtin_elementary_types ();
6209 install_builtin_function_types ();
6210 install_builtin_attributes ();
6211
6212 /* Install builtins used by generic middle-end pieces first. Some of these
6213 know about internal specifics and control attributes accordingly, for
6214 instance __builtin_alloca vs no-throw and -fstack-check. The generic
6215 definition from builtins.def will be ignored for those. */
384c400a 6216 build_common_builtin_nodes ();
a1ab4c31
AC
6217
6218 /* Now, install the target specific builtins, such as the AltiVec family on
6219 ppc, and the common set as exposed by builtins.def. */
6220 targetm.init_builtins ();
6221 install_builtin_functions ();
6222}
6223
6224#include "gt-ada-utils.h"
6225#include "gtype-ada.h"