1/****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2009, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
25
26/* We have attribute handlers using C specific format specifiers in warning
27 messages. Make sure they are properly recognized. */
28#define GCC_DIAG_STYLE __gcc_cdiag__
29
30#include "config.h"
31#include "system.h"
32#include "coretypes.h"
33#include "tm.h"
34#include "tree.h"
35#include "flags.h"
36#include "defaults.h"
37#include "toplev.h"
38#include "output.h"
39#include "ggc.h"
40#include "debug.h"
41#include "convert.h"
42#include "target.h"
43#include "function.h"
44#include "cgraph.h"
45#include "tree-inline.h"
46#include "tree-iterator.h"
47#include "gimple.h"
48#include "tree-dump.h"
49#include "pointer-set.h"
50#include "langhooks.h"
51#include "rtl.h"
52
53#include "ada.h"
54#include "types.h"
55#include "atree.h"
56#include "elists.h"
57#include "namet.h"
58#include "nlists.h"
59#include "stringt.h"
60#include "uintp.h"
61#include "fe.h"
62#include "sinfo.h"
63#include "einfo.h"
64#include "ada-tree.h"
65#include "gigi.h"
66
67#ifndef MAX_FIXED_MODE_SIZE
68#define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
69#endif
70
71#ifndef MAX_BITS_PER_WORD
72#define MAX_BITS_PER_WORD BITS_PER_WORD
73#endif
74
75/* If nonzero, pretend we are allocating at global level. */
76int force_global;
77
78/* Tree nodes for the various types and decls we create. */
79tree gnat_std_decls[(int) ADT_LAST];
80
81/* Functions to call for each of the possible raise reasons. */
82tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
83
84/* Forward declarations for handlers of attributes. */
85static tree handle_const_attribute (tree *, tree, tree, int, bool *);
86static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
87static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
88static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
89static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
90static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
91static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
92static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
93static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
94
95/* Fake handler for attributes we don't properly support, typically because
96 they'd require dragging a lot of the common C front-end circuitry. */
97static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
98
99/* Table of machine-independent internal attributes for Ada. We support
100 this minimal set of attributes to accommodate the needs of builtins. */
101const struct attribute_spec gnat_internal_attribute_table[] =
102{
103 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
104 { "const", 0, 0, true, false, false, handle_const_attribute },
105 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute },
106 { "pure", 0, 0, true, false, false, handle_pure_attribute },
107 { "no vops", 0, 0, true, false, false, handle_novops_attribute },
108 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute },
109 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute },
110 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute },
111 { "malloc", 0, 0, true, false, false, handle_malloc_attribute },
112 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute },
113
114 /* ??? format and format_arg are heavy and not supported, which actually
115 prevents support for stdio builtins, which we however declare as part
116 of the common builtins.def contents. */
117 { "format", 3, 3, false, true, true, fake_attribute_handler },
118 { "format_arg", 1, 1, false, true, true, fake_attribute_handler },
119
120 { NULL, 0, 0, false, false, false, NULL }
121};
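/* Note on the table above: the two numeric columns are the minimum and
 maximum number of arguments the attribute accepts, with -1 meaning "no
 maximum" (so "nonnull" may list any number of argument positions and
 "sentinel" takes one optional argument). The three booleans say whether
 the attribute requires a decl, a type, or a function type to apply to. */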
122
123/* Associates a GNAT tree node to a GCC tree node. It is used in
124 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
125 of `save_gnu_tree' for more info. */
126static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
127
128#define GET_GNU_TREE(GNAT_ENTITY) \
129 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
130
131#define SET_GNU_TREE(GNAT_ENTITY,VAL) \
132 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
133
134#define PRESENT_GNU_TREE(GNAT_ENTITY) \
135 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
136
137/* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
138static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
139
140#define GET_DUMMY_NODE(GNAT_ENTITY) \
141 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
142
143#define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
144 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
145
146#define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
147 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
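/* Both tables above are indexed by the GNAT entity's Node_Id offset from
 First_Node_Id and hold max_gnat_nodes entries; they are allocated and
 zeroed in init_gnat_to_gnu and init_dummy_type below. */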
148
149/* This variable keeps a table of types for each precision so that we only
150 allocate each of them once. Signed and unsigned types are kept separate.
151
152 Note that these types are only used when fold-const requests something
153 special. Perhaps we should NOT share these types; we'll see how it
154 goes later. */
155static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
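/* The first dimension is the precision in bits; the second presumably
 selects the signed (0) or unsigned (1) variant, following the usual GCC
 "unsignedp" convention. */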
156
157/* Likewise for float types, but record these by mode. */
158static GTY(()) tree float_types[NUM_MACHINE_MODES];
159
160/* For each binding contour we allocate a binding_level structure to indicate
161 the binding depth. */
162
163struct gnat_binding_level GTY((chain_next ("%h.chain")))
164{
165 /* The binding level containing this one (the enclosing binding level). */
166 struct gnat_binding_level *chain;
167 /* The BLOCK node for this level. */
168 tree block;
169 /* If nonzero, the setjmp buffer that needs to be updated for any
170 variable-sized definition within this context. */
171 tree jmpbuf_decl;
172};
173
174/* The binding level currently in effect. */
175static GTY(()) struct gnat_binding_level *current_binding_level;
176
177/* A chain of gnat_binding_level structures awaiting reuse. */
178static GTY((deletable)) struct gnat_binding_level *free_binding_level;
179
180/* An array of global declarations. */
181static GTY(()) VEC(tree,gc) *global_decls;
182
183/* An array of builtin function declarations. */
184static GTY(()) VEC(tree,gc) *builtin_decls;
185
186/* An array of global renaming pointers. */
187static GTY(()) VEC(tree,gc) *global_renaming_pointers;
188
189/* A chain of unused BLOCK nodes. */
190static GTY((deletable)) tree free_block_chain;
191
192static tree merge_sizes (tree, tree, tree, bool, bool);
193static tree compute_related_constant (tree, tree);
194static tree split_plus (tree, tree *);
195static void gnat_gimplify_function (tree);
196static tree float_type_for_precision (int, enum machine_mode);
197static tree convert_to_fat_pointer (tree, tree);
198static tree convert_to_thin_pointer (tree, tree);
199static tree make_descriptor_field (const char *,tree, tree, tree);
200static bool potential_alignment_gap (tree, tree, tree);
201\f
202/* Initialize the association of GNAT nodes to GCC trees. */
203
204void
205init_gnat_to_gnu (void)
206{
207 associate_gnat_to_gnu
208 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
209}
210
211/* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
212 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
213 a ..._DECL node. If NO_CHECK is true, the latter check is suppressed.
214
215 If GNU_DECL is zero, a previous association is to be reset. */
216
217void
218save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
219{
220 /* Check that GNAT_ENTITY is not already defined and that it is being set
221 to something which is a decl. Raise gigi 401 if not. Usually, this
222 means GNAT_ENTITY is defined twice, but occasionally is due to some
223 Gigi problem. */
224 gcc_assert (!(gnu_decl
225 && (PRESENT_GNU_TREE (gnat_entity)
226 || (!no_check && !DECL_P (gnu_decl)))));
227
228 SET_GNU_TREE (gnat_entity, gnu_decl);
229}
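/* For example, calling save_gnu_tree (gnat_entity, NULL_TREE, false) resets
 a previous association, after which present_gnu_tree returns false again
 and a new decl may be recorded for the same entity. */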
230
231/* GNAT_ENTITY is a GNAT tree node for a defining identifier.
232 Return the ..._DECL node that was associated with it. If there is no tree
233 node associated with GNAT_ENTITY, abort.
234
235 In some cases, such as delayed elaboration or expressions that need to
236 be elaborated only once, GNAT_ENTITY is really not an entity. */
237
238tree
239get_gnu_tree (Entity_Id gnat_entity)
240{
241 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
242 return GET_GNU_TREE (gnat_entity);
243}
244
245/* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
246
247bool
248present_gnu_tree (Entity_Id gnat_entity)
249{
250 return PRESENT_GNU_TREE (gnat_entity);
251}
252\f
253/* Initialize the association of GNAT nodes to GCC trees as dummies. */
254
255void
256init_dummy_type (void)
257{
258 dummy_node_table
259 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
260}
261
262/* Make a dummy type corresponding to GNAT_TYPE. */
263
264tree
265make_dummy_type (Entity_Id gnat_type)
266{
267 Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
268 tree gnu_type;
269
270 /* If there is an equivalent type, get its underlying type. */
271 if (Present (gnat_underlying))
272 gnat_underlying = Underlying_Type (gnat_underlying);
273
274 /* If there was no equivalent type (can only happen when just annotating
275 types) or underlying type, go back to the original type. */
276 if (No (gnat_underlying))
277 gnat_underlying = gnat_type;
278
279 /* If there is already a dummy type, use that one. Else make a new one. */
280 if (PRESENT_DUMMY_NODE (gnat_underlying))
281 return GET_DUMMY_NODE (gnat_underlying);
282
283 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
284 an ENUMERAL_TYPE. */
285 gnu_type = make_node (Is_Record_Type (gnat_underlying)
286 ? tree_code_for_record_type (gnat_underlying)
287 : ENUMERAL_TYPE);
288 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
289 TYPE_DUMMY_P (gnu_type) = 1;
290 TYPE_STUB_DECL (gnu_type)
291 = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
292 if (AGGREGATE_TYPE_P (gnu_type))
293 TYPE_BY_REFERENCE_P (gnu_type) = Is_By_Reference_Type (gnat_type);
294
295 SET_DUMMY_NODE (gnat_underlying, gnu_type);
296
297 return gnu_type;
298}
299\f
300/* Return nonzero if we are currently in the global binding level. */
301
302int
303global_bindings_p (void)
304{
305 return ((force_global || !current_function_decl) ? -1 : 0);
306}
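/* Note that this returns -1, not 1, when force_global is set or there is no
 current function; callers only test the result for zero vs. nonzero. */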
307
308/* Enter a new binding level. */
309
310void
311gnat_pushlevel ()
312{
313 struct gnat_binding_level *newlevel = NULL;
314
315 /* Reuse a struct for this binding level, if there is one. */
316 if (free_binding_level)
317 {
318 newlevel = free_binding_level;
319 free_binding_level = free_binding_level->chain;
320 }
321 else
322 newlevel
323 = (struct gnat_binding_level *)
324 ggc_alloc (sizeof (struct gnat_binding_level));
325
326 /* Use a free BLOCK, if any; otherwise, allocate one. */
327 if (free_block_chain)
328 {
329 newlevel->block = free_block_chain;
330 free_block_chain = BLOCK_CHAIN (free_block_chain);
331 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
332 }
333 else
334 newlevel->block = make_node (BLOCK);
335
336 /* Point the BLOCK we just made to its parent. */
337 if (current_binding_level)
338 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
339
340 BLOCK_VARS (newlevel->block) = BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
341 TREE_USED (newlevel->block) = 1;
342
343 /* Add this level to the front of the chain (stack) of levels that are
344 active. */
345 newlevel->chain = current_binding_level;
346 newlevel->jmpbuf_decl = NULL_TREE;
347 current_binding_level = newlevel;
348}
349
350/* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
351 and point FNDECL to this BLOCK. */
352
353void
354set_current_block_context (tree fndecl)
355{
356 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
357 DECL_INITIAL (fndecl) = current_binding_level->block;
358}
359
360/* Set the jmpbuf_decl for the current binding level to DECL. */
361
362void
363set_block_jmpbuf_decl (tree decl)
364{
365 current_binding_level->jmpbuf_decl = decl;
366}
367
368/* Get the jmpbuf_decl, if any, for the current binding level. */
369
370tree
371get_block_jmpbuf_decl ()
372{
373 return current_binding_level->jmpbuf_decl;
374}
375
376/* Exit a binding level. Set any BLOCK into the current code group. */
377
378void
379gnat_poplevel ()
380{
381 struct gnat_binding_level *level = current_binding_level;
382 tree block = level->block;
383
384 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
385 BLOCK_SUBBLOCKS (block) = nreverse (BLOCK_SUBBLOCKS (block));
386
387 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
388 are no variables, free the block and merge its subblocks into those of its
389 parent block. Otherwise, add it to the list of its parent. */
390 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
391 ;
392 else if (BLOCK_VARS (block) == NULL_TREE)
393 {
394 BLOCK_SUBBLOCKS (level->chain->block)
395 = chainon (BLOCK_SUBBLOCKS (block),
396 BLOCK_SUBBLOCKS (level->chain->block));
397 BLOCK_CHAIN (block) = free_block_chain;
398 free_block_chain = block;
399 }
400 else
401 {
402 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
403 BLOCK_SUBBLOCKS (level->chain->block) = block;
404 TREE_USED (block) = 1;
405 set_block_for_group (block);
406 }
407
408 /* Free this binding structure. */
409 current_binding_level = level->chain;
410 level->chain = free_binding_level;
411 free_binding_level = level;
412}
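/* Exiting a level recycles storage rather than freeing it: an empty BLOCK
 goes onto free_block_chain and the gnat_binding_level structure onto
 free_binding_level, both of which gnat_pushlevel reuses first. */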
413
414\f
415/* Records a ..._DECL node DECL as belonging to the current lexical scope
416 and uses GNAT_NODE for location information and propagating flags. */
417
418void
419gnat_pushdecl (tree decl, Node_Id gnat_node)
420{
421 /* If this decl is public external or at toplevel, there is no context.
422 But PARM_DECLs always go in the level of their function. */
423 if (TREE_CODE (decl) != PARM_DECL
424 && ((DECL_EXTERNAL (decl) && TREE_PUBLIC (decl))
425 || global_bindings_p ()))
426 DECL_CONTEXT (decl) = 0;
427 else
428 {
429 DECL_CONTEXT (decl) = current_function_decl;
430
431 /* Functions imported in another function are not really nested. */
432 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_PUBLIC (decl))
433 DECL_NO_STATIC_CHAIN (decl) = 1;
434 }
435
436 TREE_NO_WARNING (decl) = (gnat_node == Empty || Warnings_Off (gnat_node));
437
438 /* Set the location of DECL and emit a declaration for it. */
439 if (Present (gnat_node))
440 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
441 add_decl_expr (decl, gnat_node);
442
443 /* Put the declaration on the list. The list of declarations is in reverse
444 order. The list will be reversed later. Put global variables in the
445 globals list and builtin functions in a dedicated list to speed up
446 further lookups. Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into
447 the list, as they will cause trouble with the debugger and aren't needed
448 anyway. */
449 if (TREE_CODE (decl) != TYPE_DECL
450 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
451 {
452 if (global_bindings_p ())
453 {
454 VEC_safe_push (tree, gc, global_decls, decl);
455
456 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
457 VEC_safe_push (tree, gc, builtin_decls, decl);
458 }
459 else
460 {
461 TREE_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
462 BLOCK_VARS (current_binding_level->block) = decl;
463 }
464 }
465
466 /* For the declaration of a type, set its name if it either is not already
467 set or if the previous type name was not derived from a source name.
468 We'd rather have the type named with a real name and all the pointer
469 types to the same object have the same POINTER_TYPE node. Code in the
470 equivalent function of c-decl.c makes a copy of the type node here, but
471 that may cause us trouble with incomplete types. We make an exception
472 for fat pointer types because the compiler automatically builds them
473 for unconstrained array types and the debugger uses them to represent
474 both these and pointers to these. */
475 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
476 {
477 tree t = TREE_TYPE (decl);
478
479 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
480 ;
481 else if (TYPE_FAT_POINTER_P (t))
482 {
483 tree tt = build_variant_type_copy (t);
484 TYPE_NAME (tt) = decl;
485 TREE_USED (tt) = TREE_USED (t);
486 TREE_TYPE (decl) = tt;
487 DECL_ORIGINAL_TYPE (decl) = t;
488 t = NULL_TREE;
489 }
490 else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
491 ;
492 else
493 t = NULL_TREE;
494
495 /* Propagate the name to all the variants. This is needed for
496 the type qualifiers machinery to work properly. */
497 if (t)
498 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
499 TYPE_NAME (t) = decl;
500 }
501}
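/* To summarize, a pushed decl ends up in one of three places: the
 global_decls vector (plus builtin_decls for built-in functions) when at
 the global level, or the BLOCK_VARS list of the current binding level's
 BLOCK otherwise; TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE are recorded
 nowhere. */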
502\f
503/* Do little here. Set up the standard declarations later after the
504 front end has been run. */
505
506void
507gnat_init_decl_processing (void)
508{
509 /* Make the binding_level structure for global names. */
510 current_function_decl = 0;
511 current_binding_level = 0;
512 free_binding_level = 0;
513 gnat_pushlevel ();
514
515 build_common_tree_nodes (true, true);
516
517 /* In Ada, we use a signed type for SIZETYPE. Use the signed type
518 corresponding to the width of Pmode. In most cases when ptr_mode
519 and Pmode differ, C will use the width of ptr_mode for SIZETYPE.
520 But we get far better code using the width of Pmode. */
521 size_type_node = gnat_type_for_mode (Pmode, 0);
522 set_sizetype (size_type_node);
523
524 /* In Ada, we use an unsigned 8-bit type for the default boolean type. */
525 boolean_type_node = make_node (BOOLEAN_TYPE);
526 TYPE_PRECISION (boolean_type_node) = 1;
527 fixup_unsigned_type (boolean_type_node);
528 TYPE_RM_SIZE (boolean_type_node) = bitsize_int (1);
529
530 build_common_tree_nodes_2 (0);
531
532 ptr_void_type_node = build_pointer_type (void_type_node);
533}
534\f
535/* Record TYPE as a builtin type for Ada. NAME is the name of the type. */
536
537void
538record_builtin_type (const char *name, tree type)
539{
540 tree type_decl = build_decl (TYPE_DECL, get_identifier (name), type);
541
542 gnat_pushdecl (type_decl, Empty);
543
544 if (debug_hooks->type_decl)
545 debug_hooks->type_decl (type_decl, false);
546}
547\f
548/* Given a record type RECORD_TYPE and a chain of FIELD_DECL nodes FIELDLIST,
549 finish constructing the record or union type. If REP_LEVEL is zero, this
550 record has no representation clause and so will be entirely laid out here.
551 If REP_LEVEL is one, this record has a representation clause and has been
552 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
553 this record is derived from a parent record and thus inherits its layout;
554 only make a pass on the fields to finalize them. If DO_NOT_FINALIZE is
555 true, the record type is expected to be modified afterwards so it will
556 not be sent to the back-end for finalization. */
557
558void
559finish_record_type (tree record_type, tree fieldlist, int rep_level,
560 bool do_not_finalize)
561{
562 enum tree_code code = TREE_CODE (record_type);
563 tree name = TYPE_NAME (record_type);
564 tree ada_size = bitsize_zero_node;
565 tree size = bitsize_zero_node;
566 bool had_size = TYPE_SIZE (record_type) != 0;
567 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
568 bool had_align = TYPE_ALIGN (record_type) != 0;
569 tree field;
570
571 TYPE_FIELDS (record_type) = fieldlist;
572
573 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
574 generate debug info and have a parallel type. */
575 if (name && TREE_CODE (name) == TYPE_DECL)
576 name = DECL_NAME (name);
577 TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);
578
579 /* Globally initialize the record first. If this is a rep'ed record,
580 that just means some initializations; otherwise, layout the record. */
581 if (rep_level > 0)
582 {
583 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
584 SET_TYPE_MODE (record_type, BLKmode);
585
586 if (!had_size_unit)
587 TYPE_SIZE_UNIT (record_type) = size_zero_node;
588 if (!had_size)
589 TYPE_SIZE (record_type) = bitsize_zero_node;
590
591 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
592 out just like a UNION_TYPE, since the size will be fixed. */
593 else if (code == QUAL_UNION_TYPE)
594 code = UNION_TYPE;
595 }
596 else
597 {
598 /* Ensure there isn't a size already set. There can be in an error
599 case where there is a rep clause but all fields have errors and
600 no longer have a position. */
601 TYPE_SIZE (record_type) = 0;
602 layout_type (record_type);
603 }
604
605 /* At this point, the position and size of each field is known. It was
606 either set before entry by a rep clause, or by laying out the type above.
607
608 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
609 to compute the Ada size; the GCC size and alignment (for rep'ed records
610 that are not padding types); and the mode (for rep'ed records). We also
611 clear the DECL_BIT_FIELD indication for the cases we know have not been
612 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
613
614 if (code == QUAL_UNION_TYPE)
615 fieldlist = nreverse (fieldlist);
616
617 for (field = fieldlist; field; field = TREE_CHAIN (field))
618 {
619 tree type = TREE_TYPE (field);
620 tree pos = bit_position (field);
621 tree this_size = DECL_SIZE (field);
622 tree this_ada_size;
623
624 if ((TREE_CODE (type) == RECORD_TYPE
625 || TREE_CODE (type) == UNION_TYPE
626 || TREE_CODE (type) == QUAL_UNION_TYPE)
627 && !TYPE_IS_FAT_POINTER_P (type)
628 && !TYPE_CONTAINS_TEMPLATE_P (type)
629 && TYPE_ADA_SIZE (type))
630 this_ada_size = TYPE_ADA_SIZE (type);
631 else
632 this_ada_size = this_size;
633
634 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
635 if (DECL_BIT_FIELD (field)
636 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
637 {
638 unsigned int align = TYPE_ALIGN (type);
639
640 /* In the general case, type alignment is required. */
641 if (value_factor_p (pos, align))
642 {
643 /* The enclosing record type must be sufficiently aligned.
644 Otherwise, if no alignment was specified for it and it
645 has been laid out already, bump its alignment to the
646 desired one if this is compatible with its size. */
647 if (TYPE_ALIGN (record_type) >= align)
648 {
649 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
650 DECL_BIT_FIELD (field) = 0;
651 }
652 else if (!had_align
653 && rep_level == 0
654 && value_factor_p (TYPE_SIZE (record_type), align))
655 {
656 TYPE_ALIGN (record_type) = align;
657 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
658 DECL_BIT_FIELD (field) = 0;
659 }
660 }
661
662 /* In the non-strict alignment case, only byte alignment is required. */
663 if (!STRICT_ALIGNMENT
664 && DECL_BIT_FIELD (field)
665 && value_factor_p (pos, BITS_PER_UNIT))
666 DECL_BIT_FIELD (field) = 0;
667 }
668
669 /* If we still have DECL_BIT_FIELD set at this point, we know the field
670 is technically not addressable. Except that it can actually be
671 addressed if the field is BLKmode and happens to be properly
672 aligned. */
673 DECL_NONADDRESSABLE_P (field)
674 |= DECL_BIT_FIELD (field) && DECL_MODE (field) != BLKmode;
675
676 /* A type must be as aligned as its most aligned field that is not
677 a bit-field. But this is already enforced by layout_type. */
678 if (rep_level > 0 && !DECL_BIT_FIELD (field))
679 TYPE_ALIGN (record_type)
680 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
681
682 switch (code)
683 {
684 case UNION_TYPE:
685 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
686 size = size_binop (MAX_EXPR, size, this_size);
687 break;
688
689 case QUAL_UNION_TYPE:
690 ada_size
691 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
692 this_ada_size, ada_size);
693 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
694 this_size, size);
695 break;
696
697 case RECORD_TYPE:
698 /* Since we know here that all fields are sorted in order of
699 increasing bit position, the size of the record is one
700 higher than the ending bit of the last field processed
701 unless we have a rep clause, since in that case we might
702 have a field outside a QUAL_UNION_TYPE that has a higher ending
703 position. So use a MAX in that case. Also, if this field is a
704 QUAL_UNION_TYPE, we need to take into account the previous size in
705 the case of empty variants. */
706 ada_size
707 = merge_sizes (ada_size, pos, this_ada_size,
708 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
709 size
710 = merge_sizes (size, pos, this_size,
711 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
712 break;
713
714 default:
715 gcc_unreachable ();
716 }
717 }
718
719 if (code == QUAL_UNION_TYPE)
720 nreverse (fieldlist);
721
722 /* If the type is discriminated, it can be used to access all its
723 constrained subtypes, so force structural equality checks. */
724 if (CONTAINS_PLACEHOLDER_P (size))
725 SET_TYPE_STRUCTURAL_EQUALITY (record_type);
726
727 if (rep_level < 2)
728 {
729 /* If this is a padding record, we never want to make the size smaller
730 than what was specified in it, if any. */
731 if (TREE_CODE (record_type) == RECORD_TYPE
732 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
733 size = TYPE_SIZE (record_type);
734
735 /* Now set any of the values we've just computed that apply. */
736 if (!TYPE_IS_FAT_POINTER_P (record_type)
737 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
738 SET_TYPE_ADA_SIZE (record_type, ada_size);
739
740 if (rep_level > 0)
741 {
742 tree size_unit = had_size_unit
743 ? TYPE_SIZE_UNIT (record_type)
744 : convert (sizetype,
745 size_binop (CEIL_DIV_EXPR, size,
746 bitsize_unit_node));
747 unsigned int align = TYPE_ALIGN (record_type);
748
749 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
750 TYPE_SIZE_UNIT (record_type)
751 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
752
753 compute_record_mode (record_type);
754 }
755 }
756
757 if (!do_not_finalize)
758 rest_of_record_type_compilation (record_type);
759}
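/* A note on the two sizes computed above: TYPE_ADA_SIZE is essentially the
 RM size of the record, i.e. it stops at the last bit of the last field,
 whereas TYPE_SIZE is rounded up to the record's alignment. */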
760
761/* Wrap up compilation of RECORD_TYPE, i.e. most notably output all
762 the debug information associated with it. It need not be invoked
763 directly in most cases since finish_record_type takes care of doing
764 so, unless explicitly requested not to through DO_NOT_FINALIZE. */
765
766void
767rest_of_record_type_compilation (tree record_type)
768{
769 tree fieldlist = TYPE_FIELDS (record_type);
770 tree field;
771 enum tree_code code = TREE_CODE (record_type);
772 bool var_size = false;
773
774 for (field = fieldlist; field; field = TREE_CHAIN (field))
775 {
776 /* We need to make an XVE/XVU record if any field has variable size,
777 whether or not the record does. For example, if we have a union,
778 it may be that all fields, rounded up to the alignment, have the
779 same size, in which case we'll use that size. But the debug
780 output routines (except Dwarf2) won't be able to output the fields,
781 so we need to make the special record. */
782 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
783 /* If a field has a non-constant qualifier, the record will have
784 variable size too. */
785 || (code == QUAL_UNION_TYPE
786 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
787 {
788 var_size = true;
789 break;
790 }
791 }
792
793 /* If this record is of variable size, rename it so that the
794 debugger knows it is and make a new, parallel, record
795 that tells the debugger how the record is laid out. See
796 exp_dbug.ads. But don't do this for records that are padding
797 since they confuse GDB. */
798 if (var_size
799 && !(TREE_CODE (record_type) == RECORD_TYPE
800 && TYPE_IS_PADDING_P (record_type)))
801 {
802 tree new_record_type
803 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
804 ? UNION_TYPE : TREE_CODE (record_type));
805 tree orig_name = TYPE_NAME (record_type), new_name;
806 tree last_pos = bitsize_zero_node;
807 tree old_field, prev_old_field = NULL_TREE;
808
809 if (TREE_CODE (orig_name) == TYPE_DECL)
810 orig_name = DECL_NAME (orig_name);
811
812 new_name
813 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
814 ? "XVU" : "XVE");
815 TYPE_NAME (new_record_type) = new_name;
816 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
817 TYPE_STUB_DECL (new_record_type)
818 = create_type_stub_decl (new_name, new_record_type);
819 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
820 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
821 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
822 TYPE_SIZE_UNIT (new_record_type)
823 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
824
825 add_parallel_type (TYPE_STUB_DECL (record_type), new_record_type);
826
827 /* Now scan all the fields, replacing each field with a new
828 field corresponding to the new encoding. */
829 for (old_field = TYPE_FIELDS (record_type); old_field;
830 old_field = TREE_CHAIN (old_field))
831 {
832 tree field_type = TREE_TYPE (old_field);
833 tree field_name = DECL_NAME (old_field);
834 tree new_field;
835 tree curpos = bit_position (old_field);
836 bool var = false;
837 unsigned int align = 0;
838 tree pos;
839
840 /* See how the position was modified from the last position.
841
842 There are two basic cases we support: a value was added
843 to the last position or the last position was rounded to
844 a boundary and then something was added. Check for the
845 first case first. If not, see if there is any evidence
846 of rounding. If so, round the last position and try
847 again.
848
849 If this is a union, the position can be taken as zero. */
850
851 /* Some computations depend on the shape of the position expression,
852 so strip conversions to make sure it's exposed. */
853 curpos = remove_conversions (curpos, true);
854
855 if (TREE_CODE (new_record_type) == UNION_TYPE)
856 pos = bitsize_zero_node, align = 0;
857 else
858 pos = compute_related_constant (curpos, last_pos);
859
860 if (!pos && TREE_CODE (curpos) == MULT_EXPR
861 && host_integerp (TREE_OPERAND (curpos, 1), 1))
862 {
863 tree offset = TREE_OPERAND (curpos, 0);
864 align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);
865
866 /* An offset which is a bitwise AND with a negative power of 2
867 means an alignment corresponding to this power of 2. */
868 offset = remove_conversions (offset, true);
869 if (TREE_CODE (offset) == BIT_AND_EXPR
870 && host_integerp (TREE_OPERAND (offset, 1), 0)
871 && tree_int_cst_sgn (TREE_OPERAND (offset, 1)) < 0)
872 {
873 unsigned int pow
874 = - tree_low_cst (TREE_OPERAND (offset, 1), 0);
875 if (exact_log2 (pow) > 0)
876 align *= pow;
877 }
878
879 pos = compute_related_constant (curpos,
880 round_up (last_pos, align));
881 }
882 else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
883 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
884 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
885 && host_integerp (TREE_OPERAND
886 (TREE_OPERAND (curpos, 0), 1),
887 1))
888 {
889 align
890 = tree_low_cst
891 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
892 pos = compute_related_constant (curpos,
893 round_up (last_pos, align));
894 }
895 else if (potential_alignment_gap (prev_old_field, old_field,
896 pos))
897 {
898 align = TYPE_ALIGN (field_type);
899 pos = compute_related_constant (curpos,
900 round_up (last_pos, align));
901 }
902
903 /* If we can't compute a position, set it to zero.
904
905 ??? We really should abort here, but it's too much work
906 to get this correct for all cases. */
907
908 if (!pos)
909 pos = bitsize_zero_node;
910
911 /* See if this type is variable-sized and make a pointer type
912 and indicate the indirection if so. Beware that the debug
913 back-end may adjust the position computed above according
914 to the alignment of the field type, i.e. the pointer type
915 in this case, if we don't preventively counter that. */
916 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
917 {
918 field_type = build_pointer_type (field_type);
919 if (align != 0 && TYPE_ALIGN (field_type) > align)
920 {
921 field_type = copy_node (field_type);
922 TYPE_ALIGN (field_type) = align;
923 }
924 var = true;
925 }
926
927 /* Make a new field name, if necessary. */
928 if (var || align != 0)
929 {
930 char suffix[16];
931
932 if (align != 0)
933 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
934 align / BITS_PER_UNIT);
935 else
936 strcpy (suffix, "XVL");
937
938 field_name = concat_name (field_name, suffix);
939 }
940
941 new_field = create_field_decl (field_name, field_type,
942 new_record_type, 0,
943 DECL_SIZE (old_field), pos, 0);
944 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
945 TYPE_FIELDS (new_record_type) = new_field;
946
947 /* If old_field is a QUAL_UNION_TYPE, take its size as being
948 zero. The only time it's not the last field of the record
949 is when there are other components at fixed positions after
950 it (meaning there was a rep clause for every field) and we
951 want to be able to encode them. */
952 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
953 (TREE_CODE (TREE_TYPE (old_field))
954 == QUAL_UNION_TYPE)
955 ? bitsize_zero_node
956 : DECL_SIZE (old_field));
957 prev_old_field = old_field;
958 }
959
960 TYPE_FIELDS (new_record_type)
961 = nreverse (TYPE_FIELDS (new_record_type));
962
963 rest_of_type_decl_compilation (TYPE_STUB_DECL (new_record_type));
964 }
965
966 rest_of_type_decl_compilation (TYPE_STUB_DECL (record_type));
967}
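/* To recap the encoding implemented above: a variable-sized record gets a
 parallel XVE record (XVU when the original is a variant/union record)
 whose fields mirror the original ones, with names suffixed by XVL when
 the field is accessed through a pointer because its size is variable, or
 by XVAn/XVLn when an alignment of n bytes also applies, as documented in
 exp_dbug.ads. */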
968
969/* Append PARALLEL_TYPE on the chain of parallel types for decl. */
970
971void
972add_parallel_type (tree decl, tree parallel_type)
973{
974 tree d = decl;
975
976 while (DECL_PARALLEL_TYPE (d))
977 d = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (d));
978
979 SET_DECL_PARALLEL_TYPE (d, parallel_type);
980}
981
982/* Return the parallel type associated to a type, if any. */
983
984tree
985get_parallel_type (tree type)
986{
987 if (TYPE_STUB_DECL (type))
988 return DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type));
989 else
990 return NULL_TREE;
991}
992
993/* Utility function of above to merge LAST_SIZE, the previous size of a record
994 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
995 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
996 replace a value of zero with the old size. If HAS_REP is true, we take the
997 MAX of the end position of this field with LAST_SIZE. In all other cases,
998 we use FIRST_BIT plus SIZE. Return an expression for the size. */
999
1000static tree
1001merge_sizes (tree last_size, tree first_bit, tree size, bool special,
1002 bool has_rep)
1003{
1004 tree type = TREE_TYPE (last_size);
1005 tree new;
1006
1007 if (!special || TREE_CODE (size) != COND_EXPR)
1008 {
1009 new = size_binop (PLUS_EXPR, first_bit, size);
1010 if (has_rep)
1011 new = size_binop (MAX_EXPR, last_size, new);
1012 }
1013
1014 else
1015 new = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
1016 integer_zerop (TREE_OPERAND (size, 1))
1017 ? last_size : merge_sizes (last_size, first_bit,
1018 TREE_OPERAND (size, 1),
1019 1, has_rep),
1020 integer_zerop (TREE_OPERAND (size, 2))
1021 ? last_size : merge_sizes (last_size, first_bit,
1022 TREE_OPERAND (size, 2),
1023 1, has_rep));
1024
1025 /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
1026 when fed through substitute_in_expr) into thinking that a constant
1027 size is not constant. */
1028 while (TREE_CODE (new) == NON_LVALUE_EXPR)
1029 new = TREE_OPERAND (new, 0);
1030
1031 return new;
1032}
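/* Worked example for the common case: for a field at bit 32 with size 16,
 merge_sizes (last_size, 32, 16, false, false) simply yields 48; with
 HAS_REP true it yields MAX (last_size, 48) instead, since a rep clause
 may place an earlier field beyond this one. */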
1033
1034/* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1035 related by the addition of a constant. Return that constant if so. */
1036
1037static tree
1038compute_related_constant (tree op0, tree op1)
1039{
1040 tree op0_var, op1_var;
1041 tree op0_con = split_plus (op0, &op0_var);
1042 tree op1_con = split_plus (op1, &op1_var);
1043 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1044
1045 if (operand_equal_p (op0_var, op1_var, 0))
1046 return result;
1047 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1048 return result;
1049 else
1050 return 0;
1051}
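/* For instance, if OP0 is N + 16 and OP1 is N, both split into the same
 variable part N with constant parts 16 and 0, so the function returns 16;
 if the variable parts differ, it returns 0 to signal failure. */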
1052
1053/* Utility function of above to split a tree OP which may be a sum, into a
1054 constant part, which is returned, and a variable part, which is stored
1055 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1056 bitsizetype. */
1057
1058static tree
1059split_plus (tree in, tree *pvar)
1060{
1061 /* Strip NOPS in order to ease the tree traversal and maximize the
1062 potential for constant or plus/minus discovery. We need to be careful
1063 to always return and set *pvar to bitsizetype trees, but it's worth
1064 the effort. */
1065 STRIP_NOPS (in);
1066
1067 *pvar = convert (bitsizetype, in);
1068
1069 if (TREE_CODE (in) == INTEGER_CST)
1070 {
1071 *pvar = bitsize_zero_node;
1072 return convert (bitsizetype, in);
1073 }
1074 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1075 {
1076 tree lhs_var, rhs_var;
1077 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1078 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1079
1080 if (lhs_var == TREE_OPERAND (in, 0)
1081 && rhs_var == TREE_OPERAND (in, 1))
1082 return bitsize_zero_node;
1083
1084 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1085 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1086 }
1087 else
1088 return bitsize_zero_node;
1089}
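/* E.g. splitting SIZE + 8 returns 8 with *PVAR set to SIZE, while splitting
 the constant 24 returns 24 with *PVAR set to bitsize_zero_node. */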
1090\f
1091/* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1092 subprogram. If it is void_type_node, then we are dealing with a procedure,
1093 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1094 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1095 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1096 RETURNS_UNCONSTRAINED is true if the function returns an unconstrained
1097 object. RETURNS_BY_REF is true if the function returns by reference.
1098 RETURNS_BY_TARGET_PTR is true if the function is to be passed (as its
1099 first parameter) the address of the place to copy its result. */
1100
1101tree
1102create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
1103 bool returns_unconstrained, bool returns_by_ref,
1104 bool returns_by_target_ptr)
1105{
1106 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1107 the subprogram formal parameters. This list is generated by traversing the
1108 input list of PARM_DECL nodes. */
1109 tree param_type_list = NULL;
1110 tree param_decl;
1111 tree type;
1112
1113 for (param_decl = param_decl_list; param_decl;
1114 param_decl = TREE_CHAIN (param_decl))
1115 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1116 param_type_list);
1117
1118 /* The list of the function parameter types has to be terminated by the void
1119 type to signal to the back-end that we are not dealing with a variable
1120 parameter subprogram, but that the subprogram has a fixed number of
1121 parameters. */
1122 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1123
1124 /* The list of argument types has been created in reverse
1125 so nreverse it. */
1126 param_type_list = nreverse (param_type_list);
1127
1128 type = build_function_type (return_type, param_type_list);
1129
1130 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1131 or the new type should, make a copy of TYPE. Likewise for
1132 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1133 if (TYPE_CI_CO_LIST (type) || cico_list
1134 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1135 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref
1136 || TYPE_RETURNS_BY_TARGET_PTR_P (type) != returns_by_target_ptr)
1137 type = copy_type (type);
1138
1139 TYPE_CI_CO_LIST (type) = cico_list;
1140 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1141 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1142 TYPE_RETURNS_BY_TARGET_PTR_P (type) = returns_by_target_ptr;
1143 return type;
1144}
1145\f
1146/* Return a copy of TYPE but safe to modify in any way. */
1147
1148tree
1149copy_type (tree type)
1150{
1151 tree new = copy_node (type);
1152
1153 /* copy_node clears this field instead of copying it, because it is
1154 aliased with TREE_CHAIN. */
1155 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
1156
1157 TYPE_POINTER_TO (new) = 0;
1158 TYPE_REFERENCE_TO (new) = 0;
1159 TYPE_MAIN_VARIANT (new) = new;
1160 TYPE_NEXT_VARIANT (new) = 0;
1161
1162 return new;
1163}
1164\f
1165/* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1166 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position of
1167 the decl. */
1168
1169tree
1170create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
1171{
1172 /* First build a type for the desired range. */
1173 tree type = build_index_2_type (min, max);
1174
1175 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1176 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1177 is set, but not to INDEX, make a copy of this type with the requested
1178 index type. Note that we have no way of sharing these types, but that's
1179 only a small hole. */
1180 if (TYPE_INDEX_TYPE (type) == index)
1181 return type;
1182 else if (TYPE_INDEX_TYPE (type))
1183 type = copy_type (type);
1184
1185 SET_TYPE_INDEX_TYPE (type, index);
1186 create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);
1187 return type;
1188}
1189\f
1190/* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
1191 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
1192 its data type. */
1193
1194tree
1195create_type_stub_decl (tree type_name, tree type)
1196{
1197 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
1198 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
1199 emitted in DWARF. */
1200 tree type_decl = build_decl (TYPE_DECL, type_name, type);
1201 DECL_ARTIFICIAL (type_decl) = 1;
1202 return type_decl;
1203}
1204
1205/* Return a TYPE_DECL node. TYPE_NAME gives the name of the type and TYPE
1206 is a ..._TYPE node giving its data type. ARTIFICIAL_P is true if this
1207 is a declaration that was generated by the compiler. DEBUG_INFO_P is
1208 true if we need to write debug information about this type. GNAT_NODE
1209 is used for the position of the decl. */
1210
1211tree
1212create_type_decl (tree type_name, tree type, struct attrib *attr_list,
1213 bool artificial_p, bool debug_info_p, Node_Id gnat_node)
1214{
1215 enum tree_code code = TREE_CODE (type);
1216 bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
1217 tree type_decl;
1218
1219 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
1220 gcc_assert (!TYPE_IS_DUMMY_P (type));
1221
1222 /* If the type hasn't been named yet, we're naming it; preserve an existing
1223 TYPE_STUB_DECL that has been attached to it for some purpose. */
1224 if (!named && TYPE_STUB_DECL (type))
1225 {
1226 type_decl = TYPE_STUB_DECL (type);
1227 DECL_NAME (type_decl) = type_name;
1228 }
1229 else
1230 type_decl = build_decl (TYPE_DECL, type_name, type);
1231
1232 DECL_ARTIFICIAL (type_decl) = artificial_p;
1233 gnat_pushdecl (type_decl, gnat_node);
1234 process_attributes (type_decl, attr_list);
1235
1236 /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
1237 This causes the name to be also viewed as a "tag" by the debug
1238 back-end, with the advantage that no DW_TAG_typedef is emitted
1239 for artificial "tagged" types in DWARF. */
1240 if (!named)
1241 TYPE_STUB_DECL (type) = type_decl;
1242
1243 /* Pass the type declaration to the debug back-end unless this is an
1244 UNCONSTRAINED_ARRAY_TYPE that the back-end does not support, or a
1245 type for which debugging information was not requested, or else an
1246 ENUMERAL_TYPE or RECORD_TYPE (except for fat pointers) which are
1247 handled separately. And do not pass dummy types either. */
1248 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
1249 DECL_IGNORED_P (type_decl) = 1;
1250 else if (code != ENUMERAL_TYPE
1251 && (code != RECORD_TYPE || TYPE_IS_FAT_POINTER_P (type))
1252 && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
1253 && TYPE_IS_DUMMY_P (TREE_TYPE (type)))
1254 && !(code == RECORD_TYPE
1255 && TYPE_IS_DUMMY_P
1256 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))))))
1257 rest_of_type_decl_compilation (type_decl);
1258
1259 return type_decl;
1260}
1261\f
1262/* Return a VAR_DECL or CONST_DECL node.
1263
1264 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
1265 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
1266 the GCC tree for an optional initial expression; NULL_TREE if none.
1267
1268 CONST_FLAG is true if this variable is constant, in which case we might
1269 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
1270
1271 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
1272 definition to be made visible outside of the current compilation unit, for
1273 instance variable definitions in a package specification.
1274
1275 EXTERN_FLAG is true when processing an external variable declaration (as
1276 opposed to a definition: no storage is to be allocated for the variable).
1277
1278 STATIC_FLAG is only relevant when not at top level. In that case
1279 it indicates whether to always allocate storage to the variable.
1280
1281 GNAT_NODE is used for the position of the decl. */
1282
1283tree
1284create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
1285 bool const_flag, bool public_flag, bool extern_flag,
1286 bool static_flag, bool const_decl_allowed_p,
1287 struct attrib *attr_list, Node_Id gnat_node)
1288{
1289 bool init_const
1290 = (var_init != 0
1291 && gnat_types_compatible_p (type, TREE_TYPE (var_init))
1292 && (global_bindings_p () || static_flag
1293 ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
1294 : TREE_CONSTANT (var_init)));
1295
1296 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
1297 case the initializer may be used in-lieu of the DECL node (as done in
1298 Identifier_to_gnu). This is useful to prevent the need of elaboration
1299 code when an identifier for which such a decl is made is in turn used as
1300 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
1301 but extra constraints apply to this choice (see below) and are not
1302 relevant to the distinction we wish to make. */
1303 bool constant_p = const_flag && init_const;
1304
1305 /* The actual DECL node. CONST_DECL was initially intended for enumerals
1306 and may be used for scalars in general but not for aggregates. */
1307 tree var_decl
1308 = build_decl ((constant_p && const_decl_allowed_p
1309 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
1310 var_name, type);
1311
1312 /* If this is external, throw away any initializations (they will be done
1313 elsewhere) unless this is a constant for which we would like to remain
1314 able to get the initializer. If we are defining a global here, leave a
1315 constant initialization and save any variable elaborations for the
1316 elaboration routine. If we are just annotating types, throw away the
1317 initialization if it isn't a constant. */
1318 if ((extern_flag && !constant_p)
1319 || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
1320 var_init = NULL_TREE;
1321
1322 /* At the global level, an initializer requiring code to be generated
1323 produces elaboration statements. Check that such statements are allowed,
1324 that is, not violating a No_Elaboration_Code restriction. */
1325 if (global_bindings_p () && var_init != 0 && ! init_const)
1326 Check_Elaboration_Code_Allowed (gnat_node);
1327
1328 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
1329 try to fiddle with DECL_COMMON. However, on platforms that don't
1330 support global BSS sections, uninitialized global variables would
1331 go in DATA instead, thus increasing the size of the executable. */
1332 if (!flag_no_common
1333 && TREE_CODE (var_decl) == VAR_DECL
1334 && !have_global_bss_p ())
1335 DECL_COMMON (var_decl) = 1;
1336 DECL_INITIAL (var_decl) = var_init;
1337 TREE_READONLY (var_decl) = const_flag;
1338 DECL_EXTERNAL (var_decl) = extern_flag;
1339 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
1340 TREE_CONSTANT (var_decl) = constant_p;
1341 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1342 = TYPE_VOLATILE (type);
1343
1344 /* If it's public and not external, always allocate storage for it.
1345 At the global binding level we need to allocate static storage for the
1346 variable if and only if it's not external. If we are not at the top level
1347 we allocate automatic storage unless requested not to. */
1348 TREE_STATIC (var_decl)
1349 = !extern_flag && (public_flag || static_flag || global_bindings_p ());
1350
1351 if (asm_name && VAR_OR_FUNCTION_DECL_P (var_decl))
1352 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1353
1354 process_attributes (var_decl, attr_list);
1355
1356 /* Add this decl to the current binding level. */
1357 gnat_pushdecl (var_decl, gnat_node);
1358
1359 if (TREE_SIDE_EFFECTS (var_decl))
1360 TREE_ADDRESSABLE (var_decl) = 1;
1361
1362 if (TREE_CODE (var_decl) != CONST_DECL)
1363 {
1364 if (global_bindings_p ())
1365 rest_of_decl_compilation (var_decl, true, 0);
1366 }
1367 else
1368 expand_decl (var_decl);
1369
1370 return var_decl;
1371}
1372\f
1373/* Return true if TYPE, an aggregate type, contains (or is) an array. */
1374
1375static bool
1376aggregate_type_contains_array_p (tree type)
1377{
1378 switch (TREE_CODE (type))
1379 {
1380 case RECORD_TYPE:
1381 case UNION_TYPE:
1382 case QUAL_UNION_TYPE:
1383 {
1384 tree field;
1385 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1386 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
1387 && aggregate_type_contains_array_p (TREE_TYPE (field)))
1388 return true;
1389 return false;
1390 }
1391
1392 case ARRAY_TYPE:
1393 return true;
1394
1395 default:
1396 gcc_unreachable ();
1397 }
1398}
1399
1400/* Returns a FIELD_DECL node. FIELD_NAME the field name, FIELD_TYPE is its
1401 type, and RECORD_TYPE is the type of the parent. PACKED is nonzero if
1402 this field is in a record type with a "pragma pack". If SIZE is nonzero
1403 it is the specified size for this field. If POS is nonzero, it is the bit
1404 position. If ADDRESSABLE is nonzero, it means we are allowed to take
1405 the address of this field for aliasing purposes. If it is negative, we
1406 should not make a bitfield, which is used by make_aligning_type. */
1407
1408tree
1409create_field_decl (tree field_name, tree field_type, tree record_type,
1410 int packed, tree size, tree pos, int addressable)
1411{
1412 tree field_decl = build_decl (FIELD_DECL, field_name, field_type);
1413
1414 DECL_CONTEXT (field_decl) = record_type;
1415 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
1416
1417 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1418 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
1419 Likewise for an aggregate without specified position that contains an
1420 array, because in this case slices of variable length of this array
1421 must be handled by GCC and variable-sized objects need to be aligned
1422 to at least a byte boundary. */
1423 if (packed && (TYPE_MODE (field_type) == BLKmode
1424 || (!pos
1425 && AGGREGATE_TYPE_P (field_type)
1426 && aggregate_type_contains_array_p (field_type))))
1427 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
1428
1429 /* If a size is specified, use it. Otherwise, if the record type is packed
1430 compute a size to use, which may differ from the object's natural size.
1431 We always set a size in this case to trigger the checks for bitfield
1432 creation below, which is typically required when no position has been
1433 specified. */
1434 if (size)
1435 size = convert (bitsizetype, size);
1436 else if (packed == 1)
1437 {
1438 size = rm_size (field_type);
1439
1440 /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
1441 byte. */
1442 if (TREE_CODE (size) == INTEGER_CST
1443 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
1444 size = round_up (size, BITS_PER_UNIT);
1445 }
1446
1447 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
1448 specified for two reasons: first if the size differs from the natural
1449 size. Second, if the alignment is insufficient. There are a number of
1450 ways the latter can be true.
1451
1452 We never make a bitfield if the type of the field has a nonconstant size,
1453 because no such entity requiring bitfield operations should reach here.
1454
1455 We do *preventively* make a bitfield when there might be the need for it
1456 but we don't have all the necessary information to decide, as is the case
1457 of a field with no specified position in a packed record.
1458
1459 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
1460 in layout_decl or finish_record_type to clear the bit_field indication if
1461 it is in fact not needed. */
1462 if (addressable >= 0
1463 && size
1464 && TREE_CODE (size) == INTEGER_CST
1465 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1466 && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
1467 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
1468 || packed
1469 || (TYPE_ALIGN (record_type) != 0
1470 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1471 {
1472 DECL_BIT_FIELD (field_decl) = 1;
1473 DECL_SIZE (field_decl) = size;
1474 if (!packed && !pos)
1475 DECL_ALIGN (field_decl)
1476 = (TYPE_ALIGN (record_type) != 0
1477 ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
1478 : TYPE_ALIGN (field_type));
1479 }
1480
1481 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
1482
1483 /* Bump the alignment if need be, either for bitfield/packing purposes or
1484 to satisfy the type requirements if no such consideration applies. When
1485 we get the alignment from the type, indicate if this is from an explicit
1486 user request, which prevents stor-layout from lowering it later on. */
1487 {
1488 unsigned int bit_align
1489 = (DECL_BIT_FIELD (field_decl) ? 1
1490 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);
1491
1492 if (bit_align > DECL_ALIGN (field_decl))
1493 DECL_ALIGN (field_decl) = bit_align;
1494 else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
1495 {
1496 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
1497 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
1498 }
1499 }
1500
1501 if (pos)
1502 {
1503 /* We need to pass in the alignment the DECL is known to have.
1504 This is the lowest-order bit set in POS, but no more than
1505 the alignment of the record, if one is specified. Note
1506 that an alignment of 0 is taken as infinite. */
1507 unsigned int known_align;
1508
1509 if (host_integerp (pos, 1))
1510 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1511 else
1512 known_align = BITS_PER_UNIT;
1513
1514 if (TYPE_ALIGN (record_type)
1515 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1516 known_align = TYPE_ALIGN (record_type);
1517
1518 layout_decl (field_decl, known_align);
1519 SET_DECL_OFFSET_ALIGN (field_decl,
1520 host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
1521 : BITS_PER_UNIT);
1522 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1523 &DECL_FIELD_BIT_OFFSET (field_decl),
1524 DECL_OFFSET_ALIGN (field_decl), pos);
1525 }
1526
1527 /* In addition to what our caller says, claim the field is addressable if we
1528 know that its type is not suitable.
1529
1530 The field may also be "technically" nonaddressable, meaning that even if
1531 we attempt to take the field's address we will actually get the address
1532 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
1533 value we have at this point is not accurate enough, so we don't account
1534 for this here and let finish_record_type decide. */
4c5a0615 1535 if (!addressable && !type_for_nonaliased_component_p (field_type))
1536 addressable = 1;
1537
1538 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
1539
1540 return field_decl;
1541}
1542\f
1543/* Returns a PARM_DECL node. PARAM_NAME is the name of the parameter,
1544 PARAM_TYPE is its type. READONLY is true if the parameter is
1545 readonly (either an In parameter or an address of a pass-by-ref
1546 parameter). */
1547
1548tree
1549create_param_decl (tree param_name, tree param_type, bool readonly)
1550{
1551 tree param_decl = build_decl (PARM_DECL, param_name, param_type);
1552
1553 /* Honor targetm.calls.promote_prototypes(), as not doing so can
1554 lead to various ABI violations. */
1555 if (targetm.calls.promote_prototypes (param_type)
1556 && (TREE_CODE (param_type) == INTEGER_TYPE
1557 || TREE_CODE (param_type) == ENUMERAL_TYPE
1558 || TREE_CODE (param_type) == BOOLEAN_TYPE)
1559 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1560 {
1561 /* We have to be careful about biased types here. Make a subtype
1562 of integer_type_node with the proper biasing. */
1563 if (TREE_CODE (param_type) == INTEGER_TYPE
1564 && TYPE_BIASED_REPRESENTATION_P (param_type))
1565 {
1566 param_type
1567 = copy_type (build_range_type (integer_type_node,
1568 TYPE_MIN_VALUE (param_type),
1569 TYPE_MAX_VALUE (param_type)));
1570
1571 TYPE_BIASED_REPRESENTATION_P (param_type) = 1;
1572 }
1573 else
1574 param_type = integer_type_node;
1575 }
1576
1577 DECL_ARG_TYPE (param_decl) = param_type;
1578 TREE_READONLY (param_decl) = readonly;
1579 return param_decl;
1580}
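
A minimal sketch of a call to create_param_decl, for illustration only (the name "p" and the 8-bit type are made up, not taken from the Ada front-end):

     /* Illustrative only: build a read-only 8-bit In parameter.  */
     tree gnu_param
       = create_param_decl (get_identifier ("p"),
                            gnat_type_for_size (8, 0),
                            true);

On a target whose promote_prototypes hook returns true, the code above leaves TREE_TYPE (gnu_param) as the 8-bit type but widens DECL_ARG_TYPE (gnu_param) to integer_type_node.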
1581\f
1582/* Given a DECL and ATTR_LIST, process the listed attributes. */
1583
1584void
1585process_attributes (tree decl, struct attrib *attr_list)
1586{
1587 for (; attr_list; attr_list = attr_list->next)
1588 switch (attr_list->type)
1589 {
1590 case ATTR_MACHINE_ATTRIBUTE:
1591 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
1592 NULL_TREE),
1593 ATTR_FLAG_TYPE_IN_PLACE);
1594 break;
1595
1596 case ATTR_LINK_ALIAS:
1597 if (! DECL_EXTERNAL (decl))
1598 {
1599 TREE_STATIC (decl) = 1;
1600 assemble_alias (decl, attr_list->name);
1601 }
1602 break;
1603
1604 case ATTR_WEAK_EXTERNAL:
1605 if (SUPPORTS_WEAK)
1606 declare_weak (decl);
1607 else
1608 post_error ("?weak declarations not supported on this target",
1609 attr_list->error_point);
1610 break;
1611
1612 case ATTR_LINK_SECTION:
1613 if (targetm.have_named_sections)
1614 {
1615 DECL_SECTION_NAME (decl)
1616 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1617 IDENTIFIER_POINTER (attr_list->name));
1618 DECL_COMMON (decl) = 0;
1619 }
1620 else
1621 post_error ("?section attributes are not supported for this target",
1622 attr_list->error_point);
1623 break;
1624
1625 case ATTR_LINK_CONSTRUCTOR:
1626 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1627 TREE_USED (decl) = 1;
1628 break;
1629
1630 case ATTR_LINK_DESTRUCTOR:
1631 DECL_STATIC_DESTRUCTOR (decl) = 1;
1632 TREE_USED (decl) = 1;
1633 break;
1634
1635 case ATTR_THREAD_LOCAL_STORAGE:
1636 DECL_TLS_MODEL (decl) = decl_default_tls_model (decl);
1637 DECL_COMMON (decl) = 0;
40a14772 1638 break;
1639 }
1640}
1641\f
1642/* Record a global renaming pointer. */
1643
1644void
1645record_global_renaming_pointer (tree decl)
1646{
1647 gcc_assert (DECL_RENAMED_OBJECT (decl));
1648 VEC_safe_push (tree, gc, global_renaming_pointers, decl);
1649}
1650
1651/* Invalidate the global renaming pointers. */
1652
1653void
1654invalidate_global_renaming_pointers (void)
1655{
1656 unsigned int i;
1657 tree iter;
1658
1659 for (i = 0; VEC_iterate(tree, global_renaming_pointers, i, iter); i++)
1660 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
1661
1662 VEC_free (tree, gc, global_renaming_pointers);
1663}
1664
1665/* Return true if VALUE is known to be a multiple of FACTOR, which must be
1666 a power of 2. */
1667
1668bool
1669value_factor_p (tree value, HOST_WIDE_INT factor)
1670{
1671 if (host_integerp (value, 1))
1672 return tree_low_cst (value, 1) % factor == 0;
1673
1674 if (TREE_CODE (value) == MULT_EXPR)
1675 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1676 || value_factor_p (TREE_OPERAND (value, 1), factor));
1677
1678 return false;
1679}
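
For illustration (made-up operands): a constant size of 24 bits is a known multiple of 8, and for a MULT_EXPR it is enough that either operand be such a multiple.

     /* Illustrative only.  */
     bool byte_multiple_p = value_factor_p (size_int (24), 8);  /* true */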
1680
1681/* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
1682 unless we can prove these 2 fields are laid out in such a way that no gap
1683 exists between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
1684 is the distance in bits between the end of PREV_FIELD and the starting
1685 position of CURR_FIELD. It is ignored if null. */
1686
1687static bool
1688potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
1689{
1690 /* If this is the first field of the record, there cannot be any gap. */
1691 if (!prev_field)
1692 return false;
1693
1694 /* If the previous field is a union type, then return False: The only
1695 time when such a field is not the last field of the record is when
1696 there are other components at fixed positions after it (meaning there
1697 was a rep clause for every field), in which case we don't want the
1698 alignment constraint to override them. */
1699 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
1700 return false;
1701
1702 /* If the distance between the end of prev_field and the beginning of
1703 curr_field is constant, then there is a gap if the value of this
1704 constant is not null. */
1705 if (offset && host_integerp (offset, 1))
1706 return !integer_zerop (offset);
1707
1708 /* If the size and position of the previous field are constant,
1709 then check the sum of this size and position. There will be a gap
1710 iff it is not a multiple of the current field alignment. */
1711 if (host_integerp (DECL_SIZE (prev_field), 1)
1712 && host_integerp (bit_position (prev_field), 1))
1713 return ((tree_low_cst (bit_position (prev_field), 1)
1714 + tree_low_cst (DECL_SIZE (prev_field), 1))
1715 % DECL_ALIGN (curr_field) != 0);
1716
1717 /* If both the position and size of the previous field are multiples
1718 of the current field alignment, there cannot be any gap. */
1719 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
1720 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
1721 return false;
1722
1723 /* Fallback: return that there may be a potential gap. */
1724 return true;
1725}
1726
1727/* Returns a LABEL_DECL node for LABEL_NAME. */
1728
1729tree
1730create_label_decl (tree label_name)
1731{
1732 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
1733
1734 DECL_CONTEXT (label_decl) = current_function_decl;
1735 DECL_MODE (label_decl) = VOIDmode;
1736 DECL_SOURCE_LOCATION (label_decl) = input_location;
1737
1738 return label_decl;
1739}
1740\f
1741/* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1742 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1743 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1744 PARM_DECL nodes chained through the TREE_CHAIN field).
1745
1746 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1747 appropriate fields in the FUNCTION_DECL. GNAT_NODE gives the location. */
1748
1749tree
1750create_subprog_decl (tree subprog_name, tree asm_name,
1751 tree subprog_type, tree param_decl_list, bool inline_flag,
1752 bool public_flag, bool extern_flag,
1753 struct attrib *attr_list, Node_Id gnat_node)
1754{
1755 tree return_type = TREE_TYPE (subprog_type);
1756 tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);
1757
1758 /* If this is a non-inline function nested inside an inlined external
1759 function, we cannot honor both requests without cloning the nested
1760 function in the current unit since it is private to the other unit.
1761 We could inline the nested function as well but it's probably better
1762 to err on the side of too little inlining. */
1763 if (!inline_flag
1764 && current_function_decl
1765 && DECL_DECLARED_INLINE_P (current_function_decl)
a1ab4c31 1766 && DECL_EXTERNAL (current_function_decl))
d84b344a 1767 DECL_DECLARED_INLINE_P (current_function_decl) = 0;
1768
1769 DECL_EXTERNAL (subprog_decl) = extern_flag;
1770 TREE_PUBLIC (subprog_decl) = public_flag;
1771 TREE_STATIC (subprog_decl) = 1;
1772 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1773 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1774 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
d84b344a 1775 DECL_DECLARED_INLINE_P (subprog_decl) = inline_flag;
1776 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1777 DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);
1778 DECL_ARTIFICIAL (DECL_RESULT (subprog_decl)) = 1;
1779 DECL_IGNORED_P (DECL_RESULT (subprog_decl)) = 1;
1780
1781 /* TREE_ADDRESSABLE is set on the result type to request the use of the
1782 target by-reference return mechanism. This is not supported all the
1783 way down to RTL expansion with GCC 4, which ICEs on temporary creation
1784 attempts with such a type and expects DECL_BY_REFERENCE to be set on
1785 the RESULT_DECL instead - see gnat_genericize for more details. */
1786 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (subprog_decl))))
1787 {
1788 tree result_decl = DECL_RESULT (subprog_decl);
1789
1790 TREE_ADDRESSABLE (TREE_TYPE (result_decl)) = 0;
1791 DECL_BY_REFERENCE (result_decl) = 1;
1792 }
1793
1794 if (asm_name)
1795 {
1796 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1797
1798 /* The expand_main_function circuitry expects "main_identifier_node" to
1799 designate the DECL_NAME of the 'main' entry point, in turn expected
1800 to be declared as the "main" function literally by default. Ada
1801 program entry points are typically declared with a different name
1802 within the binder generated file, exported as 'main' to satisfy the
1803 system expectations. Redirect main_identifier_node in this case. */
1804 if (asm_name == main_identifier_node)
1805 main_identifier_node = DECL_NAME (subprog_decl);
1806 }
1807
1808 process_attributes (subprog_decl, attr_list);
1809
1810 /* Add this decl to the current binding level. */
1811 gnat_pushdecl (subprog_decl, gnat_node);
1812
1813 /* Output the assembler code and/or RTL for the declaration. */
1814 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
1815
1816 return subprog_decl;
1817}
1818\f
1819/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1820 body. This routine needs to be invoked before processing the declarations
1821 appearing in the subprogram. */
1822
1823void
1824begin_subprog_body (tree subprog_decl)
1825{
1826 tree param_decl;
1827
1828 current_function_decl = subprog_decl;
1829 announce_function (subprog_decl);
1830
1831 /* Enter a new binding level and show that all the parameters belong to
1832 this function. */
1833 gnat_pushlevel ();
1834 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
1835 param_decl = TREE_CHAIN (param_decl))
1836 DECL_CONTEXT (param_decl) = subprog_decl;
1837
1838 make_decl_rtl (subprog_decl);
1839
1840 /* We handle pending sizes via the elaboration of types, so we don't need to
1841 save them. This causes them to be marked as part of the outer function
1842 and then discarded. */
1843 get_pending_sizes ();
1844}
1845
1846
1847/* Helper for the genericization callback. Return a dereference of VAL
1848 if it is of a reference type. */
1849
1850static tree
1851convert_from_reference (tree val)
1852{
1853 tree value_type, ref;
1854
1855 if (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE)
1856 return val;
1857
1858 value_type = TREE_TYPE (TREE_TYPE (val));
1859 ref = build1 (INDIRECT_REF, value_type, val);
1860
1861 /* See if what we reference is CONST or VOLATILE, which requires
1862 looking into array types to get to the component type. */
1863
1864 while (TREE_CODE (value_type) == ARRAY_TYPE)
1865 value_type = TREE_TYPE (value_type);
1866
1867 TREE_READONLY (ref)
1868 = (TYPE_QUALS (value_type) & TYPE_QUAL_CONST);
1869 TREE_THIS_VOLATILE (ref)
1870 = (TYPE_QUALS (value_type) & TYPE_QUAL_VOLATILE);
1871
1872 TREE_SIDE_EFFECTS (ref)
1873 = (TREE_THIS_VOLATILE (ref) || TREE_SIDE_EFFECTS (val));
1874
1875 return ref;
1876}
1877
1878/* Helper for the genericization callback. Returns true if T denotes
1879 a RESULT_DECL with DECL_BY_REFERENCE set. */
1880
1881static inline bool
1882is_byref_result (tree t)
1883{
1884 return (TREE_CODE (t) == RESULT_DECL && DECL_BY_REFERENCE (t));
1885}
1886
1887
1888/* Tree walking callback for gnat_genericize. Currently ...
1889
1890 o Adjust references to the function's DECL_RESULT if it is marked
1891 DECL_BY_REFERENCE and so has had its type turned into a reference
1892 type at the end of the function compilation. */
1893
1894static tree
1895gnat_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
1896{
1897 /* This implementation is modeled after what the C++ front-end is
1898 doing, which is the basis for the behavior of the downstream passes. */
1899
1900 tree stmt = *stmt_p;
1901 struct pointer_set_t *p_set = (struct pointer_set_t*) data;
1902
1903 /* If we have a direct mention of the result decl, dereference. */
1904 if (is_byref_result (stmt))
1905 {
1906 *stmt_p = convert_from_reference (stmt);
1907 *walk_subtrees = 0;
1908 return NULL;
1909 }
1910
1911 /* Otherwise, no need to walk the same tree twice. */
1912 if (pointer_set_contains (p_set, stmt))
1913 {
1914 *walk_subtrees = 0;
1915 return NULL_TREE;
1916 }
1917
1918 /* If we are taking the address of what now is a reference, just get the
1919 reference value. */
1920 if (TREE_CODE (stmt) == ADDR_EXPR
1921 && is_byref_result (TREE_OPERAND (stmt, 0)))
1922 {
1923 *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
1924 *walk_subtrees = 0;
1925 }
1926
1927 /* Don't dereference a by-reference RESULT_DECL inside a RETURN_EXPR. */
1928 else if (TREE_CODE (stmt) == RETURN_EXPR
1929 && TREE_OPERAND (stmt, 0)
1930 && is_byref_result (TREE_OPERAND (stmt, 0)))
1931 *walk_subtrees = 0;
1932
1933 /* Don't look inside trees that cannot embed references of interest. */
1934 else if (IS_TYPE_OR_DECL_P (stmt))
1935 *walk_subtrees = 0;
1936
1937 pointer_set_insert (p_set, *stmt_p);
1938
1939 return NULL;
1940}
1941
1942/* Perform lowering of Ada trees to GENERIC. In particular:
1943
1944 o Turn a DECL_BY_REFERENCE RESULT_DECL into a real by-reference decl
1945 and adjust all the references to this decl accordingly. */
1946
1947static void
1948gnat_genericize (tree fndecl)
1949{
1950 /* Prior to GCC 4, an explicit By_Reference result mechanism for a function
1951 was handled by simply setting TREE_ADDRESSABLE on the result type.
1952 Everything required to actually pass by invisible ref using the target
1953 mechanism (e.g. extra parameter) was handled at RTL expansion time.
1954
1955 This doesn't work with GCC 4 any more for several reasons. First, the
1956 gimplification process might need the creation of temporaries of this
1957 type, and the gimplifier ICEs on such attempts. Second, the middle-end
1958 now relies on a different attribute for such cases (DECL_BY_REFERENCE on
1959 RESULT/PARM_DECLs), and expects the user invisible by-reference-ness to
1960 be explicitly accounted for by the front-end in the function body.
1961
1962 We achieve the complete transformation in two steps:
1963
1964 1/ create_subprog_decl performs early attribute tweaks: it clears
1965 TREE_ADDRESSABLE from the result type and sets DECL_BY_REFERENCE on
1966 the result decl. The former ensures that the bit isn't set in the GCC
1967 tree saved for the function, so prevents ICEs on temporary creation.
1968 The latter we use here to trigger the rest of the processing.
1969
1970 2/ This function performs the type transformation on the result decl
1971 and adjusts all the references to this decl from the function body
1972 accordingly.
1973
1974 Clearing TREE_ADDRESSABLE from the type differs from the C++ front-end
1975 strategy, which escapes the gimplifier temporary creation issues by
1976 creating its own temporaries using TARGET_EXPR nodes. Our way relies
1977 on simple specific support code in aggregate_value_p to look at the
1978 target function result decl explicitly. */
1979
1980 struct pointer_set_t *p_set;
1981 tree decl_result = DECL_RESULT (fndecl);
1982
1983 if (!DECL_BY_REFERENCE (decl_result))
1984 return;
1985
1986 /* Make the DECL_RESULT explicitly by-reference and adjust all the
1987 occurrences in the function body using the common tree-walking facility.
1988 We want to see every occurrence of the result decl to adjust the
1989 referencing tree, so need to use our own pointer set to control which
1990 trees should be visited again or not. */
1991
1992 p_set = pointer_set_create ();
1993
1994 TREE_TYPE (decl_result) = build_reference_type (TREE_TYPE (decl_result));
1995 TREE_ADDRESSABLE (decl_result) = 0;
1996 relayout_decl (decl_result);
1997
1998 walk_tree (&DECL_SAVED_TREE (fndecl), gnat_genericize_r, p_set, NULL);
1999
2000 pointer_set_destroy (p_set);
2001}
2002
2003/* Finish the definition of the current subprogram BODY and compile it all the
2004 way to assembler language output. ELAB_P tells whether this is called for
2005 an elaboration routine, which is to be entirely discarded if empty. */
2006
2007void
2008end_subprog_body (tree body, bool elab_p)
2009{
2010 tree fndecl = current_function_decl;
2011
2012 /* Mark the BLOCK for this level as being for this function and pop the
2013 level. Since the vars in it are the parameters, clear them. */
2014 BLOCK_VARS (current_binding_level->block) = 0;
2015 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
2016 DECL_INITIAL (fndecl) = current_binding_level->block;
2017 gnat_poplevel ();
2018
2019 /* We handle pending sizes via the elaboration of types, so we don't
2020 need to save them. */
2021 get_pending_sizes ();
2022
2023 /* Mark the RESULT_DECL as being in this subprogram. */
2024 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
2025
2026 DECL_SAVED_TREE (fndecl) = body;
2027
2028 current_function_decl = DECL_CONTEXT (fndecl);
2029 set_cfun (NULL);
2030
2031 /* We cannot track the location of errors past this point. */
2032 error_gnat_node = Empty;
2033
2034 /* If we're only annotating types, don't actually compile this function. */
2035 if (type_annotate_only)
2036 return;
2037
2038 /* Perform the required pre-gimplification transformations on the tree. */
2039 gnat_genericize (fndecl);
2040
2041 /* We do different things for nested and non-nested functions.
2042 ??? This should be in cgraph. */
2043 if (!DECL_CONTEXT (fndecl))
2044 {
2045 gnat_gimplify_function (fndecl);
2046
2047 /* If this is an empty elaboration proc, just discard the node.
2048 Otherwise, compile further. */
2049 if (elab_p && empty_body_p (gimple_body (fndecl)))
2050 cgraph_remove_node (cgraph_node (fndecl));
2051 else
2052 cgraph_finalize_function (fndecl, false);
2053 }
2054 else
2055 /* Register this function with cgraph just far enough to get it
2056 added to our parent's nested function list. */
2057 (void) cgraph_node (fndecl);
2058}
2059
2060/* Convert FNDECL's code to GIMPLE and handle any nested functions. */
2061
2062static void
2063gnat_gimplify_function (tree fndecl)
2064{
2065 struct cgraph_node *cgn;
2066
2067 dump_function (TDI_original, fndecl);
2068 gimplify_function_tree (fndecl);
2069 dump_function (TDI_generic, fndecl);
2070
2071 /* Convert all nested functions to GIMPLE now. We do things in this order
2072 so that items like VLA sizes are expanded properly in the context of the
2073 correct function. */
2074 cgn = cgraph_node (fndecl);
2075 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
2076 gnat_gimplify_function (cgn->decl);
2077}
2078
2079tree
2080gnat_builtin_function (tree decl)
2081{
2082 gnat_pushdecl (decl, Empty);
2083 return decl;
2084}
2085
2086/* Return an integer type with the number of bits of precision given by
2087 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2088 it is a signed type. */
2089
2090tree
2091gnat_type_for_size (unsigned precision, int unsignedp)
2092{
2093 tree t;
2094 char type_name[20];
2095
2096 if (precision <= 2 * MAX_BITS_PER_WORD
2097 && signed_and_unsigned_types[precision][unsignedp])
2098 return signed_and_unsigned_types[precision][unsignedp];
2099
2100 if (unsignedp)
2101 t = make_unsigned_type (precision);
2102 else
2103 t = make_signed_type (precision);
2104
2105 if (precision <= 2 * MAX_BITS_PER_WORD)
2106 signed_and_unsigned_types[precision][unsignedp] = t;
2107
2108 if (!TYPE_NAME (t))
2109 {
2110 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
2111 TYPE_NAME (t) = get_identifier (type_name);
2112 }
2113
2114 return t;
2115}
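
A short usage sketch, for illustration only; the default names follow the SIGNED_n/UNSIGNED_n pattern set just above, and repeated calls with the same arguments return the cached node for precisions up to 2 * MAX_BITS_PER_WORD.

     /* Illustrative only.  */
     tree s8  = gnat_type_for_size (8, 0);   /* signed, named "SIGNED_8" if unnamed */
     tree u32 = gnat_type_for_size (32, 1);  /* unsigned, named "UNSIGNED_32" if unnamed */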
2116
2117/* Likewise for floating-point types. */
2118
2119static tree
2120float_type_for_precision (int precision, enum machine_mode mode)
2121{
2122 tree t;
2123 char type_name[20];
2124
2125 if (float_types[(int) mode])
2126 return float_types[(int) mode];
2127
2128 float_types[(int) mode] = t = make_node (REAL_TYPE);
2129 TYPE_PRECISION (t) = precision;
2130 layout_type (t);
2131
2132 gcc_assert (TYPE_MODE (t) == mode);
2133 if (!TYPE_NAME (t))
2134 {
2135 sprintf (type_name, "FLOAT_%d", precision);
2136 TYPE_NAME (t) = get_identifier (type_name);
2137 }
2138
2139 return t;
2140}
2141
2142/* Return a data type that has machine mode MODE. UNSIGNEDP selects
2143 an unsigned type; otherwise a signed type is returned. */
2144
2145tree
2146gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2147{
2148 if (mode == BLKmode)
2149 return NULL_TREE;
2150 else if (mode == VOIDmode)
2151 return void_type_node;
2152 else if (COMPLEX_MODE_P (mode))
2153 return NULL_TREE;
2154 else if (SCALAR_FLOAT_MODE_P (mode))
2155 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2156 else if (SCALAR_INT_MODE_P (mode))
2157 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2158 else
2159 return NULL_TREE;
2160}
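
For illustration (hedged, since the exact widths depend on the target's mode definitions):

     /* Illustrative only.  */
     tree t1 = gnat_type_for_mode (SImode, 0);   /* typically a 32-bit signed type */
     tree t2 = gnat_type_for_mode (BLKmode, 0);  /* NULL_TREE: no scalar type for BLKmode */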
2161
2162/* Return the unsigned version of a TYPE_NODE, a scalar type. */
2163
2164tree
2165gnat_unsigned_type (tree type_node)
2166{
2167 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2168
2169 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2170 {
2171 type = copy_node (type);
2172 TREE_TYPE (type) = type_node;
2173 }
2174 else if (TREE_TYPE (type_node)
2175 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2176 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2177 {
2178 type = copy_node (type);
2179 TREE_TYPE (type) = TREE_TYPE (type_node);
2180 }
2181
2182 return type;
2183}
2184
2185/* Return the signed version of a TYPE_NODE, a scalar type. */
2186
2187tree
2188gnat_signed_type (tree type_node)
2189{
2190 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2191
2192 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2193 {
2194 type = copy_node (type);
2195 TREE_TYPE (type) = type_node;
2196 }
2197 else if (TREE_TYPE (type_node)
2198 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2199 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2200 {
2201 type = copy_node (type);
2202 TREE_TYPE (type) = TREE_TYPE (type_node);
2203 }
2204
2205 return type;
2206}
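
For illustration (a plain 16-bit type, not a modular one, so the special cases above do not trigger): the two helpers simply swap the signedness while preserving the precision.

     /* Illustrative only.  */
     tree t16 = gnat_type_for_size (16, 0);
     tree u16 = gnat_unsigned_type (t16);   /* unsigned view, still 16 bits */
     tree s16 = gnat_signed_type (u16);     /* back to a signed 16-bit type */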
2207
2208/* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2209 transparently converted to each other. */
2210
2211int
2212gnat_types_compatible_p (tree t1, tree t2)
2213{
2214 enum tree_code code;
2215
2216 /* This is the default criterion. */
2217 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
2218 return 1;
2219
2220 /* We only check structural equivalence here. */
2221 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
2222 return 0;
2223
2224 /* Array types are also compatible if they are constrained and have
2225 the same component type and the same domain. */
2226 if (code == ARRAY_TYPE
2227 && TREE_TYPE (t1) == TREE_TYPE (t2)
2228 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
2229 || (TYPE_DOMAIN (t1)
b4680ca1 2230 && TYPE_DOMAIN (t2)
2231 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
2232 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
2233 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
2234 TYPE_MAX_VALUE (TYPE_DOMAIN (t2))))))
2235 return 1;
2236
2237 /* Padding record types are also compatible if they pad the same
2238 type and have the same constant size. */
2239 if (code == RECORD_TYPE
2240 && TYPE_IS_PADDING_P (t1) && TYPE_IS_PADDING_P (t2)
2241 && TREE_TYPE (TYPE_FIELDS (t1)) == TREE_TYPE (TYPE_FIELDS (t2))
2242 && tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2)))
2243 return 1;
2244
2245 return 0;
2246}
2247\f
2248/* EXP is an expression for the size of an object. If this size contains
2249 discriminant references, replace them with the maximum (if MAX_P) or
2250 minimum (if !MAX_P) possible value of the discriminant. */
2251
2252tree
2253max_size (tree exp, bool max_p)
2254{
2255 enum tree_code code = TREE_CODE (exp);
2256 tree type = TREE_TYPE (exp);
2257
2258 switch (TREE_CODE_CLASS (code))
2259 {
2260 case tcc_declaration:
2261 case tcc_constant:
2262 return exp;
2263
2264 case tcc_vl_exp:
2265 if (code == CALL_EXPR)
2266 {
2267 tree *argarray;
2268 int i, n = call_expr_nargs (exp);
2269 gcc_assert (n > 0);
2270
2271 argarray = (tree *) alloca (n * sizeof (tree));
2272 for (i = 0; i < n; i++)
2273 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
2274 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
2275 }
2276 break;
2277
2278 case tcc_reference:
2279 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2280 modify. Otherwise, we treat it like a variable. */
2281 if (!CONTAINS_PLACEHOLDER_P (exp))
2282 return exp;
2283
2284 type = TREE_TYPE (TREE_OPERAND (exp, 1));
2285 return
2286 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);
2287
2288 case tcc_comparison:
2289 return max_p ? size_one_node : size_zero_node;
2290
2291 case tcc_unary:
2292 case tcc_binary:
2293 case tcc_expression:
2294 switch (TREE_CODE_LENGTH (code))
2295 {
2296 case 1:
2297 if (code == NON_LVALUE_EXPR)
2298 return max_size (TREE_OPERAND (exp, 0), max_p);
2299 else
2300 return
2301 fold_build1 (code, type,
2302 max_size (TREE_OPERAND (exp, 0),
2303 code == NEGATE_EXPR ? !max_p : max_p));
2304
2305 case 2:
2306 if (code == COMPOUND_EXPR)
2307 return max_size (TREE_OPERAND (exp, 1), max_p);
2308
2309 /* Calculate "(A ? B : C) - D" as "A ? B - D : C - D" which
2310 may provide a tighter bound on max_size. */
2311 if (code == MINUS_EXPR
2312 && TREE_CODE (TREE_OPERAND (exp, 0)) == COND_EXPR)
2313 {
2314 tree lhs = fold_build2 (MINUS_EXPR, type,
2315 TREE_OPERAND (TREE_OPERAND (exp, 0), 1),
2316 TREE_OPERAND (exp, 1));
2317 tree rhs = fold_build2 (MINUS_EXPR, type,
2318 TREE_OPERAND (TREE_OPERAND (exp, 0), 2),
2319 TREE_OPERAND (exp, 1));
2320 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2321 max_size (lhs, max_p),
2322 max_size (rhs, max_p));
2323 }
2324
2325 {
2326 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2327 tree rhs = max_size (TREE_OPERAND (exp, 1),
2328 code == MINUS_EXPR ? !max_p : max_p);
2329
2330 /* Special-case wanting the maximum value of a MIN_EXPR.
2331 In that case, if one side overflows, return the other.
2332 sizetype is signed, but we know sizes are non-negative.
2333 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2334 overflowing or the maximum possible value and the RHS
2335 a variable. */
2336 if (max_p
2337 && code == MIN_EXPR
2338 && TREE_CODE (rhs) == INTEGER_CST
2339 && TREE_OVERFLOW (rhs))
2340 return lhs;
2341 else if (max_p
2342 && code == MIN_EXPR
2343 && TREE_CODE (lhs) == INTEGER_CST
2344 && TREE_OVERFLOW (lhs))
2345 return rhs;
2346 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2347 && ((TREE_CODE (lhs) == INTEGER_CST
2348 && TREE_OVERFLOW (lhs))
2349 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
2350 && !TREE_CONSTANT (rhs))
2351 return lhs;
2352 else
2353 return fold_build2 (code, type, lhs, rhs);
2354 }
2355
2356 case 3:
2357 if (code == SAVE_EXPR)
2358 return exp;
2359 else if (code == COND_EXPR)
2360 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2361 max_size (TREE_OPERAND (exp, 1), max_p),
2362 max_size (TREE_OPERAND (exp, 2), max_p));
2363 }
2364
2365 /* Other tree classes cannot happen. */
2366 default:
2367 break;
2368 }
2369
2370 gcc_unreachable ();
2371}
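
A hedged sketch of typical use; GNU_SIZE stands for a hypothetical size tree built elsewhere that refers to discriminants through PLACEHOLDER_EXPRs:

     /* Illustrative only: gnu_size is a made-up variable.  */
     tree gnu_max_size = max_size (gnu_size, true);   /* every discriminant at its maximum */
     tree gnu_min_size = max_size (gnu_size, false);  /* every discriminant at its minimum */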
2372\f
2373/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2374 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2375 Return a constructor for the template. */
2376
2377tree
2378build_template (tree template_type, tree array_type, tree expr)
2379{
2380 tree template_elts = NULL_TREE;
2381 tree bound_list = NULL_TREE;
2382 tree field;
2383
2384 while (TREE_CODE (array_type) == RECORD_TYPE
2385 && (TYPE_IS_PADDING_P (array_type)
2386 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
2387 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
2388
2389 if (TREE_CODE (array_type) == ARRAY_TYPE
2390 || (TREE_CODE (array_type) == INTEGER_TYPE
2391 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2392 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2393
2394 /* First make the list for a CONSTRUCTOR for the template. Go down the
2395 field list of the template instead of the type chain because this
2396 array might be an Ada array of arrays and we can't tell where the
2397 nested arrays stop being the underlying object. */
2398
2399 for (field = TYPE_FIELDS (template_type); field;
2400 (bound_list
2401 ? (bound_list = TREE_CHAIN (bound_list))
2402 : (array_type = TREE_TYPE (array_type))),
2403 field = TREE_CHAIN (TREE_CHAIN (field)))
2404 {
2405 tree bounds, min, max;
2406
2407 /* If we have a bound list, get the bounds from there. Likewise
2408 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2409 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2410 This will give us a maximum range. */
2411 if (bound_list)
2412 bounds = TREE_VALUE (bound_list);
2413 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2414 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2415 else if (expr && TREE_CODE (expr) == PARM_DECL
2416 && DECL_BY_COMPONENT_PTR_P (expr))
2417 bounds = TREE_TYPE (field);
2418 else
2419 gcc_unreachable ();
2420
2421 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
2422 max = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MAX_VALUE (bounds));
2423
2424 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2425 substitute it from OBJECT. */
2426 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
2427 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
2428
2429 template_elts = tree_cons (TREE_CHAIN (field), max,
2430 tree_cons (field, min, template_elts));
2431 }
2432
2433 return gnat_build_constructor (template_type, nreverse (template_elts));
2434}
2435\f
6ca2b0a0 2436/* Build a 32bit VMS descriptor from a Mechanism_Type, which must specify
2437 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2438 in the type contains in its DECL_INITIAL the expression to use when
2439 a constructor is made for the type. GNAT_ENTITY is an entity used
2440 to print out an error message if the mechanism cannot be applied to
2441 an object of that type and also for the name. */
2442
2443tree
d628c015 2444build_vms_descriptor32 (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2445{
2446 tree record_type = make_node (RECORD_TYPE);
2447 tree pointer32_type;
2448 tree field_list = 0;
2449 int class;
2450 int dtype = 0;
2451 tree inner_type;
2452 int ndim;
2453 int i;
2454 tree *idx_arr;
2455 tree tem;
2456
2457 /* If TYPE is an unconstrained array, use the underlying array type. */
2458 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2459 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2460
2461 /* If this is an array, compute the number of dimensions in the array,
2462 get the index types, and point to the inner type. */
2463 if (TREE_CODE (type) != ARRAY_TYPE)
2464 ndim = 0;
2465 else
2466 for (ndim = 1, inner_type = type;
2467 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2468 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2469 ndim++, inner_type = TREE_TYPE (inner_type))
2470 ;
2471
2472 idx_arr = (tree *) alloca (ndim * sizeof (tree));
2473
d628c015 2474 if (mech != By_Descriptor_NCA && mech != By_Short_Descriptor_NCA
2475 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2476 for (i = ndim - 1, inner_type = type;
2477 i >= 0;
2478 i--, inner_type = TREE_TYPE (inner_type))
2479 idx_arr[i] = TYPE_DOMAIN (inner_type);
2480 else
2481 for (i = 0, inner_type = type;
2482 i < ndim;
2483 i++, inner_type = TREE_TYPE (inner_type))
2484 idx_arr[i] = TYPE_DOMAIN (inner_type);
2485
2486 /* Now get the DTYPE value. */
2487 switch (TREE_CODE (type))
2488 {
2489 case INTEGER_TYPE:
2490 case ENUMERAL_TYPE:
01ddebf2 2491 case BOOLEAN_TYPE:
2492 if (TYPE_VAX_FLOATING_POINT_P (type))
2493 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2494 {
2495 case 6:
2496 dtype = 10;
2497 break;
2498 case 9:
2499 dtype = 11;
2500 break;
2501 case 15:
2502 dtype = 27;
2503 break;
2504 }
2505 else
2506 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2507 {
2508 case 8:
2509 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2510 break;
2511 case 16:
2512 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2513 break;
2514 case 32:
2515 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2516 break;
2517 case 64:
2518 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2519 break;
2520 case 128:
2521 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2522 break;
2523 }
2524 break;
2525
2526 case REAL_TYPE:
2527 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2528 break;
2529
2530 case COMPLEX_TYPE:
2531 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2532 && TYPE_VAX_FLOATING_POINT_P (type))
2533 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2534 {
2535 case 6:
2536 dtype = 12;
2537 break;
2538 case 9:
2539 dtype = 13;
2540 break;
2541 case 15:
2542 dtype = 29;
2543 }
2544 else
2545 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2546 break;
2547
2548 case ARRAY_TYPE:
2549 dtype = 14;
2550 break;
2551
2552 default:
2553 break;
2554 }
2555
2556 /* Get the CLASS value. */
2557 switch (mech)
2558 {
2559 case By_Descriptor_A:
d628c015 2560 case By_Short_Descriptor_A:
2561 class = 4;
2562 break;
2563 case By_Descriptor_NCA:
d628c015 2564 case By_Short_Descriptor_NCA:
2565 class = 10;
2566 break;
2567 case By_Descriptor_SB:
d628c015 2568 case By_Short_Descriptor_SB:
2569 class = 15;
2570 break;
2571 case By_Descriptor:
d628c015 2572 case By_Short_Descriptor:
a1ab4c31 2573 case By_Descriptor_S:
d628c015 2574 case By_Short_Descriptor_S:
2575 default:
2576 class = 1;
2577 break;
2578 }
2579
2580 /* Make the type for a descriptor for VMS. The first four fields
2581 are the same for all types. */
2582
2583 field_list
2584 = chainon (field_list,
2585 make_descriptor_field
2586 ("LENGTH", gnat_type_for_size (16, 1), record_type,
2587 size_in_bytes ((mech == By_Descriptor_A ||
2588 mech == By_Short_Descriptor_A)
2589 ? inner_type : type)));
2590
2591 field_list = chainon (field_list,
2592 make_descriptor_field ("DTYPE",
2593 gnat_type_for_size (8, 1),
2594 record_type, size_int (dtype)));
2595 field_list = chainon (field_list,
2596 make_descriptor_field ("CLASS",
2597 gnat_type_for_size (8, 1),
2598 record_type, size_int (class)));
2599
2600 /* Of course this will crash at run-time if the address space is not
2601 within the low 32 bits, but there is nothing else we can do. */
2602 pointer32_type = build_pointer_type_for_mode (type, SImode, false);
2603
2604 field_list
2605 = chainon (field_list,
2606 make_descriptor_field
2607 ("POINTER", pointer32_type, record_type,
2608 build_unary_op (ADDR_EXPR,
2609 pointer32_type,
2610 build0 (PLACEHOLDER_EXPR, type))));
2611
2612 switch (mech)
2613 {
2614 case By_Descriptor:
d628c015 2615 case By_Short_Descriptor:
a1ab4c31 2616 case By_Descriptor_S:
d628c015 2617 case By_Short_Descriptor_S:
2618 break;
2619
2620 case By_Descriptor_SB:
d628c015 2621 case By_Short_Descriptor_SB:
2622 field_list
2623 = chainon (field_list,
2624 make_descriptor_field
2625 ("SB_L1", gnat_type_for_size (32, 1), record_type,
2626 TREE_CODE (type) == ARRAY_TYPE
2627 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2628 field_list
2629 = chainon (field_list,
2630 make_descriptor_field
2631 ("SB_U1", gnat_type_for_size (32, 1), record_type,
2632 TREE_CODE (type) == ARRAY_TYPE
2633 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2634 break;
2635
2636 case By_Descriptor_A:
d628c015 2637 case By_Short_Descriptor_A:
a1ab4c31 2638 case By_Descriptor_NCA:
d628c015 2639 case By_Short_Descriptor_NCA:
2640 field_list = chainon (field_list,
2641 make_descriptor_field ("SCALE",
2642 gnat_type_for_size (8, 1),
2643 record_type,
2644 size_zero_node));
2645
2646 field_list = chainon (field_list,
2647 make_descriptor_field ("DIGITS",
2648 gnat_type_for_size (8, 1),
2649 record_type,
2650 size_zero_node));
2651
2652 field_list
2653 = chainon (field_list,
2654 make_descriptor_field
2655 ("AFLAGS", gnat_type_for_size (8, 1), record_type,
2656 size_int ((mech == By_Descriptor_NCA ||
2657 mech == By_Short_Descriptor_NCA)
2658 ? 0
2659 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2660 : (TREE_CODE (type) == ARRAY_TYPE
2661 && TYPE_CONVENTION_FORTRAN_P (type)
2662 ? 224 : 192))));
2663
2664 field_list = chainon (field_list,
2665 make_descriptor_field ("DIMCT",
2666 gnat_type_for_size (8, 1),
2667 record_type,
2668 size_int (ndim)));
2669
2670 field_list = chainon (field_list,
2671 make_descriptor_field ("ARSIZE",
2672 gnat_type_for_size (32, 1),
2673 record_type,
2674 size_in_bytes (type)));
2675
2676 /* Now build a pointer to the 0,0,0... element. */
2677 tem = build0 (PLACEHOLDER_EXPR, type);
2678 for (i = 0, inner_type = type; i < ndim;
2679 i++, inner_type = TREE_TYPE (inner_type))
2680 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2681 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2682 NULL_TREE, NULL_TREE);
2683
2684 field_list
2685 = chainon (field_list,
2686 make_descriptor_field
2687 ("A0",
2688 build_pointer_type_for_mode (inner_type, SImode, false),
2689 record_type,
2690 build1 (ADDR_EXPR,
2691 build_pointer_type_for_mode (inner_type, SImode,
2692 false),
2693 tem)));
2694
2695 /* Next come the addressing coefficients. */
2696 tem = size_one_node;
2697 for (i = 0; i < ndim; i++)
2698 {
2699 char fname[3];
2700 tree idx_length
2701 = size_binop (MULT_EXPR, tem,
2702 size_binop (PLUS_EXPR,
2703 size_binop (MINUS_EXPR,
2704 TYPE_MAX_VALUE (idx_arr[i]),
2705 TYPE_MIN_VALUE (idx_arr[i])),
2706 size_int (1)));
2707
2708 fname[0] = ((mech == By_Descriptor_NCA ||
2709 mech == By_Short_Descriptor_NCA) ? 'S' : 'M');
2710 fname[1] = '0' + i, fname[2] = 0;
2711 field_list
2712 = chainon (field_list,
2713 make_descriptor_field (fname,
2714 gnat_type_for_size (32, 1),
2715 record_type, idx_length));
2716
d628c015 2717 if (mech == By_Descriptor_NCA || mech == By_Short_Descriptor_NCA)
2718 tem = idx_length;
2719 }
2720
2721 /* Finally here are the bounds. */
2722 for (i = 0; i < ndim; i++)
2723 {
2724 char fname[3];
2725
2726 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2727 field_list
2728 = chainon (field_list,
2729 make_descriptor_field
2730 (fname, gnat_type_for_size (32, 1), record_type,
2731 TYPE_MIN_VALUE (idx_arr[i])));
2732
2733 fname[0] = 'U';
2734 field_list
2735 = chainon (field_list,
2736 make_descriptor_field
2737 (fname, gnat_type_for_size (32, 1), record_type,
2738 TYPE_MAX_VALUE (idx_arr[i])));
2739 }
2740 break;
2741
2742 default:
2743 post_error ("unsupported descriptor type for &", gnat_entity);
2744 }
2745
10069d53 2746 TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC");
a1ab4c31 2747 finish_record_type (record_type, field_list, 0, true);
2748 return record_type;
2749}
2750
2751/* Build a 64bit VMS descriptor from a Mechanism_Type, which must specify
2752 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2753 in the type contains in its DECL_INITIAL the expression to use when
2754 a constructor is made for the type. GNAT_ENTITY is an entity used
2755 to print out an error message if the mechanism cannot be applied to
2756 an object of that type and also for the name. */
2757
2758tree
d628c015 2759build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2760{
2761 tree record64_type = make_node (RECORD_TYPE);
2762 tree pointer64_type;
2763 tree field_list64 = 0;
2764 int class;
2765 int dtype = 0;
2766 tree inner_type;
2767 int ndim;
2768 int i;
2769 tree *idx_arr;
2770 tree tem;
2771
2772 /* If TYPE is an unconstrained array, use the underlying array type. */
2773 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2774 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2775
2776 /* If this is an array, compute the number of dimensions in the array,
2777 get the index types, and point to the inner type. */
2778 if (TREE_CODE (type) != ARRAY_TYPE)
2779 ndim = 0;
2780 else
2781 for (ndim = 1, inner_type = type;
2782 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2783 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2784 ndim++, inner_type = TREE_TYPE (inner_type))
2785 ;
2786
2787 idx_arr = (tree *) alloca (ndim * sizeof (tree));
2788
2789 if (mech != By_Descriptor_NCA
2790 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2791 for (i = ndim - 1, inner_type = type;
2792 i >= 0;
2793 i--, inner_type = TREE_TYPE (inner_type))
2794 idx_arr[i] = TYPE_DOMAIN (inner_type);
2795 else
2796 for (i = 0, inner_type = type;
2797 i < ndim;
2798 i++, inner_type = TREE_TYPE (inner_type))
2799 idx_arr[i] = TYPE_DOMAIN (inner_type);
2800
2801 /* Now get the DTYPE value. */
2802 switch (TREE_CODE (type))
2803 {
2804 case INTEGER_TYPE:
2805 case ENUMERAL_TYPE:
01ddebf2 2806 case BOOLEAN_TYPE:
2807 if (TYPE_VAX_FLOATING_POINT_P (type))
2808 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2809 {
2810 case 6:
2811 dtype = 10;
2812 break;
2813 case 9:
2814 dtype = 11;
2815 break;
2816 case 15:
2817 dtype = 27;
2818 break;
2819 }
2820 else
2821 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2822 {
2823 case 8:
2824 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2825 break;
2826 case 16:
2827 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2828 break;
2829 case 32:
2830 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2831 break;
2832 case 64:
2833 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2834 break;
2835 case 128:
2836 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2837 break;
2838 }
2839 break;
2840
2841 case REAL_TYPE:
2842 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2843 break;
2844
2845 case COMPLEX_TYPE:
2846 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2847 && TYPE_VAX_FLOATING_POINT_P (type))
2848 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2849 {
2850 case 6:
2851 dtype = 12;
2852 break;
2853 case 9:
2854 dtype = 13;
2855 break;
2856 case 15:
2857 dtype = 29;
2858 }
2859 else
2860 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2861 break;
2862
2863 case ARRAY_TYPE:
2864 dtype = 14;
2865 break;
2866
2867 default:
2868 break;
2869 }
2870
2871 /* Get the CLASS value. */
2872 switch (mech)
2873 {
2874 case By_Descriptor_A:
2875 class = 4;
2876 break;
2877 case By_Descriptor_NCA:
2878 class = 10;
2879 break;
2880 case By_Descriptor_SB:
2881 class = 15;
2882 break;
2883 case By_Descriptor:
2884 case By_Descriptor_S:
2885 default:
2886 class = 1;
2887 break;
2888 }
2889
2890 /* Make the type for a 64bit descriptor for VMS. The first six fields
2891 are the same for all types. */
2892
2893 field_list64 = chainon (field_list64,
2894 make_descriptor_field ("MBO",
2895 gnat_type_for_size (16, 1),
2896 record64_type, size_int (1)));
2897
2898 field_list64 = chainon (field_list64,
2899 make_descriptor_field ("DTYPE",
2900 gnat_type_for_size (8, 1),
2901 record64_type, size_int (dtype)));
2902 field_list64 = chainon (field_list64,
2903 make_descriptor_field ("CLASS",
2904 gnat_type_for_size (8, 1),
2905 record64_type, size_int (class)));
2906
2907 field_list64 = chainon (field_list64,
2908 make_descriptor_field ("MBMO",
2909 gnat_type_for_size (32, 1),
2910 record64_type, ssize_int (-1)));
2911
2912 field_list64
2913 = chainon (field_list64,
2914 make_descriptor_field
2915 ("LENGTH", gnat_type_for_size (64, 1), record64_type,
2916 size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
2917
2918 pointer64_type = build_pointer_type_for_mode (type, DImode, false);
2919
2920 field_list64
2921 = chainon (field_list64,
2922 make_descriptor_field
2923 ("POINTER", pointer64_type, record64_type,
2924 build_unary_op (ADDR_EXPR,
2925 pointer64_type,
2926 build0 (PLACEHOLDER_EXPR, type))));
2927
2928 switch (mech)
2929 {
2930 case By_Descriptor:
2931 case By_Descriptor_S:
2932 break;
2933
2934 case By_Descriptor_SB:
2935 field_list64
2936 = chainon (field_list64,
2937 make_descriptor_field
2938 ("SB_L1", gnat_type_for_size (64, 1), record64_type,
2939 TREE_CODE (type) == ARRAY_TYPE
2940 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2941 field_list64
2942 = chainon (field_list64,
2943 make_descriptor_field
2944 ("SB_U1", gnat_type_for_size (64, 1), record64_type,
2945 TREE_CODE (type) == ARRAY_TYPE
2946 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2947 break;
2948
2949 case By_Descriptor_A:
2950 case By_Descriptor_NCA:
2951 field_list64 = chainon (field_list64,
2952 make_descriptor_field ("SCALE",
2953 gnat_type_for_size (8, 1),
2954 record64_type,
2955 size_zero_node));
2956
2957 field_list64 = chainon (field_list64,
2958 make_descriptor_field ("DIGITS",
2959 gnat_type_for_size (8, 1),
2960 record64_type,
2961 size_zero_node));
2962
2963 field_list64
2964 = chainon (field_list64,
2965 make_descriptor_field
2966 ("AFLAGS", gnat_type_for_size (8, 1), record64_type,
2967 size_int (mech == By_Descriptor_NCA
2968 ? 0
2969 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2970 : (TREE_CODE (type) == ARRAY_TYPE
2971 && TYPE_CONVENTION_FORTRAN_P (type)
2972 ? 224 : 192))));
2973
2974 field_list64 = chainon (field_list64,
2975 make_descriptor_field ("DIMCT",
2976 gnat_type_for_size (8, 1),
2977 record64_type,
2978 size_int (ndim)));
2979
2980 field_list64 = chainon (field_list64,
2981 make_descriptor_field ("MBZ",
2982 gnat_type_for_size (32, 1),
2983 record64_type,
2984 size_int (0)));
2985 field_list64 = chainon (field_list64,
2986 make_descriptor_field ("ARSIZE",
2987 gnat_type_for_size (64, 1),
2988 record64_type,
2989 size_in_bytes (type)));
2990
2991 /* Now build a pointer to the 0,0,0... element. */
2992 tem = build0 (PLACEHOLDER_EXPR, type);
2993 for (i = 0, inner_type = type; i < ndim;
2994 i++, inner_type = TREE_TYPE (inner_type))
2995 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2996 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2997 NULL_TREE, NULL_TREE);
2998
2999 field_list64
3000 = chainon (field_list64,
3001 make_descriptor_field
3002 ("A0",
3003 build_pointer_type_for_mode (inner_type, DImode, false),
3004 record64_type,
3005 build1 (ADDR_EXPR,
3006 build_pointer_type_for_mode (inner_type, DImode,
3007 false),
3008 tem)));
3009
3010 /* Next come the addressing coefficients. */
3011 tem = size_one_node;
3012 for (i = 0; i < ndim; i++)
3013 {
3014 char fname[3];
3015 tree idx_length
3016 = size_binop (MULT_EXPR, tem,
3017 size_binop (PLUS_EXPR,
3018 size_binop (MINUS_EXPR,
3019 TYPE_MAX_VALUE (idx_arr[i]),
3020 TYPE_MIN_VALUE (idx_arr[i])),
3021 size_int (1)));
3022
3023 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
3024 fname[1] = '0' + i, fname[2] = 0;
3025 field_list64
3026 = chainon (field_list64,
3027 make_descriptor_field (fname,
3028 gnat_type_for_size (64, 1),
3029 record64_type, idx_length));
3030
3031 if (mech == By_Descriptor_NCA)
3032 tem = idx_length;
3033 }
3034
3035 /* Finally here are the bounds. */
3036 for (i = 0; i < ndim; i++)
3037 {
3038 char fname[3];
3039
3040 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
3041 field_list64
3042 = chainon (field_list64,
3043 make_descriptor_field
3044 (fname, gnat_type_for_size (64, 1), record64_type,
3045 TYPE_MIN_VALUE (idx_arr[i])));
3046
3047 fname[0] = 'U';
3048 field_list64
3049 = chainon (field_list64,
3050 make_descriptor_field
3051 (fname, gnat_type_for_size (64, 1), record64_type,
3052 TYPE_MAX_VALUE (idx_arr[i])));
3053 }
3054 break;
3055
3056 default:
3057 post_error ("unsupported descriptor type for &", gnat_entity);
3058 }
3059
10069d53 3060 TYPE_NAME (record64_type) = create_concat_name (gnat_entity, "DESC64");
6ca2b0a0 3061 finish_record_type (record64_type, field_list64, 0, true);
3062 return record64_type;
3063}
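
The fixed part of the 64-bit descriptor built above can be pictured with the following C layout; this struct only illustrates the field order and widths chosen by the make_descriptor_field calls and is not a type used by the compiler:

     /* Illustration only.  */
     struct vms_descriptor64_header
     {
       unsigned short mbo;         /* MBO: must be one.                    */
       unsigned char dtype;        /* DTYPE: data type code.               */
       unsigned char class;        /* CLASS: descriptor class code.        */
       unsigned int mbmo;          /* MBMO: must be minus one (all ones).  */
       unsigned long long length;  /* LENGTH: size of the data in bytes.   */
       void *pointer;              /* POINTER: 64-bit address of the data. */
     };

The class-specific fields (the SB bounds, or the A/NCA block with SCALE, DIGITS, AFLAGS, DIMCT, MBZ, ARSIZE, A0, the addressing coefficients and the bounds) follow this header.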
3064
3065/* Utility routine for above code to make a field. */
3066
3067static tree
3068make_descriptor_field (const char *name, tree type,
3069 tree rec_type, tree initial)
3070{
3071 tree field
3072 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
3073
3074 DECL_INITIAL (field) = initial;
3075 return field;
3076}
3077
3078/* Convert GNU_EXPR, a pointer to a 64bit VMS descriptor, to GNU_TYPE, a
3079 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3080 which the VMS descriptor is passed. */
3081
3082static tree
3083convert_vms_descriptor64 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3084{
3085 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3086 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3087 /* The CLASS field is the 3rd field in the descriptor. */
3088 tree class = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
3089 /* The POINTER field is the 6th field in the descriptor. */
3090 tree pointer64 = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (class)));
3091
3092 /* Retrieve the value of the POINTER field. */
3093 tree gnu_expr64
3094 = build3 (COMPONENT_REF, TREE_TYPE (pointer64), desc, pointer64, NULL_TREE);
3095
3096 if (POINTER_TYPE_P (gnu_type))
3097 return convert (gnu_type, gnu_expr64);
3098
3099 else if (TYPE_FAT_POINTER_P (gnu_type))
3100 {
3101 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3102 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
3103 tree template_type = TREE_TYPE (p_bounds_type);
3104 tree min_field = TYPE_FIELDS (template_type);
3105 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
3106 tree template, template_addr, aflags, dimct, t, u;
3107 /* See the head comment of build_vms_descriptor. */
3108 int iclass = TREE_INT_CST_LOW (DECL_INITIAL (class));
3109 tree lfield, ufield;
3110
3111 /* Convert POINTER to the type of the P_ARRAY field. */
3112 gnu_expr64 = convert (p_array_type, gnu_expr64);
3113
3114 switch (iclass)
3115 {
3116 case 1: /* Class S */
3117 case 15: /* Class SB */
3118 /* Build {1, LENGTH} template; LENGTH64 is the 5th field. */
3119 t = TREE_CHAIN (TREE_CHAIN (class));
3120 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3121 t = tree_cons (min_field,
3122 convert (TREE_TYPE (min_field), integer_one_node),
3123 tree_cons (max_field,
3124 convert (TREE_TYPE (max_field), t),
3125 NULL_TREE));
3126 template = gnat_build_constructor (template_type, t);
3127 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
3128
3129 /* For class S, we are done. */
3130 if (iclass == 1)
3131 break;
3132
3133 /* Test that we really have a SB descriptor, like DEC Ada. */
3134 t = build3 (COMPONENT_REF, TREE_TYPE (class), desc, class, NULL);
3135 u = convert (TREE_TYPE (class), DECL_INITIAL (class));
3136 u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
3137 /* If so, there is already a template in the descriptor and
3138 it is located right after the POINTER field. The fields are
3139 64bits so they must be repacked. */
3140 t = TREE_CHAIN (pointer64);
3141 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3142 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3143
3144 t = TREE_CHAIN (t);
3145 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3146 ufield = convert
3147 (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);
3148
3149 /* Build the template in the form of a constructor. */
3150 t = tree_cons (TYPE_FIELDS (template_type), lfield,
3151 tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
3152 ufield, NULL_TREE));
3153 template = gnat_build_constructor (template_type, t);
3154
3155 /* Otherwise use the {1, LENGTH} template we build above. */
3156 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3157 build_unary_op (ADDR_EXPR, p_bounds_type,
3158 template),
3159 template_addr);
3160 break;
3161
3162 case 4: /* Class A */
3163 /* The AFLAGS field is the 3rd field after the pointer in the
3164 descriptor. */
3165 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer64)));
3166 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3167 /* The DIMCT field is the next field in the descriptor after
3168 aflags. */
3169 t = TREE_CHAIN (t);
3170 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3171 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3172 or FL_COEFF or FL_BOUNDS not set. */
3173 u = build_int_cst (TREE_TYPE (aflags), 192);
3174 u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
3175 build_binary_op (NE_EXPR, integer_type_node,
3176 dimct,
3177 convert (TREE_TYPE (dimct),
3178 size_one_node)),
3179 build_binary_op (NE_EXPR, integer_type_node,
3180 build2 (BIT_AND_EXPR,
3181 TREE_TYPE (aflags),
3182 aflags, u),
3183 u));
3184 /* There is already a template in the descriptor and it is located
3185 in block 3. The fields are 64bits so they must be repacked. */
3186 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN
3187 (t)))));
3188 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3189 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3190
3191 t = TREE_CHAIN (t);
3192 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3193 ufield = convert
3194 (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);
3195
3196 /* Build the template in the form of a constructor. */
3197 t = tree_cons (TYPE_FIELDS (template_type), lfield,
3198 tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
3199 ufield, NULL_TREE));
3200 template = gnat_build_constructor (template_type, t);
3201 template = build3 (COND_EXPR, p_bounds_type, u,
3202 build_call_raise (CE_Length_Check_Failed, Empty,
3203 N_Raise_Constraint_Error),
3204 template);
3205 template_addr = build_unary_op (ADDR_EXPR, p_bounds_type, template);
3206 break;
3207
3208 case 10: /* Class NCA */
3209 default:
3210 post_error ("unsupported descriptor type for &", gnat_subprog);
3211 template_addr = integer_zero_node;
3212 break;
3213 }
3214
3215 /* Build the fat pointer in the form of a constructor. */
3216 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr64,
3217 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
3218 template_addr, NULL_TREE));
3219 return gnat_build_constructor (gnu_type, t);
3220 }
3221
3222 else
3223 gcc_unreachable ();
3224}
3225
3226 /* Convert GNU_EXPR, a pointer to a 32-bit VMS descriptor, to GNU_TYPE, a
3227 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3228 which the VMS descriptor is passed. */
3229
3230static tree
3231convert_vms_descriptor32 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
a1ab4c31
AC
3232{
3233 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3234 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3235 /* The CLASS field is the 3rd field in the descriptor. */
3236 tree class = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
3237 /* The POINTER field is the 4th field in the descriptor. */
3238 tree pointer = TREE_CHAIN (class);
3239
3240 /* Retrieve the value of the POINTER field. */
d628c015 3241 tree gnu_expr32
a1ab4c31
AC
3242 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
3243
3244 if (POINTER_TYPE_P (gnu_type))
d628c015 3245 return convert (gnu_type, gnu_expr32);
a1ab4c31
AC
3246
3247 else if (TYPE_FAT_POINTER_P (gnu_type))
3248 {
3249 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3250 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
3251 tree template_type = TREE_TYPE (p_bounds_type);
3252 tree min_field = TYPE_FIELDS (template_type);
3253 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
3254 tree template, template_addr, aflags, dimct, t, u;
3255 /* See the head comment of build_vms_descriptor. */
3256 int iclass = TREE_INT_CST_LOW (DECL_INITIAL (class));
3257
3258 /* Convert POINTER to the type of the P_ARRAY field. */
d628c015 3259 gnu_expr32 = convert (p_array_type, gnu_expr32);
a1ab4c31
AC
3260
3261 switch (iclass)
3262 {
3263 case 1: /* Class S */
3264 case 15: /* Class SB */
3265 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
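 /* That is, a class S descriptor of length LENGTH is presented to the
 Ada side as a constrained array with bounds 1 .. LENGTH. */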
3266 t = TYPE_FIELDS (desc_type);
3267 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3268 t = tree_cons (min_field,
3269 convert (TREE_TYPE (min_field), integer_one_node),
3270 tree_cons (max_field,
3271 convert (TREE_TYPE (max_field), t),
3272 NULL_TREE));
3273 template = gnat_build_constructor (template_type, t);
3274 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
3275
3276 /* For class S, we are done. */
3277 if (iclass == 1)
3278 break;
3279
3280 /* Test that we really have an SB descriptor, like DEC Ada. */
3281 t = build3 (COMPONENT_REF, TREE_TYPE (class), desc, class, NULL);
3282 u = convert (TREE_TYPE (class), DECL_INITIAL (class));
3283 u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
3284 /* If so, there is already a template in the descriptor and
3285 it is located right after the POINTER field. */
3286 t = TREE_CHAIN (pointer);
3287 template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3288 /* Otherwise use the {1, LENGTH} template we built above. */
3289 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3290 build_unary_op (ADDR_EXPR, p_bounds_type,
3291 template),
3292 template_addr);
3293 break;
3294
3295 case 4: /* Class A */
3296 /* The AFLAGS field is the 7th field in the descriptor. */
3297 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer)));
3298 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3299 /* The DIMCT field is the 8th field in the descriptor. */
3300 t = TREE_CHAIN (t);
3301 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3302 /* Raise CONSTRAINT_ERROR if there is more than one dimension
3303 or if FL_COEFF or FL_BOUNDS is not set. */
3304 u = build_int_cst (TREE_TYPE (aflags), 192);
3305 u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
3306 build_binary_op (NE_EXPR, integer_type_node,
3307 dimct,
3308 convert (TREE_TYPE (dimct),
3309 size_one_node)),
3310 build_binary_op (NE_EXPR, integer_type_node,
3311 build2 (BIT_AND_EXPR,
3312 TREE_TYPE (aflags),
3313 aflags, u),
3314 u));
a1ab4c31
AC
3315 /* There is already a template in the descriptor and it is
3316 located at the start of block 3 (12th field). */
3317 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (t))));
3318 template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
d628c015
DR
3319 template = build3 (COND_EXPR, p_bounds_type, u,
3320 build_call_raise (CE_Length_Check_Failed, Empty,
3321 N_Raise_Constraint_Error),
3322 template);
a1ab4c31
AC
3323 template_addr = build_unary_op (ADDR_EXPR, p_bounds_type, template);
3324 break;
3325
3326 case 10: /* Class NCA */
3327 default:
3328 post_error ("unsupported descriptor type for &", gnat_subprog);
3329 template_addr = integer_zero_node;
3330 break;
3331 }
3332
3333 /* Build the fat pointer in the form of a constructor. */
d628c015 3334 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr32,
a1ab4c31
AC
3335 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
3336 template_addr, NULL_TREE));
d628c015 3337
a1ab4c31
AC
3338 return gnat_build_constructor (gnu_type, t);
3339 }
3340
3341 else
3342 gcc_unreachable ();
3343}
3344
a981c964
EB
3345/* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
3346 pointer or fat pointer type. GNU_EXPR_ALT_TYPE is the alternate (32-bit)
3347 pointer type of GNU_EXPR. GNAT_SUBPROG is the subprogram to which the
3348 VMS descriptor is passed. */
d628c015
DR
3349
3350static tree
a981c964
EB
3351convert_vms_descriptor (tree gnu_type, tree gnu_expr, tree gnu_expr_alt_type,
3352 Entity_Id gnat_subprog)
d628c015
DR
3353{
3354 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3355 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3356 tree mbo = TYPE_FIELDS (desc_type);
3357 const char *mbostr = IDENTIFIER_POINTER (DECL_NAME (mbo));
3358 tree mbmo = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (mbo)));
a981c964 3359 tree is64bit, gnu_expr32, gnu_expr64;
d628c015 3360
a981c964
EB
3361 /* If the field name is not MBO, the descriptor must be 32-bit and have no
3362 alternate; otherwise the primary must be 64-bit and the alternate 32-bit. */
d628c015 3363 if (strcmp (mbostr, "MBO") != 0)
d628c015
DR
3364 return convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
3365
a981c964 3366 /* Build the test for 64-bit descriptor. */
d628c015
DR
3367 mbo = build3 (COMPONENT_REF, TREE_TYPE (mbo), desc, mbo, NULL_TREE);
3368 mbmo = build3 (COMPONENT_REF, TREE_TYPE (mbmo), desc, mbmo, NULL_TREE);
a981c964
EB
3369 is64bit
3370 = build_binary_op (TRUTH_ANDIF_EXPR, integer_type_node,
3371 build_binary_op (EQ_EXPR, integer_type_node,
3372 convert (integer_type_node, mbo),
3373 integer_one_node),
3374 build_binary_op (EQ_EXPR, integer_type_node,
3375 convert (integer_type_node, mbmo),
3376 integer_minus_one_node));
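 /* That is, IS64BIT is equivalent to testing desc.MBO == 1 && desc.MBMO == -1
 on the dereferenced argument, which only a 64-bit descriptor satisfies. */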
3377
3378 /* Build the 2 possible end results. */
3379 gnu_expr64 = convert_vms_descriptor64 (gnu_type, gnu_expr, gnat_subprog);
3380 gnu_expr = fold_convert (gnu_expr_alt_type, gnu_expr);
3381 gnu_expr32 = convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
3382
3383 return build3 (COND_EXPR, gnu_type, is64bit, gnu_expr64, gnu_expr32);
d628c015
DR
3384}
3385
a1ab4c31
AC
3386/* Build a stub for the subprogram specified by the GCC tree GNU_SUBPROG
3387 and the GNAT node GNAT_SUBPROG. */
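/* A sketch of what the generated stub amounts to, with parameters not passed
 by descriptor forwarded unchanged:

 RESULT stub (P1, ..., Pn)
 {
 return (*&GNU_SUBPROG) (convert_vms_descriptor (P1), ..., Pn);
 }

 When the result type is void, the call simply becomes a statement. */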
3388
3389void
3390build_function_stub (tree gnu_subprog, Entity_Id gnat_subprog)
3391{
3392 tree gnu_subprog_type, gnu_subprog_addr, gnu_subprog_call;
3393 tree gnu_stub_param, gnu_param_list, gnu_arg_types, gnu_param;
3394 tree gnu_stub_decl = DECL_FUNCTION_STUB (gnu_subprog);
3395 tree gnu_body;
3396
3397 gnu_subprog_type = TREE_TYPE (gnu_subprog);
3398 gnu_param_list = NULL_TREE;
3399
3400 begin_subprog_body (gnu_stub_decl);
3401 gnat_pushlevel ();
3402
3403 start_stmt_group ();
3404
3405 /* Loop over the parameters of the stub and translate any of them
3406 passed by descriptor into a by reference one. */
3407 for (gnu_stub_param = DECL_ARGUMENTS (gnu_stub_decl),
3408 gnu_arg_types = TYPE_ARG_TYPES (gnu_subprog_type);
3409 gnu_stub_param;
3410 gnu_stub_param = TREE_CHAIN (gnu_stub_param),
3411 gnu_arg_types = TREE_CHAIN (gnu_arg_types))
3412 {
3413 if (DECL_BY_DESCRIPTOR_P (gnu_stub_param))
a981c964
EB
3414 gnu_param
3415 = convert_vms_descriptor (TREE_VALUE (gnu_arg_types),
3416 gnu_stub_param,
3417 DECL_PARM_ALT_TYPE (gnu_stub_param),
3418 gnat_subprog);
a1ab4c31
AC
3419 else
3420 gnu_param = gnu_stub_param;
3421
3422 gnu_param_list = tree_cons (NULL_TREE, gnu_param, gnu_param_list);
3423 }
3424
3425 gnu_body = end_stmt_group ();
3426
3427 /* Invoke the internal subprogram. */
3428 gnu_subprog_addr = build1 (ADDR_EXPR, build_pointer_type (gnu_subprog_type),
3429 gnu_subprog);
3430 gnu_subprog_call = build_call_list (TREE_TYPE (gnu_subprog_type),
3431 gnu_subprog_addr,
3432 nreverse (gnu_param_list));
3433
3434 /* Propagate the return value, if any. */
3435 if (VOID_TYPE_P (TREE_TYPE (gnu_subprog_type)))
3436 append_to_statement_list (gnu_subprog_call, &gnu_body);
3437 else
3438 append_to_statement_list (build_return_expr (DECL_RESULT (gnu_stub_decl),
3439 gnu_subprog_call),
3440 &gnu_body);
3441
3442 gnat_poplevel ();
3443
3444 allocate_struct_function (gnu_stub_decl, false);
3445 end_subprog_body (gnu_body, false);
3446}
3447\f
3448/* Build a type to be used to represent an aliased object whose nominal
3449 type is an unconstrained array. This consists of a RECORD_TYPE containing
3450 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
3451 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
3452 is used to represent an arbitrary unconstrained object. Use NAME
3453 as the name of the record. */
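/* Conceptually the result is

 struct UNC_OBJECT { TEMPLATE_TYPE BOUNDS; OBJECT_TYPE ARRAY; };

 i.e. the bounds template immediately followed by the array data. */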
3454
3455tree
3456build_unc_object_type (tree template_type, tree object_type, tree name)
3457{
3458 tree type = make_node (RECORD_TYPE);
3459 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
3460 template_type, type, 0, 0, 0, 1);
3461 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
3462 type, 0, 0, 0, 1);
3463
3464 TYPE_NAME (type) = name;
3465 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3466 finish_record_type (type,
3467 chainon (chainon (NULL_TREE, template_field),
3468 array_field),
3469 0, false);
3470
3471 return type;
3472}
3473
3474/* Same, taking a thin or fat pointer type instead of a template type. */
3475
3476tree
3477build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3478 tree name)
3479{
3480 tree template_type;
3481
3482 gcc_assert (TYPE_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
3483
3484 template_type
3485 = (TYPE_FAT_POINTER_P (thin_fat_ptr_type)
3486 ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3487 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3488 return build_unc_object_type (template_type, object_type, name);
3489}
3490
3491/* Shift the component offsets within an unconstrained object TYPE to make it
3492 suitable for use as a designated type for thin pointers. */
3493
3494void
3495shift_unc_components_for_thin_pointers (tree type)
3496{
3497 /* Thin pointer values designate the ARRAY data of an unconstrained object,
3498 allocated past the BOUNDS template. The designated type is adjusted to
3499 have ARRAY at position zero and the template at a negative offset, so
3500 that COMPONENT_REFs on (*thin_ptr) designate the proper location. */
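 /* For instance, if ARRAY was laid out at byte offset 8, BOUNDS ends up at
 offset -8 and ARRAY at offset 0, so a thin pointer points directly at the
 array data. */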
3501
3502 tree bounds_field = TYPE_FIELDS (type);
3503 tree array_field = TREE_CHAIN (TYPE_FIELDS (type));
3504
3505 DECL_FIELD_OFFSET (bounds_field)
3506 = size_binop (MINUS_EXPR, size_zero_node, byte_position (array_field));
3507
3508 DECL_FIELD_OFFSET (array_field) = size_zero_node;
3509 DECL_FIELD_BIT_OFFSET (array_field) = bitsize_zero_node;
3510}
3511\f
229077b0
EB
3512/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
3513 In the normal case this is just two adjustments, but we have more to
3514 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
a1ab4c31
AC
3515
3516void
3517update_pointer_to (tree old_type, tree new_type)
3518{
3519 tree ptr = TYPE_POINTER_TO (old_type);
3520 tree ref = TYPE_REFERENCE_TO (old_type);
3521 tree ptr1, ref1;
3522 tree type;
3523
3524 /* If this is the main variant, process all the other variants first. */
3525 if (TYPE_MAIN_VARIANT (old_type) == old_type)
3526 for (type = TYPE_NEXT_VARIANT (old_type); type;
3527 type = TYPE_NEXT_VARIANT (type))
3528 update_pointer_to (type, new_type);
3529
229077b0 3530 /* If no pointers and no references, we are done. */
a1ab4c31
AC
3531 if (!ptr && !ref)
3532 return;
3533
3534 /* Merge the old type qualifiers in the new type.
3535
3536 Each old variant has qualifiers for specific reasons, and the new
229077b0 3537 designated type as well. Each set of qualifiers represents useful
a1ab4c31
AC
3538 information grabbed at some point, and merging the two simply unifies
3539 these inputs into the final type description.
3540
3541 Consider for instance a volatile type frozen after an access to constant
229077b0
EB
3542 type designating it; after the designated type's freeze, we get here with
3543 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
3544 when the access type was processed. We will make a volatile and readonly
a1ab4c31
AC
3545 designated type, because that's what it really is.
3546
229077b0
EB
3547 We might also get here for a non-dummy OLD_TYPE variant with different
3548 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
a1ab4c31 3549 to private record type elaboration (see the comments around the call to
229077b0
EB
3550 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
3551 the qualifiers in those cases too, to avoid accidentally discarding the
3552 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
3553 new_type
3554 = build_qualified_type (new_type,
3555 TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
3556
3557 /* If old type and new type are identical, there is nothing to do. */
a1ab4c31
AC
3558 if (old_type == new_type)
3559 return;
3560
3561 /* Otherwise, first handle the simple case. */
3562 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
3563 {
3564 TYPE_POINTER_TO (new_type) = ptr;
3565 TYPE_REFERENCE_TO (new_type) = ref;
3566
3567 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
3568 for (ptr1 = TYPE_MAIN_VARIANT (ptr); ptr1;
3569 ptr1 = TYPE_NEXT_VARIANT (ptr1))
3570 TREE_TYPE (ptr1) = new_type;
3571
3572 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
3573 for (ref1 = TYPE_MAIN_VARIANT (ref); ref1;
3574 ref1 = TYPE_NEXT_VARIANT (ref1))
3575 TREE_TYPE (ref1) = new_type;
3576 }
3577
229077b0 3578 /* Now deal with the unconstrained array case. In this case the "pointer"
a1ab4c31
AC
3579 is actually a RECORD_TYPE where both fields are pointers to dummy nodes.
3580 Turn them into pointers to the correct types using update_pointer_to. */
229077b0 3581 else if (!TYPE_FAT_POINTER_P (ptr))
a1ab4c31
AC
3582 gcc_unreachable ();
3583
3584 else
3585 {
3586 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
3587 tree array_field = TYPE_FIELDS (ptr);
3588 tree bounds_field = TREE_CHAIN (TYPE_FIELDS (ptr));
3589 tree new_ptr = TYPE_POINTER_TO (new_type);
3590 tree new_ref;
3591 tree var;
3592
3593 /* Make pointers to the dummy template point to the real template. */
3594 update_pointer_to
3595 (TREE_TYPE (TREE_TYPE (bounds_field)),
3596 TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_ptr)))));
3597
3598 /* The references to the template bounds present in the array type
229077b0
EB
3599 are made through a PLACEHOLDER_EXPR of type NEW_PTR. Since we
3600 are updating PTR to make it a full replacement for NEW_PTR as
3601 pointer to NEW_TYPE, we must rework the PLACEHOLDER_EXPR so as
3602 to make it of type PTR. */
a1ab4c31
AC
3603 new_ref = build3 (COMPONENT_REF, TREE_TYPE (bounds_field),
3604 build0 (PLACEHOLDER_EXPR, ptr),
3605 bounds_field, NULL_TREE);
3606
229077b0 3607 /* Create the new array for the new PLACEHOLDER_EXPR and make pointers
77022fa8 3608 to the dummy array point to it. */
a1ab4c31
AC
3609 update_pointer_to
3610 (TREE_TYPE (TREE_TYPE (array_field)),
3611 substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))),
3612 TREE_CHAIN (TYPE_FIELDS (new_ptr)), new_ref));
3613
229077b0 3614 /* Make PTR the pointer to NEW_TYPE. */
a1ab4c31
AC
3615 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
3616 = TREE_TYPE (new_type) = ptr;
3617
3618 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
3619 SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);
3620
3621 /* Now handle updating the allocation record, what the thin pointer
3622 points to. Update all pointers from the old record into the new
3623 one, update the type of the array field, and recompute the size. */
3624 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
3625
3626 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
3627 = TREE_TYPE (TREE_TYPE (array_field));
3628
3629 /* The size recomputation needs to account for alignment constraints, so
3630 we let layout_type work it out. This will reset the field offsets to
3631 what they would be in a regular record, so we shift them back to what
3632 we want them to be for a thin pointer designated type afterwards. */
3633 DECL_SIZE (TYPE_FIELDS (new_obj_rec)) = 0;
3634 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))) = 0;
3635 TYPE_SIZE (new_obj_rec) = 0;
3636 layout_type (new_obj_rec);
3637
3638 shift_unc_components_for_thin_pointers (new_obj_rec);
3639
3640 /* We are done, at last. */
3641 rest_of_record_type_compilation (ptr);
3642 }
3643}
3644\f
8df2e902
EB
3645/* Convert EXPR, a pointer to a constrained array, into a pointer to an
3646 unconstrained one. This involves making or finding a template. */
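/* The result is conceptually the two-field aggregate

 { P_ARRAY => pointer to the array data, P_BOUNDS => address of the template }

 matching the layout of the fat pointer type TYPE. */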
a1ab4c31
AC
3647
3648static tree
3649convert_to_fat_pointer (tree type, tree expr)
3650{
3651 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
8df2e902 3652 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
a1ab4c31 3653 tree etype = TREE_TYPE (expr);
8df2e902 3654 tree template;
a1ab4c31 3655
8df2e902
EB
3656 /* If EXPR is null, make a fat pointer that contains null pointers to the
3657 template and array. */
a1ab4c31
AC
3658 if (integer_zerop (expr))
3659 return
3660 gnat_build_constructor
3661 (type,
3662 tree_cons (TYPE_FIELDS (type),
8df2e902 3663 convert (p_array_type, expr),
a1ab4c31
AC
3664 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3665 convert (build_pointer_type (template_type),
3666 expr),
3667 NULL_TREE)));
3668
8df2e902 3669 /* If EXPR is a thin pointer, make the template and data from the record. */
a1ab4c31
AC
3670 else if (TYPE_THIN_POINTER_P (etype))
3671 {
3672 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
3673
3674 expr = save_expr (expr);
3675 if (TREE_CODE (expr) == ADDR_EXPR)
3676 expr = TREE_OPERAND (expr, 0);
3677 else
3678 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
3679
3680 template = build_component_ref (expr, NULL_TREE, fields, false);
3681 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
3682 build_component_ref (expr, NULL_TREE,
3683 TREE_CHAIN (fields), false));
3684 }
8df2e902
EB
3685
3686 /* Otherwise, build the constructor for the template. */
a1ab4c31 3687 else
a1ab4c31
AC
3688 template = build_template (template_type, TREE_TYPE (etype), expr);
3689
8df2e902 3690 /* The final result is a constructor for the fat pointer.
a1ab4c31 3691
8df2e902
EB
3692 If EXPR is an argument of a foreign convention subprogram, the type it
3693 points to is directly the component type. In this case, the expression
a1ab4c31 3694 type may not match the corresponding FIELD_DECL type at this point, so we
8df2e902 3695 call "convert" here to fix that up if necessary. This type consistency is
a1ab4c31 3696 required, for instance because it ensures that possible later folding of
8df2e902 3697 COMPONENT_REFs against this constructor always yields something of the
a1ab4c31
AC
3698 same type as the initial reference.
3699
8df2e902
EB
3700 Note that the call to "build_template" above is still fine because it
3701 will only refer to the provided TEMPLATE_TYPE in this case. */
3702 return
3703 gnat_build_constructor
3704 (type,
3705 tree_cons (TYPE_FIELDS (type),
3706 convert (p_array_type, expr),
3707 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3708 build_unary_op (ADDR_EXPR, NULL_TREE, template),
3709 NULL_TREE)));
a1ab4c31
AC
3710}
3711\f
3712/* Convert to a thin pointer type, TYPE. The only thing we know how to convert
3713 is something that is a fat pointer, so convert to it first if EXPR
3714 is not already a fat pointer. */
3715
3716static tree
3717convert_to_thin_pointer (tree type, tree expr)
3718{
3719 if (!TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
3720 expr
3721 = convert_to_fat_pointer
3722 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
3723
3724 /* We get the pointer to the data and use a NOP_EXPR to make it the
3725 proper GCC type. */
3726 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
3727 false);
3728 expr = build1 (NOP_EXPR, type, expr);
3729
3730 return expr;
3731}
3732\f
3733/* Create an expression whose value is that of EXPR,
3734 converted to type TYPE. The TREE_TYPE of the value
3735 is always TYPE. This function implements all reasonable
3736 conversions; callers should filter out those that are
3737 not permitted by the language being compiled. */
3738
3739tree
3740convert (tree type, tree expr)
3741{
3742 enum tree_code code = TREE_CODE (type);
3743 tree etype = TREE_TYPE (expr);
3744 enum tree_code ecode = TREE_CODE (etype);
3745
3746 /* If EXPR is already the right type, we are done. */
3747 if (type == etype)
3748 return expr;
3749
3750 /* If both input and output have padding and are of variable size, do this
3751 as an unchecked conversion. Likewise if one is a mere variant of the
3752 other, so we avoid a pointless unpad/repad sequence. */
3753 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
3754 && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
3755 && (!TREE_CONSTANT (TYPE_SIZE (type))
3756 || !TREE_CONSTANT (TYPE_SIZE (etype))
3757 || gnat_types_compatible_p (type, etype)
3758 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
3759 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
3760 ;
3761
3762 /* If the output type has padding, convert to the inner type and
3763 make a constructor to build the record. */
3764 else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
3765 {
3766 /* If we previously converted from another type and our type is
3767 of variable size, remove the conversion to avoid the need for
3768 variable-size temporaries. Likewise for a conversion between
3769 original and packable version. */
3770 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3771 && (!TREE_CONSTANT (TYPE_SIZE (type))
3772 || (ecode == RECORD_TYPE
3773 && TYPE_NAME (etype)
3774 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
3775 expr = TREE_OPERAND (expr, 0);
3776
3777 /* If we are just removing the padding from expr, convert the original
3778 object if we have variable size in order to avoid the need for some
3779 variable-size temporaries. Likewise if the padding is a mere variant
3780 of the other, so we avoid a pointless unpad/repad sequence. */
3781 if (TREE_CODE (expr) == COMPONENT_REF
3782 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
3783 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
3784 && (!TREE_CONSTANT (TYPE_SIZE (type))
3785 || gnat_types_compatible_p (type,
3786 TREE_TYPE (TREE_OPERAND (expr, 0)))
3787 || (ecode == RECORD_TYPE
3788 && TYPE_NAME (etype)
3789 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
3790 return convert (type, TREE_OPERAND (expr, 0));
3791
3792 /* If the result type is a padded type with a self-referentially-sized
3793 field and the expression type is a record, do this as an
3794 unchecked conversion. */
3795 else if (TREE_CODE (etype) == RECORD_TYPE
3796 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
3797 return unchecked_convert (type, expr, false);
3798
3799 else
3800 return
3801 gnat_build_constructor (type,
3802 tree_cons (TYPE_FIELDS (type),
3803 convert (TREE_TYPE
3804 (TYPE_FIELDS (type)),
3805 expr),
3806 NULL_TREE));
3807 }
3808
3809 /* If the input type has padding, remove it and convert to the output type.
3810 The conditions ordering is arranged to ensure that the output type is not
3811 a padding type here, as it is not clear whether the conversion would
3812 always be correct if this was to happen. */
3813 else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
3814 {
3815 tree unpadded;
3816
3817 /* If we have just converted to this padded type, just get the
3818 inner expression. */
3819 if (TREE_CODE (expr) == CONSTRUCTOR
3820 && !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (expr))
3821 && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->index
3822 == TYPE_FIELDS (etype))
3823 unpadded
3824 = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->value;
3825
3826 /* Otherwise, build an explicit component reference. */
3827 else
3828 unpadded
3829 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
3830
3831 return convert (type, unpadded);
3832 }
3833
3834 /* If the input is a biased type, adjust first. */
3835 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
3836 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
3837 fold_convert (TREE_TYPE (etype),
3838 expr),
3839 TYPE_MIN_VALUE (etype)));
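 /* For example, a biased subtype with range 100 .. 115 stores the value 103
 as the raw pattern 3; adding back TYPE_MIN_VALUE recovers 103 before the
 conversion proper is done. */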
3840
3841 /* If the input is a justified modular type, we need to extract the actual
3842 object before converting it to any other type with the exceptions of an
3843 unconstrained array or of a mere type variant. It is useful to avoid the
3844 extraction and conversion in the type variant case because it could end
3845 up replacing a VAR_DECL expr by a constructor and we might be about to
3846 take the address of the result. */
3847 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
3848 && code != UNCONSTRAINED_ARRAY_TYPE
3849 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
3850 return convert (type, build_component_ref (expr, NULL_TREE,
3851 TYPE_FIELDS (etype), false));
3852
3853 /* If converting to a type that contains a template, convert to the data
3854 type and then build the template. */
3855 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
3856 {
3857 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
3858
3859 /* If the source already has a template, get a reference to the
3860 associated array only, as we are going to rebuild a template
3861 for the target type anyway. */
3862 expr = maybe_unconstrained_array (expr);
3863
3864 return
3865 gnat_build_constructor
3866 (type,
3867 tree_cons (TYPE_FIELDS (type),
3868 build_template (TREE_TYPE (TYPE_FIELDS (type)),
3869 obj_type, NULL_TREE),
3870 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3871 convert (obj_type, expr), NULL_TREE)));
3872 }
3873
3874 /* There are some special cases of expressions that we process
3875 specially. */
3876 switch (TREE_CODE (expr))
3877 {
3878 case ERROR_MARK:
3879 return expr;
3880
3881 case NULL_EXPR:
3882 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
3883 conversion in gnat_expand_expr. NULL_EXPR does not represent
3884 an actual value, so no conversion is needed. */
3885 expr = copy_node (expr);
3886 TREE_TYPE (expr) = type;
3887 return expr;
3888
3889 case STRING_CST:
3890 /* If we are converting a STRING_CST to another constrained array type,
3891 just make a new one in the proper type. */
3892 if (code == ecode && AGGREGATE_TYPE_P (etype)
3893 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
3894 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
3895 {
3896 expr = copy_node (expr);
3897 TREE_TYPE (expr) = type;
3898 return expr;
3899 }
3900 break;
3901
3902 case CONSTRUCTOR:
3903 /* If we are converting a CONSTRUCTOR to a mere variant type, just make
3904 a new one in the proper type. */
3905 if (code == ecode && gnat_types_compatible_p (type, etype))
3906 {
3907 expr = copy_node (expr);
3908 TREE_TYPE (expr) = type;
3909 return expr;
3910 }
3911
3912 /* Likewise for a conversion between original and packable version, but
3913 we have to work harder in order to preserve type consistency. */
3914 if (code == ecode
3915 && code == RECORD_TYPE
3916 && TYPE_NAME (type) == TYPE_NAME (etype))
3917 {
3918 VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
3919 unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
3920 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, len);
3921 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
3922 unsigned HOST_WIDE_INT idx;
3923 tree index, value;
3924
3925 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
3926 {
3927 constructor_elt *elt = VEC_quick_push (constructor_elt, v, NULL);
3928 /* We expect only simple constructors. Otherwise, punt. */
3929 if (!(index == efield || index == DECL_ORIGINAL_FIELD (efield)))
3930 break;
3931 elt->index = field;
3932 elt->value = convert (TREE_TYPE (field), value);
3933 efield = TREE_CHAIN (efield);
3934 field = TREE_CHAIN (field);
3935 }
3936
3937 if (idx == len)
3938 {
3939 expr = copy_node (expr);
3940 TREE_TYPE (expr) = type;
3941 CONSTRUCTOR_ELTS (expr) = v;
3942 return expr;
3943 }
3944 }
3945 break;
3946
3947 case UNCONSTRAINED_ARRAY_REF:
3948 /* Convert this to the type of the inner array by getting the address of
3949 the array from the template. */
3950 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3951 build_component_ref (TREE_OPERAND (expr, 0),
3952 get_identifier ("P_ARRAY"),
3953 NULL_TREE, false));
3954 etype = TREE_TYPE (expr);
3955 ecode = TREE_CODE (etype);
3956 break;
3957
3958 case VIEW_CONVERT_EXPR:
3959 {
3960 /* GCC 4.x is very sensitive to type consistency overall, and view
3961 conversions thus are very frequent. Even though just "convert"ing
3962 the inner operand to the output type is fine in most cases, it
3963 might expose unexpected input/output type mismatches in special
3964 circumstances so we avoid such recursive calls when we can. */
3965 tree op0 = TREE_OPERAND (expr, 0);
3966
3967 /* If we are converting back to the original type, we can just
3968 lift the input conversion. This is a common occurrence with
3969 switches back-and-forth amongst type variants. */
3970 if (type == TREE_TYPE (op0))
3971 return op0;
3972
3973 /* Otherwise, if we're converting between two aggregate types, we
3974 might be allowed to substitute the VIEW_CONVERT_EXPR target type
3975 in place or to just convert the inner expression. */
3976 if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
3977 {
3978 /* If we are converting between mere variants, we can just
3979 substitute the VIEW_CONVERT_EXPR in place. */
3980 if (gnat_types_compatible_p (type, etype))
3981 return build1 (VIEW_CONVERT_EXPR, type, op0);
3982
3983 /* Otherwise, we may just bypass the input view conversion unless
3984 one of the types is a fat pointer, which is handled by
3985 specialized code below which relies on exact type matching. */
3986 else if (!TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
3987 return convert (type, op0);
3988 }
3989 }
3990 break;
3991
3992 case INDIRECT_REF:
3993 /* If both types are record types, just convert the pointer and
3994 make a new INDIRECT_REF.
3995
3996 ??? Disable this for now since it causes problems with the
3997 code in build_binary_op for MODIFY_EXPR which wants to
3998 strip off conversions. But that code really is a mess and
3999 we need to do this a much better way some time. */
4000 if (0
4001 && (TREE_CODE (type) == RECORD_TYPE
4002 || TREE_CODE (type) == UNION_TYPE)
4003 && (TREE_CODE (etype) == RECORD_TYPE
4004 || TREE_CODE (etype) == UNION_TYPE)
4005 && !TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
4006 return build_unary_op (INDIRECT_REF, NULL_TREE,
4007 convert (build_pointer_type (type),
4008 TREE_OPERAND (expr, 0)));
4009 break;
4010
4011 default:
4012 break;
4013 }
4014
4015 /* Check for converting to a pointer to an unconstrained array. */
4016 if (TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
4017 return convert_to_fat_pointer (type, expr);
4018
4019 /* If we are converting between two aggregate types that are mere
4020 variants, just make a VIEW_CONVERT_EXPR. */
4021 else if (code == ecode
4022 && AGGREGATE_TYPE_P (type)
4023 && gnat_types_compatible_p (type, etype))
4024 return build1 (VIEW_CONVERT_EXPR, type, expr);
4025
4026 /* In all other cases of related types, make a NOP_EXPR. */
4027 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
4028 || (code == INTEGER_CST && ecode == INTEGER_CST
4029 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
4030 return fold_convert (type, expr);
4031
4032 switch (code)
4033 {
4034 case VOID_TYPE:
4035 return fold_build1 (CONVERT_EXPR, type, expr);
4036
a1ab4c31
AC
4037 case INTEGER_TYPE:
4038 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4039 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4040 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4041 return unchecked_convert (type, expr, false);
4042 else if (TYPE_BIASED_REPRESENTATION_P (type))
4043 return fold_convert (type,
4044 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4045 convert (TREE_TYPE (type), expr),
4046 TYPE_MIN_VALUE (type)));
4047
4048 /* ... fall through ... */
4049
4050 case ENUMERAL_TYPE:
01ddebf2 4051 case BOOLEAN_TYPE:
a1ab4c31
AC
4052 /* If we are converting an additive expression to an integer type
4053 with lower precision, be wary of the optimization that can be
4054 applied by convert_to_integer. There are 2 problematic cases:
4055 - if the first operand was originally of a biased type,
4056 because we could be recursively called to convert it
4057 to an intermediate type and thus rematerialize the
4058 additive operator endlessly,
4059 - if the expression contains a placeholder, because an
4060 intermediate conversion that changes the sign could
4061 be inserted and thus introduce an artificial overflow
4062 at compile time when the placeholder is substituted. */
4063 if (code == INTEGER_TYPE
4064 && ecode == INTEGER_TYPE
4065 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4066 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4067 {
4068 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4069
4070 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4071 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4072 || CONTAINS_PLACEHOLDER_P (expr))
4073 return build1 (NOP_EXPR, type, expr);
4074 }
4075
4076 return fold (convert_to_integer (type, expr));
4077
4078 case POINTER_TYPE:
4079 case REFERENCE_TYPE:
4080 /* If converting between two pointers to records denoting
4081 both a template and type, adjust if needed to account
4082 for any differing offsets, since one might be negative. */
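 /* E.g. one of the two record layouts may have been shifted for thin
 pointers (ARRAY at offset 0, BOUNDS at a negative offset), so the pointer
 must be displaced by the byte difference between the positions of the
 first fields. */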
4083 if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
4084 {
4085 tree bit_diff
4086 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
4087 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
4088 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
4089 sbitsize_int (BITS_PER_UNIT));
4090
4091 expr = build1 (NOP_EXPR, type, expr);
4092 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
4093 if (integer_zerop (byte_diff))
4094 return expr;
4095
4096 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4097 fold (convert (sizetype, byte_diff)));
4098 }
4099
4100 /* If converting to a thin pointer, handle specially. */
4101 if (TYPE_THIN_POINTER_P (type)
4102 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
4103 return convert_to_thin_pointer (type, expr);
4104
4105 /* If converting fat pointer to normal pointer, get the pointer to the
4106 array and then convert it. */
4107 else if (TYPE_FAT_POINTER_P (etype))
4108 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
4109 NULL_TREE, false);
4110
4111 return fold (convert_to_pointer (type, expr));
4112
4113 case REAL_TYPE:
4114 return fold (convert_to_real (type, expr));
4115
4116 case RECORD_TYPE:
4117 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4118 return
4119 gnat_build_constructor
4120 (type, tree_cons (TYPE_FIELDS (type),
4121 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
4122 NULL_TREE));
4123
4124 /* ... fall through ... */
4125
4126 case ARRAY_TYPE:
4127 /* In these cases, assume the front-end has validated the conversion.
4128 If the conversion is valid, it will be a bit-wise conversion, so
4129 it can be viewed as an unchecked conversion. */
4130 return unchecked_convert (type, expr, false);
4131
4132 case UNION_TYPE:
4133 /* This is either a conversion between a tagged type and some
4134 subtype, which we have to mark as a UNION_TYPE because of
4135 overlapping fields, or a conversion of an Unchecked_Union. */
4136 return unchecked_convert (type, expr, false);
4137
4138 case UNCONSTRAINED_ARRAY_TYPE:
4139 /* If EXPR is a constrained array, take its address, convert it to a
4140 fat pointer, and then dereference it. Likewise if EXPR is a
4141 record containing both a template and a constrained array.
4142 Note that a record representing a justified modular type
4143 always represents a packed constrained array. */
4144 if (ecode == ARRAY_TYPE
4145 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4146 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4147 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4148 return
4149 build_unary_op
4150 (INDIRECT_REF, NULL_TREE,
4151 convert_to_fat_pointer (TREE_TYPE (type),
4152 build_unary_op (ADDR_EXPR,
4153 NULL_TREE, expr)));
4154
4155 /* Do something very similar for converting one unconstrained
4156 array to another. */
4157 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4158 return
4159 build_unary_op (INDIRECT_REF, NULL_TREE,
4160 convert (TREE_TYPE (type),
4161 build_unary_op (ADDR_EXPR,
4162 NULL_TREE, expr)));
4163 else
4164 gcc_unreachable ();
4165
4166 case COMPLEX_TYPE:
4167 return fold (convert_to_complex (type, expr));
4168
4169 default:
4170 gcc_unreachable ();
4171 }
4172}
4173\f
4174/* Remove all conversions that are done in EXP. This includes converting
4175 from a padded type or to a justified modular type. If TRUE_ADDRESS
4176 is true, always return the address of the containing object even if
4177 the address is not bit-aligned. */
4178
4179tree
4180remove_conversions (tree exp, bool true_address)
4181{
4182 switch (TREE_CODE (exp))
4183 {
4184 case CONSTRUCTOR:
4185 if (true_address
4186 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4187 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4188 return
4189 remove_conversions (VEC_index (constructor_elt,
4190 CONSTRUCTOR_ELTS (exp), 0)->value,
4191 true);
4192 break;
4193
4194 case COMPONENT_REF:
4195 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
4196 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
4197 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4198 break;
4199
4200 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
4201 CASE_CONVERT:
4202 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4203
4204 default:
4205 break;
4206 }
4207
4208 return exp;
4209}
4210\f
4211/* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
4212 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
4213 likewise return an expression pointing to the underlying array. */
4214
4215tree
4216maybe_unconstrained_array (tree exp)
4217{
4218 enum tree_code code = TREE_CODE (exp);
4219 tree new;
4220
4221 switch (TREE_CODE (TREE_TYPE (exp)))
4222 {
4223 case UNCONSTRAINED_ARRAY_TYPE:
4224 if (code == UNCONSTRAINED_ARRAY_REF)
4225 {
4226 new
4227 = build_unary_op (INDIRECT_REF, NULL_TREE,
4228 build_component_ref (TREE_OPERAND (exp, 0),
4229 get_identifier ("P_ARRAY"),
4230 NULL_TREE, false));
4231 TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
4232 return new;
4233 }
4234
4235 else if (code == NULL_EXPR)
4236 return build1 (NULL_EXPR,
4237 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
4238 (TREE_TYPE (TREE_TYPE (exp))))),
4239 TREE_OPERAND (exp, 0));
4240
4241 case RECORD_TYPE:
4242 /* If this is a padded type, convert to the unpadded type and see if
4243 it contains a template. */
4244 if (TYPE_IS_PADDING_P (TREE_TYPE (exp)))
4245 {
4246 new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
4247 if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
4248 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new)))
4249 return
4250 build_component_ref (new, NULL_TREE,
4251 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (new))),
4252 0);
4253 }
4254 else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
4255 return
4256 build_component_ref (exp, NULL_TREE,
4257 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
4258 break;
4259
4260 default:
4261 break;
4262 }
4263
4264 return exp;
4265}
4266\f
afcea859
EB
4267/* Return true if EXPR is an expression that can be folded as an operand
4268 of a VIEW_CONVERT_EXPR. See the head comment of unchecked_convert for
4269 the rationale. */
4270
4271static bool
4272can_fold_for_view_convert_p (tree expr)
4273{
4274 tree t1, t2;
4275
4276 /* The folder will fold NOP_EXPRs between integral types with the same
4277 precision (in the middle-end's sense). We cannot allow it if the
4278 types don't have the same precision in the Ada sense as well. */
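 /* E.g. a NOP_EXPR between two 32-bit integer subtypes whose RM sizes are
 8 and 32 has equal TYPE_PRECISIONs, but not equal precisions in the Ada
 sense, so we refuse to let the folder collapse it. */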
4279 if (TREE_CODE (expr) != NOP_EXPR)
4280 return true;
4281
4282 t1 = TREE_TYPE (expr);
4283 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
4284
4285 /* Defer to the folder for non-integral conversions. */
4286 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
4287 return true;
4288
4289 /* Only fold conversions that preserve both precisions. */
4290 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
4291 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
4292 return true;
4293
4294 return false;
4295}
4296
a1ab4c31 4297/* Return an expression that does an unchecked conversion of EXPR to TYPE.
afcea859
EB
4298 If NOTRUNC_P is true, truncation operations should be suppressed.
4299
4300 Special care is required with (source or target) integral types whose
4301 precision is not equal to their size, to make sure we fetch or assign
4302 the value bits whose location might depend on the endianness, e.g.
4303
4304 Rmsize : constant := 8;
4305 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
4306
4307 type Bit_Array is array (1 .. Rmsize) of Boolean;
4308 pragma Pack (Bit_Array);
4309
4310 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
4311
4312 Value : Int := 2#1000_0001#;
4313 Vbits : Bit_Array := To_Bit_Array (Value);
4314
4315 we expect the 8 bits at Vbits'Address to always contain Value, while
4316 their original location depends on the endianness, at Value'Address
4317 on a little-endian architecture but not on a big-endian one.
4318
4319 ??? There is a problematic discrepancy between what is called precision
4320 here (and more generally throughout gigi) for integral types and what is
4321 called precision in the middle-end. In the former case it's the RM size
4322 as given by TYPE_RM_SIZE (or rm_size) whereas it's TYPE_PRECISION in the
4323 latter case, the hitch being that they are not equal when they matter,
4324 that is when the number of value bits is not equal to the type's size:
4325 TYPE_RM_SIZE does give the number of value bits but TYPE_PRECISION is set
4326 to the size. The sole exception are BOOLEAN_TYPEs for which both are 1.
4327
4328 The consequence is that gigi must duplicate code bridging the gap between
4329 the type's size and its precision that exists for TYPE_PRECISION in the
4330 middle-end, because the latter knows nothing about TYPE_RM_SIZE, and be
4331 wary of transformations applied in the middle-end based on TYPE_PRECISION
4332 because this value doesn't reflect the actual precision for Ada. */
a1ab4c31
AC
4333
4334tree
4335unchecked_convert (tree type, tree expr, bool notrunc_p)
4336{
4337 tree etype = TREE_TYPE (expr);
4338
4339 /* If the expression is already the right type, we are done. */
4340 if (etype == type)
4341 return expr;
4342
4343 /* If both types are integral, just do a normal conversion.
4344 Likewise for a conversion to an unconstrained array. */
4345 if ((((INTEGRAL_TYPE_P (type)
4346 && !(TREE_CODE (type) == INTEGER_TYPE
4347 && TYPE_VAX_FLOATING_POINT_P (type)))
4348 || (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
4349 || (TREE_CODE (type) == RECORD_TYPE
4350 && TYPE_JUSTIFIED_MODULAR_P (type)))
4351 && ((INTEGRAL_TYPE_P (etype)
4352 && !(TREE_CODE (etype) == INTEGER_TYPE
4353 && TYPE_VAX_FLOATING_POINT_P (etype)))
4354 || (POINTER_TYPE_P (etype) && !TYPE_THIN_POINTER_P (etype))
4355 || (TREE_CODE (etype) == RECORD_TYPE
4356 && TYPE_JUSTIFIED_MODULAR_P (etype))))
4357 || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
4358 {
a1ab4c31
AC
4359 if (TREE_CODE (etype) == INTEGER_TYPE
4360 && TYPE_BIASED_REPRESENTATION_P (etype))
4361 {
4362 tree ntype = copy_type (etype);
a1ab4c31
AC
4363 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
4364 TYPE_MAIN_VARIANT (ntype) = ntype;
4365 expr = build1 (NOP_EXPR, ntype, expr);
4366 }
4367
4368 if (TREE_CODE (type) == INTEGER_TYPE
4369 && TYPE_BIASED_REPRESENTATION_P (type))
4370 {
afcea859 4371 tree rtype = copy_type (type);
a1ab4c31
AC
4372 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
4373 TYPE_MAIN_VARIANT (rtype) = rtype;
afcea859
EB
4374 expr = convert (rtype, expr);
4375 expr = build1 (NOP_EXPR, type, expr);
a1ab4c31
AC
4376 }
4377
afcea859
EB
4378 /* We have another special case: if we are unchecked converting either
4379 a subtype or a type with limited range into a base type, we need to
4380 ensure that VRP doesn't propagate range information because this
4381 conversion may be done precisely to validate that the object is
4382 within the range it is supposed to have. */
a1ab4c31
AC
4383 else if (TREE_CODE (expr) != INTEGER_CST
4384 && TREE_CODE (type) == INTEGER_TYPE && !TREE_TYPE (type)
4385 && ((TREE_CODE (etype) == INTEGER_TYPE && TREE_TYPE (etype))
4386 || TREE_CODE (etype) == ENUMERAL_TYPE
4387 || TREE_CODE (etype) == BOOLEAN_TYPE))
4388 {
4389 /* The optimization barrier is a VIEW_CONVERT_EXPR node; moreover,
4390 in order not to be deemed a useless type conversion, it must
4391 be from subtype to base type.
4392
afcea859
EB
4393 Therefore we first do the bulk of the conversion to a subtype of
4394 the final type. And this conversion must itself not be deemed
4395 useless if the source type is not a subtype because, otherwise,
4396 the final VIEW_CONVERT_EXPR will be deemed so as well. That's
4397 why we toggle the unsigned flag in this conversion, which is
4398 harmless since the final conversion is only a reinterpretation
4399 of the bit pattern.
4400
a1ab4c31
AC
4401 ??? This may raise addressability and/or aliasing issues because
4402 VIEW_CONVERT_EXPR gets gimplified as an lvalue, thus causing the
4403 address of its operand to be taken if it is deemed addressable
4404 and not already in GIMPLE form. */
afcea859
EB
4405 tree rtype
4406 = gnat_type_for_mode (TYPE_MODE (type), !TYPE_UNSIGNED (etype));
a1ab4c31
AC
4407 rtype = copy_type (rtype);
4408 TYPE_MAIN_VARIANT (rtype) = rtype;
4409 TREE_TYPE (rtype) = type;
afcea859
EB
4410 expr = convert (rtype, expr);
4411 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
a1ab4c31
AC
4412 }
4413
afcea859
EB
4414 else
4415 expr = convert (type, expr);
a1ab4c31
AC
4416 }
4417
afcea859
EB
4418 /* If we are converting to an integral type whose precision is not equal
4419 to its size, first unchecked convert to a record that contains an
4420 object of the output type. Then extract the field. */
a1ab4c31
AC
4421 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4422 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4423 GET_MODE_BITSIZE (TYPE_MODE (type))))
4424 {
4425 tree rec_type = make_node (RECORD_TYPE);
4426 tree field = create_field_decl (get_identifier ("OBJ"), type,
4427 rec_type, 1, 0, 0, 0);
4428
4429 TYPE_FIELDS (rec_type) = field;
4430 layout_type (rec_type);
4431
4432 expr = unchecked_convert (rec_type, expr, notrunc_p);
4433 expr = build_component_ref (expr, NULL_TREE, field, 0);
4434 }
4435
afcea859
EB
4436 /* Similarly if we are converting from an integral type whose precision
4437 is not equal to its size. */
a1ab4c31
AC
4438 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype)
4439 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
4440 GET_MODE_BITSIZE (TYPE_MODE (etype))))
4441 {
4442 tree rec_type = make_node (RECORD_TYPE);
4443 tree field
4444 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
4445 1, 0, 0, 0);
4446
4447 TYPE_FIELDS (rec_type) = field;
4448 layout_type (rec_type);
4449
4450 expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
4451 expr = unchecked_convert (type, expr, notrunc_p);
4452 }
4453
4454 /* We have a special case when we are converting between two
4455 unconstrained array types. In that case, take the address,
4456 convert the fat pointer types, and dereference. */
4457 else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
4458 && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
4459 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
4460 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
4461 build_unary_op (ADDR_EXPR, NULL_TREE,
4462 expr)));
4463 else
4464 {
4465 expr = maybe_unconstrained_array (expr);
4466 etype = TREE_TYPE (expr);
afcea859
EB
4467 if (can_fold_for_view_convert_p (expr))
4468 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
4469 else
4470 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
a1ab4c31
AC
4471 }
4472
afcea859
EB
4473 /* If the result is an integral type whose precision is not equal to its
4474 size, sign- or zero-extend the result. We need not do this if the input
4475 is an integral type of the same precision and signedness or if the output
a1ab4c31
AC
4476 is a biased type or if both the input and output are unsigned. */
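 /* The extension is done with a left shift followed by a right shift in
 the signedness of TYPE: e.g. for an 8-bit RM size in a 32-bit mode, we
 shift left by 24 bits and then right by 24 bits. */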
4477 if (!notrunc_p
4478 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4479 && !(TREE_CODE (type) == INTEGER_TYPE
4480 && TYPE_BIASED_REPRESENTATION_P (type))
4481 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4482 GET_MODE_BITSIZE (TYPE_MODE (type)))
4483 && !(INTEGRAL_TYPE_P (etype)
4484 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
4485 && operand_equal_p (TYPE_RM_SIZE (type),
4486 (TYPE_RM_SIZE (etype) != 0
4487 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
4488 0))
4489 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
4490 {
4491 tree base_type = gnat_type_for_mode (TYPE_MODE (type),
4492 TYPE_UNSIGNED (type));
4493 tree shift_expr
4494 = convert (base_type,
4495 size_binop (MINUS_EXPR,
4496 bitsize_int
4497 (GET_MODE_BITSIZE (TYPE_MODE (type))),
4498 TYPE_RM_SIZE (type)));
4499 expr
4500 = convert (type,
4501 build_binary_op (RSHIFT_EXPR, base_type,
4502 build_binary_op (LSHIFT_EXPR, base_type,
4503 convert (base_type, expr),
4504 shift_expr),
4505 shift_expr));
4506 }
4507
4508 /* An unchecked conversion should never raise Constraint_Error. The code
4509 below assumes that GCC's conversion routines overflow the same way that
4510 the underlying hardware does. This is probably true. In the rare case
4511 when it is false, we can rely on the fact that such conversions are
4512 erroneous anyway. */
4513 if (TREE_CODE (expr) == INTEGER_CST)
4514 TREE_OVERFLOW (expr) = 0;
4515
4516 /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
4517 mark the result as no longer constant. */
4518 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4519 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
4520 OEP_ONLY_CONST))
4521 TREE_CONSTANT (expr) = 0;
4522
4523 return expr;
4524}
4525\f
4526/* Return the appropriate GCC tree code for the specified GNAT type,
4527 the latter being a record type as predicated by Is_Record_Type. */
4528
4529enum tree_code
4530tree_code_for_record_type (Entity_Id gnat_type)
4531{
4532 Node_Id component_list
4533 = Component_List (Type_Definition
4534 (Declaration_Node
4535 (Implementation_Base_Type (gnat_type))));
4536 Node_Id component;
4537
4538 /* Make this a UNION_TYPE unless it's either not an Unchecked_Union or
4539 we have a non-discriminant field outside a variant. In either case,
4540 it's a RECORD_TYPE. */
4541
4542 if (!Is_Unchecked_Union (gnat_type))
4543 return RECORD_TYPE;
4544
4545 for (component = First_Non_Pragma (Component_Items (component_list));
4546 Present (component);
4547 component = Next_Non_Pragma (component))
4548 if (Ekind (Defining_Entity (component)) == E_Component)
4549 return RECORD_TYPE;
4550
4551 return UNION_TYPE;
4552}
4553
4554/* Return true if GNU_TYPE is suitable as the type of a non-aliased
4555 component of an aggregate type. */
4556
4557bool
4558type_for_nonaliased_component_p (tree gnu_type)
4559{
4560 /* If the type is passed by reference, we may have pointers to the
4561 component so it cannot be made non-aliased. */
4562 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
4563 return false;
4564
4565 /* We used to say that any component of aggregate type is aliased
4566 because the front-end may take 'Reference of it. The front-end
4567 has been enhanced in the meantime so as to use a renaming instead
4568 in most cases, but the back-end can probably take the address of
4569 such a component too so we go for the conservative stance.
4570
4571 For instance, we might need the address of any array type, even
4572 if normally passed by copy, to construct a fat pointer if the
4573 component is used as an actual for an unconstrained formal.
4574
4575 Likewise for record types: even if a specific record subtype is
4576 passed by copy, the parent type might be passed by ref (e.g. if
4577 it's of variable size) and we might take the address of a child
4578 component to pass to a parent formal. We have no way to check
4579 for such conditions here. */
4580 if (AGGREGATE_TYPE_P (gnu_type))
4581 return false;
4582
4583 return true;
4584}
4585
4586/* Perform final processing on global variables. */
4587
4588void
4589gnat_write_global_declarations (void)
4590{
4591 /* Proceed to optimize and emit assembly.
4592 FIXME: shouldn't be the front end's responsibility to call this. */
4593 cgraph_optimize ();
4594
4595 /* Emit debug info for all global declarations. */
4596 emit_debug_global_declarations (VEC_address (tree, global_decls),
4597 VEC_length (tree, global_decls));
4598}
4599
4600/* ************************************************************************
4601 * * GCC builtins support *
4602 * ************************************************************************ */
4603
4604/* The general scheme is fairly simple:
4605
4606 For each builtin function/type to be declared, gnat_install_builtins calls
4607 internal facilities which eventually get to gnat_push_decl, which in turn
4608 tracks the so-declared builtin function decls in the 'builtin_decls' global
4609 data structure. When an Intrinsic subprogram declaration is processed, we
4610 search this global data structure to retrieve the associated BUILT_IN DECL
4611 node. */
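/* For example, an Ada subprogram imported with convention Intrinsic whose
 name matches a declared builtin (say a hypothetical "__builtin_foo") is
 expected to be resolved through builtin_decl_for below rather than through
 a regular symbol lookup. */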
4612
4613/* Search the chain of currently available builtin declarations for a node
4614 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
4615 found, if any, or NULL_TREE otherwise. */
4616tree
4617builtin_decl_for (tree name)
4618{
4619 unsigned i;
4620 tree decl;
4621
4622 for (i = 0; VEC_iterate(tree, builtin_decls, i, decl); i++)
4623 if (DECL_NAME (decl) == name)
4624 return decl;
4625
4626 return NULL_TREE;
4627}
4628
4629/* The code below eventually exposes gnat_install_builtins, which declares
4630 the builtin types and functions we might need, either internally or as
4631 user accessible facilities.
4632
4633 ??? This is a first implementation shot, still in rough shape. It is
4634 heavily inspired from the "C" family implementation, with chunks copied
4635 verbatim from there.
4636
4637 Two obvious TODO candidates are
4638 o Use a more efficient name/decl mapping scheme
4639 o Devise a middle-end infrastructure to avoid having to copy
4640 pieces between front-ends. */
4641
4642/* ----------------------------------------------------------------------- *
4643 * BUILTIN ELEMENTARY TYPES *
4644 * ----------------------------------------------------------------------- */
4645
4646/* Standard data types to be used in builtin argument declarations. */
4647
4648enum c_tree_index
4649{
4650 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
4651 CTI_STRING_TYPE,
4652 CTI_CONST_STRING_TYPE,
4653
4654 CTI_MAX
4655};
4656
4657static tree c_global_trees[CTI_MAX];
4658
4659#define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
4660#define string_type_node c_global_trees[CTI_STRING_TYPE]
4661#define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
4662
4663 /* ??? In addition to some attribute handlers, we currently don't support a
4664 (small) number of builtin-types, which in turn inhibits support for a
4665 number of builtin functions. */
4666#define wint_type_node void_type_node
4667#define intmax_type_node void_type_node
4668#define uintmax_type_node void_type_node
4669
4670/* Build the void_list_node (void_type_node having been created). */
4671
4672static tree
4673build_void_list_node (void)
4674{
4675 tree t = build_tree_list (NULL_TREE, void_type_node);
4676 return t;
4677}
4678
4679/* Used to help initialize the builtin-types.def table. When a type of
4680 the correct size doesn't exist, use error_mark_node instead of NULL.
4681 The latter results in segfaults even when a decl using the type doesn't
4682 get invoked. */
4683
4684static tree
4685builtin_type_for_size (int size, bool unsignedp)
4686{
4687 tree type = lang_hooks.types.type_for_size (size, unsignedp);
4688 return type ? type : error_mark_node;
4689}
4690
4691/* Build/push the elementary type decls that builtin functions/types
4692 will need. */
4693
4694static void
4695install_builtin_elementary_types (void)
4696{
4697 signed_size_type_node = size_type_node;
4698 pid_type_node = integer_type_node;
4699 void_list_node = build_void_list_node ();
4700
4701 string_type_node = build_pointer_type (char_type_node);
4702 const_string_type_node
4703 = build_pointer_type (build_qualified_type
4704 (char_type_node, TYPE_QUAL_CONST));
4705}
4706
4707/* ----------------------------------------------------------------------- *
4708 * BUILTIN FUNCTION TYPES *
4709 * ----------------------------------------------------------------------- */
4710
4711/* Now, builtin function types per se. */
4712
4713enum c_builtin_type
4714{
4715#define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
4716#define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
4717#define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
4718#define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
4719#define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4720#define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4721#define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
4722#define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
4723#define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
4724#define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
4725#define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
4726#define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
4727#define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4728#define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4729#define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
4730 NAME,
4731#define DEF_POINTER_TYPE(NAME, TYPE) NAME,
4732#include "builtin-types.def"
4733#undef DEF_PRIMITIVE_TYPE
4734#undef DEF_FUNCTION_TYPE_0
4735#undef DEF_FUNCTION_TYPE_1
4736#undef DEF_FUNCTION_TYPE_2
4737#undef DEF_FUNCTION_TYPE_3
4738#undef DEF_FUNCTION_TYPE_4
4739#undef DEF_FUNCTION_TYPE_5
4740#undef DEF_FUNCTION_TYPE_6
4741#undef DEF_FUNCTION_TYPE_7
4742#undef DEF_FUNCTION_TYPE_VAR_0
4743#undef DEF_FUNCTION_TYPE_VAR_1
4744#undef DEF_FUNCTION_TYPE_VAR_2
4745#undef DEF_FUNCTION_TYPE_VAR_3
4746#undef DEF_FUNCTION_TYPE_VAR_4
4747#undef DEF_FUNCTION_TYPE_VAR_5
4748#undef DEF_POINTER_TYPE
4749 BT_LAST
4750};
4751
4752typedef enum c_builtin_type builtin_type;
4753
4754/* A temporary array used in communication with def_fn_type. */
4755static GTY(()) tree builtin_types[(int) BT_LAST + 1];
4756
4757/* A helper function for install_builtin_function_types. Build the function type
4758 for DEF with return type RET and N arguments. If VAR is true, then the
4759 function should be variadic after those N arguments.
4760
4761 Takes special care not to ICE if any of the types involved are
4762 error_mark_node, which indicates that said type is not in fact available
4763 (see builtin_type_for_size), in which case the function type as a whole
4764 should be error_mark_node. */
4765
4766static void
4767def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
4768{
4769 tree args = NULL, t;
4770 va_list list;
4771 int i;
4772
4773 va_start (list, n);
4774 for (i = 0; i < n; ++i)
4775 {
4776 builtin_type a = va_arg (list, builtin_type);
4777 t = builtin_types[a];
4778 if (t == error_mark_node)
4779 goto egress;
4780 args = tree_cons (NULL_TREE, t, args);
4781 }
4782 va_end (list);
4783
4784 args = nreverse (args);
4785 if (!var)
4786 args = chainon (args, void_list_node);
4787
4788 t = builtin_types[ret];
4789 if (t == error_mark_node)
4790 goto egress;
4791 t = build_function_type (t, args);
4792
4793 egress:
4794 builtin_types[def] = t;
4795}
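
/* A hedged sketch of what a direct call would build (BT_FN_EXAMPLE is a
   placeholder enumerator, not one taken from builtin-types.def):

     def_fn_type (BT_FN_EXAMPLE, BT_INT, 0, 2, BT_INT, BT_CONST_STRING);

   would set builtin_types[BT_FN_EXAMPLE] to the non-variadic function type
   "int (int, const char *)", or to error_mark_node if either operand type
   were unavailable.  In practice these calls only come from the macro
   expansions of builtin-types.def below.  */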
4796
4797/* Build the builtin function types and install them in the builtin_types
4798 array for later use in builtin function decls. */
4799
4800static void
4801install_builtin_function_types (void)
4802{
4803 tree va_list_ref_type_node;
4804 tree va_list_arg_type_node;
4805
4806 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4807 {
4808 va_list_arg_type_node = va_list_ref_type_node =
4809 build_pointer_type (TREE_TYPE (va_list_type_node));
4810 }
4811 else
4812 {
4813 va_list_arg_type_node = va_list_type_node;
4814 va_list_ref_type_node = build_reference_type (va_list_type_node);
4815 }
4816
4817#define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
4818 builtin_types[ENUM] = VALUE;
4819#define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
4820 def_fn_type (ENUM, RETURN, 0, 0);
4821#define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
4822 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
4823#define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
4824 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
4825#define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
4826 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
4827#define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
4828 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
4829#define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
4830 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
4831#define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4832 ARG6) \
4833 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
4834#define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4835 ARG6, ARG7) \
4836 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
4837#define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
4838 def_fn_type (ENUM, RETURN, 1, 0);
4839#define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
4840 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
4841#define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
4842 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
4843#define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
4844 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
4845#define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
4846 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
4847#define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
4848 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
4849#define DEF_POINTER_TYPE(ENUM, TYPE) \
4850 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
4851
4852#include "builtin-types.def"
4853
4854#undef DEF_PRIMITIVE_TYPE
#undef DEF_FUNCTION_TYPE_0
4855#undef DEF_FUNCTION_TYPE_1
4856#undef DEF_FUNCTION_TYPE_2
4857#undef DEF_FUNCTION_TYPE_3
4858#undef DEF_FUNCTION_TYPE_4
4859#undef DEF_FUNCTION_TYPE_5
4860#undef DEF_FUNCTION_TYPE_6
#undef DEF_FUNCTION_TYPE_7
4861#undef DEF_FUNCTION_TYPE_VAR_0
4862#undef DEF_FUNCTION_TYPE_VAR_1
4863#undef DEF_FUNCTION_TYPE_VAR_2
4864#undef DEF_FUNCTION_TYPE_VAR_3
4865#undef DEF_FUNCTION_TYPE_VAR_4
4866#undef DEF_FUNCTION_TYPE_VAR_5
4867#undef DEF_POINTER_TYPE
4868 builtin_types[(int) BT_LAST] = NULL_TREE;
4869}
4870
4871/* ----------------------------------------------------------------------- *
4872 * BUILTIN ATTRIBUTES *
4873 * ----------------------------------------------------------------------- */
4874
4875enum built_in_attribute
4876{
4877#define DEF_ATTR_NULL_TREE(ENUM) ENUM,
4878#define DEF_ATTR_INT(ENUM, VALUE) ENUM,
4879#define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
4880#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
4881#include "builtin-attrs.def"
4882#undef DEF_ATTR_NULL_TREE
4883#undef DEF_ATTR_INT
4884#undef DEF_ATTR_IDENT
4885#undef DEF_ATTR_TREE_LIST
4886 ATTR_LAST
4887};
4888
4889static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
4890
4891static void
4892install_builtin_attributes (void)
4893{
4894 /* Fill in the built_in_attributes array. */
4895#define DEF_ATTR_NULL_TREE(ENUM) \
4896 built_in_attributes[(int) ENUM] = NULL_TREE;
4897#define DEF_ATTR_INT(ENUM, VALUE) \
4898 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
4899#define DEF_ATTR_IDENT(ENUM, STRING) \
4900 built_in_attributes[(int) ENUM] = get_identifier (STRING);
4901#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
4902 built_in_attributes[(int) ENUM] \
4903 = tree_cons (built_in_attributes[(int) PURPOSE], \
4904 built_in_attributes[(int) VALUE], \
4905 built_in_attributes[(int) CHAIN]);
4906#include "builtin-attrs.def"
4907#undef DEF_ATTR_NULL_TREE
4908#undef DEF_ATTR_INT
4909#undef DEF_ATTR_IDENT
4910#undef DEF_ATTR_TREE_LIST
4911}
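
/* For illustration (hedged sketch; the exact entry names come from memory of
   builtin-attrs.def): an identifier entry such as

     DEF_ATTR_IDENT (ATTR_PURE, "pure")

   expands above into

     built_in_attributes[(int) ATTR_PURE] = get_identifier ("pure");

   and the DEF_ATTR_TREE_LIST entries then chain such identifiers into the
   attribute lists referenced from builtins.def.  */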
4912
4913/* Handle a "const" attribute; arguments as in
4914 struct attribute_spec.handler. */
4915
4916static tree
4917handle_const_attribute (tree *node, tree ARG_UNUSED (name),
4918 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
4919 bool *no_add_attrs)
4920{
4921 if (TREE_CODE (*node) == FUNCTION_DECL)
4922 TREE_READONLY (*node) = 1;
4923 else
4924 *no_add_attrs = true;
4925
4926 return NULL_TREE;
4927}
4928
4929/* Handle a "nothrow" attribute; arguments as in
4930 struct attribute_spec.handler. */
4931
4932static tree
4933handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
4934 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
4935 bool *no_add_attrs)
4936{
4937 if (TREE_CODE (*node) == FUNCTION_DECL)
4938 TREE_NOTHROW (*node) = 1;
4939 else
4940 *no_add_attrs = true;
4941
4942 return NULL_TREE;
4943}
4944
4945/* Handle a "pure" attribute; arguments as in
4946 struct attribute_spec.handler. */
4947
4948static tree
4949handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
4950 int ARG_UNUSED (flags), bool *no_add_attrs)
4951{
4952 if (TREE_CODE (*node) == FUNCTION_DECL)
4953 DECL_PURE_P (*node) = 1;
4954 /* ??? TODO: Support types. */
4955 else
4956 {
4957 warning (OPT_Wattributes, "%qE attribute ignored", name);
4958 *no_add_attrs = true;
4959 }
4960
4961 return NULL_TREE;
4962}
4963
4964/* Handle a "no vops" attribute; arguments as in
4965 struct attribute_spec.handler. */
4966
4967static tree
4968handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
4969 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
4970 bool *ARG_UNUSED (no_add_attrs))
4971{
4972 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
4973 DECL_IS_NOVOPS (*node) = 1;
4974 return NULL_TREE;
4975}
4976
4977/* Helper for nonnull attribute handling; fetch the operand number
4978 from the attribute argument list. */
4979
4980static bool
4981get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
4982{
4983 /* Verify the arg number is a constant. */
4984 if (TREE_CODE (arg_num_expr) != INTEGER_CST
4985 || TREE_INT_CST_HIGH (arg_num_expr) != 0)
4986 return false;
4987
4988 *valp = TREE_INT_CST_LOW (arg_num_expr);
4989 return true;
4990}
4991
4992/* Handle the "nonnull" attribute. */
4993static tree
4994handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
4995 tree args, int ARG_UNUSED (flags),
4996 bool *no_add_attrs)
4997{
4998 tree type = *node;
4999 unsigned HOST_WIDE_INT attr_arg_num;
5000
5001 /* If no arguments are specified, all pointer arguments should be
5002 non-null. Verify a full prototype is given so that the arguments
5003 will have the correct types when we actually check them later. */
5004 if (!args)
5005 {
5006 if (!TYPE_ARG_TYPES (type))
5007 {
5008 error ("nonnull attribute without arguments on a non-prototype");
5009 *no_add_attrs = true;
5010 }
5011 return NULL_TREE;
5012 }
5013
5014 /* Argument list specified. Verify that each argument number references
5015 a pointer argument. */
5016 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
5017 {
5018 tree argument;
5019 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
5020
5021 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
5022 {
5023 error ("nonnull argument has invalid operand number (argument %lu)",
5024 (unsigned long) attr_arg_num);
5025 *no_add_attrs = true;
5026 return NULL_TREE;
5027 }
5028
5029 argument = TYPE_ARG_TYPES (type);
5030 if (argument)
5031 {
5032 for (ck_num = 1; ; ck_num++)
5033 {
5034 if (!argument || ck_num == arg_num)
5035 break;
5036 argument = TREE_CHAIN (argument);
5037 }
5038
5039 if (!argument
5040 || TREE_CODE (TREE_VALUE (argument)) == VOID_TYPE)
5041 {
5042 error ("nonnull argument with out-of-range operand number (argument %lu, operand %lu)",
5043 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5044 *no_add_attrs = true;
5045 return NULL_TREE;
5046 }
5047
5048 if (TREE_CODE (TREE_VALUE (argument)) != POINTER_TYPE)
5049 {
5050 error ("nonnull argument references non-pointer operand (argument %lu, operand %lu)",
5051 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5052 *no_add_attrs = true;
5053 return NULL_TREE;
5054 }
5055 }
5056 }
5057
5058 return NULL_TREE;
5059}
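
/* Hedged usage example (the declaration below is illustrative, not GNAT
   source): the handler above accepts

     extern char *my_copy (char *, const char *)
       __attribute__ ((nonnull (1, 2)));

   and diagnoses operand numbers that are out of range or that designate a
   non-pointer argument.  */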
5060
5061/* Handle a "sentinel" attribute. */
5062
5063static tree
5064handle_sentinel_attribute (tree *node, tree name, tree args,
5065 int ARG_UNUSED (flags), bool *no_add_attrs)
5066{
5067 tree params = TYPE_ARG_TYPES (*node);
5068
5069 if (!params)
5070 {
5071 warning (OPT_Wattributes,
5072 "%qE attribute requires prototypes with named arguments", name);
5073 *no_add_attrs = true;
5074 }
5075 else
5076 {
5077 while (TREE_CHAIN (params))
5078 params = TREE_CHAIN (params);
5079
5080 if (VOID_TYPE_P (TREE_VALUE (params)))
5081 {
5082 warning (OPT_Wattributes,
5083 "%qE attribute only applies to variadic functions", name);
5084 *no_add_attrs = true;
5085 }
5086 }
5087
5088 if (args)
5089 {
5090 tree position = TREE_VALUE (args);
5091
5092 if (TREE_CODE (position) != INTEGER_CST)
5093 {
5094 warning (0, "requested position is not an integer constant");
5095 *no_add_attrs = true;
5096 }
5097 else
5098 {
5099 if (tree_int_cst_lt (position, integer_zero_node))
5100 {
5101 warning (0, "requested position is less than zero");
5102 *no_add_attrs = true;
5103 }
5104 }
5105 }
5106
5107 return NULL_TREE;
5108}
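
/* Hedged usage example (illustrative declaration only): the handler above
   accepts a variadic prototype with an optional non-negative position, e.g.

     extern void my_execl (const char *, ...) __attribute__ ((sentinel (0)));

   and warns for non-variadic functions and for positions that are not
   non-negative integer constants.  */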
5109
5110/* Handle a "noreturn" attribute; arguments as in
5111 struct attribute_spec.handler. */
5112
5113static tree
5114handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5115 int ARG_UNUSED (flags), bool *no_add_attrs)
5116{
5117 tree type = TREE_TYPE (*node);
5118
5119 /* See FIXME comment in c_common_attribute_table. */
5120 if (TREE_CODE (*node) == FUNCTION_DECL)
5121 TREE_THIS_VOLATILE (*node) = 1;
5122 else if (TREE_CODE (type) == POINTER_TYPE
5123 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
5124 TREE_TYPE (*node)
5125 = build_pointer_type
5126 (build_type_variant (TREE_TYPE (type),
5127 TYPE_READONLY (TREE_TYPE (type)), 1));
5128 else
5129 {
5130 warning (OPT_Wattributes, "%qE attribute ignored", name);
5131 *no_add_attrs = true;
5132 }
5133
5134 return NULL_TREE;
5135}
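
/* Hedged usage example (illustrative declaration only):

     extern void my_fatal_error (const char *) __attribute__ ((noreturn));

   is accepted by the handler above, which marks the decl TREE_THIS_VOLATILE;
   for pointers to functions, the pointed-to function type is rebuilt with
   the "volatile" (noreturn) flag set instead.  */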
5136
5137/* Handle a "malloc" attribute; arguments as in
5138 struct attribute_spec.handler. */
5139
5140static tree
5141handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5142 int ARG_UNUSED (flags), bool *no_add_attrs)
5143{
5144 if (TREE_CODE (*node) == FUNCTION_DECL
5145 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
5146 DECL_IS_MALLOC (*node) = 1;
5147 else
5148 {
5149 warning (OPT_Wattributes, "%qE attribute ignored", name);
5150 *no_add_attrs = true;
5151 }
5152
5153 return NULL_TREE;
5154}
5155
5156/* Fake handler for attributes we don't properly support. */
5157
5158tree
5159fake_attribute_handler (tree * ARG_UNUSED (node),
5160 tree ARG_UNUSED (name),
5161 tree ARG_UNUSED (args),
5162 int ARG_UNUSED (flags),
5163 bool * ARG_UNUSED (no_add_attrs))
5164{
5165 return NULL_TREE;
5166}
5167
5168/* Handle a "type_generic" attribute. */
5169
5170static tree
5171handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
5172 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5173 bool * ARG_UNUSED (no_add_attrs))
5174{
5175 tree params;
5176
5177 /* Ensure we have a function type. */
5178 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
5179
5180 params = TYPE_ARG_TYPES (*node);
5181 while (params && ! VOID_TYPE_P (TREE_VALUE (params)))
5182 params = TREE_CHAIN (params);
5183
5184 /* Ensure we have a variadic function. */
5185 gcc_assert (!params);
5186
5187 return NULL_TREE;
5188}
5189
5190/* ----------------------------------------------------------------------- *
5191 * BUILTIN FUNCTIONS *
5192 * ----------------------------------------------------------------------- */
5193
5194/* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
5195 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
5196 if nonansi_p and flag_no_nonansi_builtin. */
5197
5198static void
5199def_builtin_1 (enum built_in_function fncode,
5200 const char *name,
5201 enum built_in_class fnclass,
5202 tree fntype, tree libtype,
5203 bool both_p, bool fallback_p,
5204 bool nonansi_p ATTRIBUTE_UNUSED,
5205 tree fnattrs, bool implicit_p)
5206{
5207 tree decl;
5208 const char *libname;
5209
5210 /* Preserve an already installed decl. It most likely was set up in advance
5211 (e.g. as part of the internal builtins) for specific reasons. */
5212 if (built_in_decls[(int) fncode] != NULL_TREE)
5213 return;
5214
5215 gcc_assert ((!both_p && !fallback_p)
5216 || !strncmp (name, "__builtin_",
5217 strlen ("__builtin_")));
5218
5219 libname = name + strlen ("__builtin_");
5220 decl = add_builtin_function (name, fntype, fncode, fnclass,
5221 (fallback_p ? libname : NULL),
5222 fnattrs);
5223 if (both_p)
5224 /* ??? This is normally further controlled by command-line options
5225 like -fno-builtin, but we don't have them for Ada. */
5226 add_builtin_function (libname, libtype, fncode, fnclass,
5227 NULL, fnattrs);
5228
5229 built_in_decls[(int) fncode] = decl;
5230 if (implicit_p)
5231 implicit_built_in_decls[(int) fncode] = decl;
5232}
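
/* Hedged illustration of the call shape produced by the DEF_BUILTIN
   expansion below for a library builtin like memcpy (the type and attribute
   enumerators are shown as assumptions and may not match builtins.def
   exactly):

     def_builtin_1 (BUILT_IN_MEMCPY, "__builtin_memcpy", BUILT_IN_NORMAL,
                    builtin_types[(int) BT_FN_PTR_PTR_CONST_PTR_SIZE],
                    builtin_types[(int) BT_FN_PTR_PTR_CONST_PTR_SIZE],
                    true, true, false,
                    built_in_attributes[(int) ATTR_NOTHROW_NONNULL], true);

   which registers both "__builtin_memcpy" and plain "memcpy".  */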
5233
5234static int flag_isoc94 = 0;
5235static int flag_isoc99 = 0;
5236
5237/* Install what the common builtins.def offers. */
5238
5239static void
5240install_builtin_functions (void)
5241{
5242#define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
5243 NONANSI_P, ATTRS, IMPLICIT, COND) \
5244 if (NAME && COND) \
5245 def_builtin_1 (ENUM, NAME, CLASS, \
5246 builtin_types[(int) TYPE], \
5247 builtin_types[(int) LIBTYPE], \
5248 BOTH_P, FALLBACK_P, NONANSI_P, \
5249 built_in_attributes[(int) ATTRS], IMPLICIT);
5250#include "builtins.def"
5251#undef DEF_BUILTIN
5252}
5253
5254/* ----------------------------------------------------------------------- *
5255 * BUILTIN FUNCTIONS *
5256 * ----------------------------------------------------------------------- */
5257
5258/* Install the builtin functions we might need. */
5259
5260void
5261gnat_install_builtins (void)
5262{
5263 install_builtin_elementary_types ();
5264 install_builtin_function_types ();
5265 install_builtin_attributes ();
5266
5267 /* Install builtins used by generic middle-end pieces first. Some of these
5268 know about internal specifics and adjust their attributes accordingly, for
5269 instance __builtin_alloca vs. no-throw and -fstack-check. The generic
5270 definitions from builtins.def will be ignored for these. */
5271 build_common_builtin_nodes ();
5272
5273 /* Now, install the target specific builtins, such as the AltiVec family on
5274 ppc, and the common set as exposed by builtins.def. */
5275 targetm.init_builtins ();
5276 install_builtin_functions ();
5277}
5278
5279#include "gt-ada-utils.h"
5280#include "gtype-ada.h"