1/****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
 9 * Copyright (C) 1992-2009, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
25
26/* We have attribute handlers using C specific format specifiers in warning
27 messages. Make sure they are properly recognized. */
28#define GCC_DIAG_STYLE __gcc_cdiag__
29
30#include "config.h"
31#include "system.h"
32#include "coretypes.h"
33#include "tm.h"
34#include "tree.h"
35#include "flags.h"
36#include "defaults.h"
37#include "toplev.h"
38#include "output.h"
39#include "ggc.h"
40#include "debug.h"
41#include "convert.h"
42#include "target.h"
43#include "function.h"
44#include "cgraph.h"
45#include "tree-inline.h"
46#include "tree-iterator.h"
47#include "gimple.h"
48#include "tree-dump.h"
49#include "pointer-set.h"
50#include "langhooks.h"
 51#include "rtl.h"
52
53#include "ada.h"
54#include "types.h"
55#include "atree.h"
56#include "elists.h"
57#include "namet.h"
58#include "nlists.h"
59#include "stringt.h"
60#include "uintp.h"
61#include "fe.h"
62#include "sinfo.h"
63#include "einfo.h"
64#include "ada-tree.h"
65#include "gigi.h"
66
67#ifndef MAX_FIXED_MODE_SIZE
68#define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
69#endif
70
71#ifndef MAX_BITS_PER_WORD
72#define MAX_BITS_PER_WORD BITS_PER_WORD
73#endif
74
75/* If nonzero, pretend we are allocating at global level. */
76int force_global;
77
78/* Tree nodes for the various types and decls we create. */
79tree gnat_std_decls[(int) ADT_LAST];
80
81/* Functions to call for each of the possible raise reasons. */
82tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
83
84/* Forward declarations for handlers of attributes. */
85static tree handle_const_attribute (tree *, tree, tree, int, bool *);
86static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
87static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
88static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
89static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
90static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
91static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
92static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
93static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
94
95/* Fake handler for attributes we don't properly support, typically because
 96 they'd require dragging in a lot of the common C front-end circuitry. */
97static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
98
99/* Table of machine-independent internal attributes for Ada. We support
100 this minimal set of attributes to accommodate the needs of builtins. */
101const struct attribute_spec gnat_internal_attribute_table[] =
102{
103 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
104 { "const", 0, 0, true, false, false, handle_const_attribute },
105 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute },
106 { "pure", 0, 0, true, false, false, handle_pure_attribute },
107 { "no vops", 0, 0, true, false, false, handle_novops_attribute },
108 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute },
109 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute },
110 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute },
111 { "malloc", 0, 0, true, false, false, handle_malloc_attribute },
112 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute },
113
 114 /* ??? format and format_arg are heavy and not supported, which actually
 115 prevents support for stdio builtins, even though we declare them as
 116 part of the common builtins.def contents. */
117 { "format", 3, 3, false, true, true, fake_attribute_handler },
118 { "format_arg", 1, 1, false, true, true, fake_attribute_handler },
119
120 { NULL, 0, 0, false, false, false, NULL }
121};
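
/* Illustrative sketch (not part of the table above): a handler with the
   signature declared earlier receives the tree node the attribute applies
   to, the attribute name, its arguments, flags, and a boolean used to
   decline the attribute.  A minimal, hypothetical handler could look like:

     static tree
     handle_example_attribute (tree *node, tree name ATTRIBUTE_UNUSED,
                               tree args ATTRIBUTE_UNUSED,
                               int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
     {
       if (TREE_CODE (*node) != FUNCTION_DECL)
         *no_add_attrs = true;   // decline: do not attach the attribute
       return NULL_TREE;         // no replacement tree
     }

   The real handlers for the entries above are defined further down in
   this file.  */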
122
123/* Associates a GNAT tree node to a GCC tree node. It is used in
124 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
125 of `save_gnu_tree' for more info. */
126static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
127
128#define GET_GNU_TREE(GNAT_ENTITY) \
129 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
130
131#define SET_GNU_TREE(GNAT_ENTITY,VAL) \
132 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
133
134#define PRESENT_GNU_TREE(GNAT_ENTITY) \
135 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
136
137/* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
138static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
139
140#define GET_DUMMY_NODE(GNAT_ENTITY) \
141 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
142
143#define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
144 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
145
146#define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
147 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
148
149/* This variable keeps a table for types for each precision so that we only
150 allocate each of them once. Signed and unsigned types are kept separate.
151
152 Note that these types are only used when fold-const requests something
153 special. Perhaps we should NOT share these types; we'll see how it
154 goes later. */
155static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
156
157/* Likewise for float types, but record these by mode. */
158static GTY(()) tree float_types[NUM_MACHINE_MODES];
159
160/* For each binding contour we allocate a binding_level structure to indicate
161 the binding depth. */
162
163struct gnat_binding_level GTY((chain_next ("%h.chain")))
164{
165 /* The binding level containing this one (the enclosing binding level). */
166 struct gnat_binding_level *chain;
167 /* The BLOCK node for this level. */
168 tree block;
169 /* If nonzero, the setjmp buffer that needs to be updated for any
170 variable-sized definition within this context. */
171 tree jmpbuf_decl;
172};
173
174/* The binding level currently in effect. */
175static GTY(()) struct gnat_binding_level *current_binding_level;
176
177/* A chain of gnat_binding_level structures awaiting reuse. */
178static GTY((deletable)) struct gnat_binding_level *free_binding_level;
179
180/* An array of global declarations. */
181static GTY(()) VEC(tree,gc) *global_decls;
182
183/* An array of builtin function declarations. */
184static GTY(()) VEC(tree,gc) *builtin_decls;
185
186/* An array of global renaming pointers. */
187static GTY(()) VEC(tree,gc) *global_renaming_pointers;
188
189/* A chain of unused BLOCK nodes. */
190static GTY((deletable)) tree free_block_chain;
191
192static tree merge_sizes (tree, tree, tree, bool, bool);
193static tree compute_related_constant (tree, tree);
194static tree split_plus (tree, tree *);
195static void gnat_gimplify_function (tree);
196static tree float_type_for_precision (int, enum machine_mode);
197static tree convert_to_fat_pointer (tree, tree);
198static tree convert_to_thin_pointer (tree, tree);
199static tree make_descriptor_field (const char *,tree, tree, tree);
200static bool potential_alignment_gap (tree, tree, tree);
201\f
202/* Initialize the association of GNAT nodes to GCC trees. */
203
204void
205init_gnat_to_gnu (void)
206{
207 associate_gnat_to_gnu
208 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
209}
210
211/* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
212 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
 213 a ..._DECL node. If NO_CHECK is true, the latter check is suppressed.
214
215 If GNU_DECL is zero, a previous association is to be reset. */
216
217void
218save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
219{
220 /* Check that GNAT_ENTITY is not already defined and that it is being set
221 to something which is a decl. Raise gigi 401 if not. Usually, this
222 means GNAT_ENTITY is defined twice, but occasionally is due to some
223 Gigi problem. */
224 gcc_assert (!(gnu_decl
225 && (PRESENT_GNU_TREE (gnat_entity)
226 || (!no_check && !DECL_P (gnu_decl)))));
227
228 SET_GNU_TREE (gnat_entity, gnu_decl);
229}
230
231/* GNAT_ENTITY is a GNAT tree node for a defining identifier.
232 Return the ..._DECL node that was associated with it. If there is no tree
233 node associated with GNAT_ENTITY, abort.
234
235 In some cases, such as delayed elaboration or expressions that need to
236 be elaborated only once, GNAT_ENTITY is really not an entity. */
237
238tree
239get_gnu_tree (Entity_Id gnat_entity)
240{
241 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
242 return GET_GNU_TREE (gnat_entity);
243}
244
245/* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
246
247bool
248present_gnu_tree (Entity_Id gnat_entity)
249{
250 return PRESENT_GNU_TREE (gnat_entity);
251}
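
/* Illustrative usage sketch for the three routines above, with a
   hypothetical GNAT entity GNAT_ENTITY and GCC decl GNU_DECL:

     if (!present_gnu_tree (gnat_entity))
       save_gnu_tree (gnat_entity, gnu_decl, false);
     ...
     gnu_decl = get_gnu_tree (gnat_entity);

   Passing NULL_TREE as GNU_DECL to save_gnu_tree resets a previous
   association, as documented above.  */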
252\f
253/* Initialize the association of GNAT nodes to GCC trees as dummies. */
254
255void
256init_dummy_type (void)
257{
258 dummy_node_table
259 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
260}
261
262/* Make a dummy type corresponding to GNAT_TYPE. */
263
264tree
265make_dummy_type (Entity_Id gnat_type)
266{
267 Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
268 tree gnu_type;
269
270 /* If there is an equivalent type, get its underlying type. */
271 if (Present (gnat_underlying))
272 gnat_underlying = Underlying_Type (gnat_underlying);
273
274 /* If there was no equivalent type (can only happen when just annotating
275 types) or underlying type, go back to the original type. */
276 if (No (gnat_underlying))
277 gnat_underlying = gnat_type;
278
 279 /* If there is already a dummy type, use that one. Otherwise, make one. */
280 if (PRESENT_DUMMY_NODE (gnat_underlying))
281 return GET_DUMMY_NODE (gnat_underlying);
282
283 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
284 an ENUMERAL_TYPE. */
285 gnu_type = make_node (Is_Record_Type (gnat_underlying)
286 ? tree_code_for_record_type (gnat_underlying)
287 : ENUMERAL_TYPE);
288 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
289 TYPE_DUMMY_P (gnu_type) = 1;
290 TYPE_STUB_DECL (gnu_type)
291 = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
 292 if (AGGREGATE_TYPE_P (gnu_type))
 293 TYPE_BY_REFERENCE_P (gnu_type) = Is_By_Reference_Type (gnat_type);
294
295 SET_DUMMY_NODE (gnat_underlying, gnu_type);
296
297 return gnu_type;
298}
299\f
300/* Return nonzero if we are currently in the global binding level. */
301
302int
303global_bindings_p (void)
304{
305 return ((force_global || !current_function_decl) ? -1 : 0);
306}
307
308/* Enter a new binding level. */
309
310void
311gnat_pushlevel ()
312{
313 struct gnat_binding_level *newlevel = NULL;
314
315 /* Reuse a struct for this binding level, if there is one. */
316 if (free_binding_level)
317 {
318 newlevel = free_binding_level;
319 free_binding_level = free_binding_level->chain;
320 }
321 else
322 newlevel
323 = (struct gnat_binding_level *)
324 ggc_alloc (sizeof (struct gnat_binding_level));
325
326 /* Use a free BLOCK, if any; otherwise, allocate one. */
327 if (free_block_chain)
328 {
329 newlevel->block = free_block_chain;
330 free_block_chain = BLOCK_CHAIN (free_block_chain);
331 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
332 }
333 else
334 newlevel->block = make_node (BLOCK);
335
336 /* Point the BLOCK we just made to its parent. */
337 if (current_binding_level)
338 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
339
340 BLOCK_VARS (newlevel->block) = BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
341 TREE_USED (newlevel->block) = 1;
342
343 /* Add this level to the front of the chain (stack) of levels that are
344 active. */
345 newlevel->chain = current_binding_level;
346 newlevel->jmpbuf_decl = NULL_TREE;
347 current_binding_level = newlevel;
348}
349
350/* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
351 and point FNDECL to this BLOCK. */
352
353void
354set_current_block_context (tree fndecl)
355{
356 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
357 DECL_INITIAL (fndecl) = current_binding_level->block;
358}
359
360/* Set the jmpbuf_decl for the current binding level to DECL. */
361
362void
363set_block_jmpbuf_decl (tree decl)
364{
365 current_binding_level->jmpbuf_decl = decl;
366}
367
368/* Get the jmpbuf_decl, if any, for the current binding level. */
369
370tree
371get_block_jmpbuf_decl ()
372{
373 return current_binding_level->jmpbuf_decl;
374}
375
376/* Exit a binding level. Set any BLOCK into the current code group. */
377
378void
379gnat_poplevel ()
380{
381 struct gnat_binding_level *level = current_binding_level;
382 tree block = level->block;
383
384 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
385 BLOCK_SUBBLOCKS (block) = nreverse (BLOCK_SUBBLOCKS (block));
386
 387 /* If this is a function-level BLOCK, don't do anything. Otherwise, if there
 388 are no variables, free the block and merge its subblocks into those of its
 389 parent block; otherwise, add it to the list of its parent. */
390 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
391 ;
392 else if (BLOCK_VARS (block) == NULL_TREE)
393 {
394 BLOCK_SUBBLOCKS (level->chain->block)
395 = chainon (BLOCK_SUBBLOCKS (block),
396 BLOCK_SUBBLOCKS (level->chain->block));
397 BLOCK_CHAIN (block) = free_block_chain;
398 free_block_chain = block;
399 }
400 else
401 {
402 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
403 BLOCK_SUBBLOCKS (level->chain->block) = block;
404 TREE_USED (block) = 1;
405 set_block_for_group (block);
406 }
407
408 /* Free this binding structure. */
409 current_binding_level = level->chain;
410 level->chain = free_binding_level;
411 free_binding_level = level;
412}
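
/* Illustrative sketch of how the binding level routines above are meant to
   bracket the translation of a scope (hypothetical usage):

     gnat_pushlevel ();
     ... create decls and gnat_pushdecl () them into this level ...
     gnat_poplevel ();

   so that the BLOCK built for the level either gets recycled or ends up
   chained under the BLOCK of the enclosing level, as described above.  */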
413
414\f
415/* Records a ..._DECL node DECL as belonging to the current lexical scope
416 and uses GNAT_NODE for location information and propagating flags. */
417
418void
419gnat_pushdecl (tree decl, Node_Id gnat_node)
420{
 421 /* If this decl is public external or at top level, there is no context.
 422 But PARM_DECLs always go in the level of their function. */
423 if (TREE_CODE (decl) != PARM_DECL
424 && ((DECL_EXTERNAL (decl) && TREE_PUBLIC (decl))
425 || global_bindings_p ()))
426 DECL_CONTEXT (decl) = 0;
427 else
428 {
429 DECL_CONTEXT (decl) = current_function_decl;
430
431 /* Functions imported in another function are not really nested. */
432 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_PUBLIC (decl))
433 DECL_NO_STATIC_CHAIN (decl) = 1;
434 }
435
436 TREE_NO_WARNING (decl) = (gnat_node == Empty || Warnings_Off (gnat_node));
437
438 /* Set the location of DECL and emit a declaration for it. */
439 if (Present (gnat_node))
440 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
441 add_decl_expr (decl, gnat_node);
442
443 /* Put the declaration on the list. The list of declarations is in reverse
444 order. The list will be reversed later. Put global variables in the
445 globals list and builtin functions in a dedicated list to speed up
446 further lookups. Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into
447 the list, as they will cause trouble with the debugger and aren't needed
448 anyway. */
449 if (TREE_CODE (decl) != TYPE_DECL
450 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
451 {
452 if (global_bindings_p ())
453 {
454 VEC_safe_push (tree, gc, global_decls, decl);
455
456 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
457 VEC_safe_push (tree, gc, builtin_decls, decl);
458 }
459 else
460 {
461 TREE_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
462 BLOCK_VARS (current_binding_level->block) = decl;
463 }
464 }
465
466 /* For the declaration of a type, set its name if it either is not already
 467 set or if the previous type name was not derived from a source name.
468 We'd rather have the type named with a real name and all the pointer
469 types to the same object have the same POINTER_TYPE node. Code in the
470 equivalent function of c-decl.c makes a copy of the type node here, but
471 that may cause us trouble with incomplete types. We make an exception
472 for fat pointer types because the compiler automatically builds them
473 for unconstrained array types and the debugger uses them to represent
474 both these and pointers to these. */
475 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
476 {
477 tree t = TREE_TYPE (decl);
478
 479 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
480 ;
481 else if (TYPE_FAT_POINTER_P (t))
482 {
483 tree tt = build_variant_type_copy (t);
484 TYPE_NAME (tt) = decl;
485 TREE_USED (tt) = TREE_USED (t);
486 TREE_TYPE (decl) = tt;
487 DECL_ORIGINAL_TYPE (decl) = t;
488 t = NULL_TREE;
489 }
490 else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
491 ;
492 else
493 t = NULL_TREE;
494
495 /* Propagate the name to all the variants. This is needed for
496 the type qualifiers machinery to work properly. */
497 if (t)
498 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
499 TYPE_NAME (t) = decl;
500 }
501}
502\f
503/* Do little here. Set up the standard declarations later after the
504 front end has been run. */
505
506void
507gnat_init_decl_processing (void)
508{
509 /* Make the binding_level structure for global names. */
510 current_function_decl = 0;
511 current_binding_level = 0;
512 free_binding_level = 0;
513 gnat_pushlevel ();
514
515 build_common_tree_nodes (true, true);
516
517 /* In Ada, we use a signed type for SIZETYPE. Use the signed type
518 corresponding to the size of Pmode. In most cases when ptr_mode and
519 Pmode differ, C will use the width of ptr_mode as sizetype. But we get
520 far better code using the width of Pmode. Make this here since we need
521 this before we can expand the GNAT types. */
522 size_type_node = gnat_type_for_size (GET_MODE_BITSIZE (Pmode), 0);
523 set_sizetype (size_type_node);
524
525 /* In Ada, we use an unsigned 8-bit type for the default boolean type. */
526 boolean_type_node = make_node (BOOLEAN_TYPE);
527 TYPE_PRECISION (boolean_type_node) = 1;
528 fixup_unsigned_type (boolean_type_node);
529 TYPE_RM_SIZE_NUM (boolean_type_node) = bitsize_int (1);
530
531 build_common_tree_nodes_2 (0);
532
533 ptr_void_type_node = build_pointer_type (void_type_node);
534}
535\f
536/* Record TYPE as a builtin type for Ada. NAME is the name of the type. */
537
538void
 539record_builtin_type (const char *name, tree type)
 540{
 541 tree type_decl = build_decl (TYPE_DECL, get_identifier (name), type);
 542
 543 gnat_pushdecl (type_decl, Empty);
 544
545 if (debug_hooks->type_decl)
546 debug_hooks->type_decl (type_decl, false);
547}
548\f
549/* Given a record type RECORD_TYPE and a chain of FIELD_DECL nodes FIELDLIST,
550 finish constructing the record or union type. If REP_LEVEL is zero, this
551 record has no representation clause and so will be entirely laid out here.
552 If REP_LEVEL is one, this record has a representation clause and has been
553 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
554 this record is derived from a parent record and thus inherits its layout;
555 only make a pass on the fields to finalize them. If DO_NOT_FINALIZE is
556 true, the record type is expected to be modified afterwards so it will
557 not be sent to the back-end for finalization. */
558
559void
560finish_record_type (tree record_type, tree fieldlist, int rep_level,
561 bool do_not_finalize)
562{
563 enum tree_code code = TREE_CODE (record_type);
564 tree name = TYPE_NAME (record_type);
565 tree ada_size = bitsize_zero_node;
566 tree size = bitsize_zero_node;
567 bool had_size = TYPE_SIZE (record_type) != 0;
568 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
569 bool had_align = TYPE_ALIGN (record_type) != 0;
570 tree field;
571
 572 TYPE_FIELDS (record_type) = fieldlist;
 573
574 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
575 generate debug info and have a parallel type. */
576 if (name && TREE_CODE (name) == TYPE_DECL)
577 name = DECL_NAME (name);
578 TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);
579
580 /* Globally initialize the record first. If this is a rep'ed record,
581 that just means some initializations; otherwise, layout the record. */
582 if (rep_level > 0)
583 {
584 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
 585 SET_TYPE_MODE (record_type, BLKmode);
586
587 if (!had_size_unit)
588 TYPE_SIZE_UNIT (record_type) = size_zero_node;
589 if (!had_size)
590 TYPE_SIZE (record_type) = bitsize_zero_node;
591
592 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
593 out just like a UNION_TYPE, since the size will be fixed. */
594 else if (code == QUAL_UNION_TYPE)
595 code = UNION_TYPE;
596 }
597 else
598 {
599 /* Ensure there isn't a size already set. There can be in an error
600 case where there is a rep clause but all fields have errors and
601 no longer have a position. */
602 TYPE_SIZE (record_type) = 0;
603 layout_type (record_type);
604 }
605
606 /* At this point, the position and size of each field is known. It was
607 either set before entry by a rep clause, or by laying out the type above.
608
609 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
610 to compute the Ada size; the GCC size and alignment (for rep'ed records
611 that are not padding types); and the mode (for rep'ed records). We also
612 clear the DECL_BIT_FIELD indication for the cases we know have not been
613 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
614
615 if (code == QUAL_UNION_TYPE)
616 fieldlist = nreverse (fieldlist);
617
618 for (field = fieldlist; field; field = TREE_CHAIN (field))
619 {
620 tree type = TREE_TYPE (field);
621 tree pos = bit_position (field);
622 tree this_size = DECL_SIZE (field);
623 tree this_ada_size;
624
625 if ((TREE_CODE (type) == RECORD_TYPE
626 || TREE_CODE (type) == UNION_TYPE
627 || TREE_CODE (type) == QUAL_UNION_TYPE)
628 && !TYPE_IS_FAT_POINTER_P (type)
629 && !TYPE_CONTAINS_TEMPLATE_P (type)
630 && TYPE_ADA_SIZE (type))
631 this_ada_size = TYPE_ADA_SIZE (type);
632 else
633 this_ada_size = this_size;
634
635 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
636 if (DECL_BIT_FIELD (field)
637 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
638 {
639 unsigned int align = TYPE_ALIGN (type);
640
641 /* In the general case, type alignment is required. */
642 if (value_factor_p (pos, align))
643 {
644 /* The enclosing record type must be sufficiently aligned.
645 Otherwise, if no alignment was specified for it and it
646 has been laid out already, bump its alignment to the
647 desired one if this is compatible with its size. */
648 if (TYPE_ALIGN (record_type) >= align)
649 {
650 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
651 DECL_BIT_FIELD (field) = 0;
652 }
653 else if (!had_align
654 && rep_level == 0
655 && value_factor_p (TYPE_SIZE (record_type), align))
656 {
657 TYPE_ALIGN (record_type) = align;
658 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
659 DECL_BIT_FIELD (field) = 0;
660 }
661 }
662
 663 /* In the non-strict alignment case, only byte alignment is required. */
664 if (!STRICT_ALIGNMENT
665 && DECL_BIT_FIELD (field)
666 && value_factor_p (pos, BITS_PER_UNIT))
667 DECL_BIT_FIELD (field) = 0;
668 }
669
670 /* If we still have DECL_BIT_FIELD set at this point, we know the field
671 is technically not addressable. Except that it can actually be
672 addressed if the field is BLKmode and happens to be properly
673 aligned. */
674 DECL_NONADDRESSABLE_P (field)
675 |= DECL_BIT_FIELD (field) && DECL_MODE (field) != BLKmode;
676
677 /* A type must be as aligned as its most aligned field that is not
678 a bit-field. But this is already enforced by layout_type. */
679 if (rep_level > 0 && !DECL_BIT_FIELD (field))
680 TYPE_ALIGN (record_type)
681 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
682
683 switch (code)
684 {
685 case UNION_TYPE:
686 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
687 size = size_binop (MAX_EXPR, size, this_size);
688 break;
689
690 case QUAL_UNION_TYPE:
691 ada_size
692 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
693 this_ada_size, ada_size);
694 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
695 this_size, size);
696 break;
697
698 case RECORD_TYPE:
699 /* Since we know here that all fields are sorted in order of
700 increasing bit position, the size of the record is one
701 higher than the ending bit of the last field processed
702 unless we have a rep clause, since in that case we might
703 have a field outside a QUAL_UNION_TYPE that has a higher ending
704 position. So use a MAX in that case. Also, if this field is a
705 QUAL_UNION_TYPE, we need to take into account the previous size in
706 the case of empty variants. */
707 ada_size
708 = merge_sizes (ada_size, pos, this_ada_size,
709 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
710 size
711 = merge_sizes (size, pos, this_size,
712 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
713 break;
714
715 default:
716 gcc_unreachable ();
717 }
718 }
719
720 if (code == QUAL_UNION_TYPE)
721 nreverse (fieldlist);
722
723 /* If the type is discriminated, it can be used to access all its
724 constrained subtypes, so force structural equality checks. */
725 if (CONTAINS_PLACEHOLDER_P (size))
726 SET_TYPE_STRUCTURAL_EQUALITY (record_type);
727
728 if (rep_level < 2)
729 {
730 /* If this is a padding record, we never want to make the size smaller
731 than what was specified in it, if any. */
732 if (TREE_CODE (record_type) == RECORD_TYPE
733 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
734 size = TYPE_SIZE (record_type);
735
736 /* Now set any of the values we've just computed that apply. */
737 if (!TYPE_IS_FAT_POINTER_P (record_type)
738 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
739 SET_TYPE_ADA_SIZE (record_type, ada_size);
740
741 if (rep_level > 0)
742 {
743 tree size_unit = had_size_unit
744 ? TYPE_SIZE_UNIT (record_type)
745 : convert (sizetype,
746 size_binop (CEIL_DIV_EXPR, size,
747 bitsize_unit_node));
748 unsigned int align = TYPE_ALIGN (record_type);
749
750 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
751 TYPE_SIZE_UNIT (record_type)
752 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
753
754 compute_record_mode (record_type);
755 }
756 }
757
758 if (!do_not_finalize)
759 rest_of_record_type_compilation (record_type);
760}
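
/* Illustrative sketch of the REP_LEVEL parameter documented above, with a
   hypothetical record type GNU_TYPE and field list GNU_FIELD_LIST:

     finish_record_type (gnu_type, gnu_field_list, 0, false);
	 no rep clause: the record is entirely laid out here.
     finish_record_type (gnu_type, gnu_field_list, 1, false);
	 rep clause: the fields are already positioned, only the sizes and
	 the alignment are set here.
     finish_record_type (gnu_type, gnu_field_list, 2, false);
	 record derived from a parent record: its layout is inherited and
	 only a finalizing pass is made over the fields.

   Passing true for DO_NOT_FINALIZE defers rest_of_record_type_compilation
   so the caller can still modify the type afterwards.  */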
761
762/* Wrap up compilation of RECORD_TYPE, i.e. most notably output all
763 the debug information associated with it. It need not be invoked
764 directly in most cases since finish_record_type takes care of doing
765 so, unless explicitly requested not to through DO_NOT_FINALIZE. */
766
767void
768rest_of_record_type_compilation (tree record_type)
769{
770 tree fieldlist = TYPE_FIELDS (record_type);
771 tree field;
772 enum tree_code code = TREE_CODE (record_type);
773 bool var_size = false;
774
775 for (field = fieldlist; field; field = TREE_CHAIN (field))
776 {
777 /* We need to make an XVE/XVU record if any field has variable size,
778 whether or not the record does. For example, if we have a union,
779 it may be that all fields, rounded up to the alignment, have the
780 same size, in which case we'll use that size. But the debug
781 output routines (except Dwarf2) won't be able to output the fields,
782 so we need to make the special record. */
783 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
784 /* If a field has a non-constant qualifier, the record will have
785 variable size too. */
786 || (code == QUAL_UNION_TYPE
787 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
788 {
789 var_size = true;
790 break;
791 }
792 }
793
794 /* If this record is of variable size, rename it so that the
795 debugger knows it is and make a new, parallel, record
796 that tells the debugger how the record is laid out. See
797 exp_dbug.ads. But don't do this for records that are padding
798 since they confuse GDB. */
799 if (var_size
800 && !(TREE_CODE (record_type) == RECORD_TYPE
801 && TYPE_IS_PADDING_P (record_type)))
802 {
803 tree new_record_type
804 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
805 ? UNION_TYPE : TREE_CODE (record_type));
806 tree orig_name = TYPE_NAME (record_type);
807 tree orig_id
808 = (TREE_CODE (orig_name) == TYPE_DECL ? DECL_NAME (orig_name)
809 : orig_name);
810 tree new_id
811 = concat_id_with_name (orig_id,
812 TREE_CODE (record_type) == QUAL_UNION_TYPE
813 ? "XVU" : "XVE");
814 tree last_pos = bitsize_zero_node;
815 tree old_field;
816 tree prev_old_field = 0;
817
818 TYPE_NAME (new_record_type) = new_id;
819 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
820 TYPE_STUB_DECL (new_record_type)
 821 = create_type_stub_decl (new_id, new_record_type);
822 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
823 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
824 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
825 TYPE_SIZE_UNIT (new_record_type)
826 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
827
828 add_parallel_type (TYPE_STUB_DECL (record_type), new_record_type);
829
830 /* Now scan all the fields, replacing each field with a new
831 field corresponding to the new encoding. */
832 for (old_field = TYPE_FIELDS (record_type); old_field;
833 old_field = TREE_CHAIN (old_field))
834 {
835 tree field_type = TREE_TYPE (old_field);
836 tree field_name = DECL_NAME (old_field);
837 tree new_field;
838 tree curpos = bit_position (old_field);
839 bool var = false;
840 unsigned int align = 0;
841 tree pos;
842
843 /* See how the position was modified from the last position.
844
845 There are two basic cases we support: a value was added
846 to the last position or the last position was rounded to
 847 a boundary and then something was added. Check for the
848 first case first. If not, see if there is any evidence
849 of rounding. If so, round the last position and try
850 again.
851
852 If this is a union, the position can be taken as zero. */
853
854 /* Some computations depend on the shape of the position expression,
855 so strip conversions to make sure it's exposed. */
856 curpos = remove_conversions (curpos, true);
857
858 if (TREE_CODE (new_record_type) == UNION_TYPE)
859 pos = bitsize_zero_node, align = 0;
860 else
861 pos = compute_related_constant (curpos, last_pos);
862
863 if (!pos && TREE_CODE (curpos) == MULT_EXPR
864 && host_integerp (TREE_OPERAND (curpos, 1), 1))
865 {
866 tree offset = TREE_OPERAND (curpos, 0);
867 align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);
868
869 /* An offset which is a bitwise AND with a negative power of 2
870 means an alignment corresponding to this power of 2. */
871 offset = remove_conversions (offset, true);
872 if (TREE_CODE (offset) == BIT_AND_EXPR
873 && host_integerp (TREE_OPERAND (offset, 1), 0)
874 && tree_int_cst_sgn (TREE_OPERAND (offset, 1)) < 0)
875 {
876 unsigned int pow
877 = - tree_low_cst (TREE_OPERAND (offset, 1), 0);
878 if (exact_log2 (pow) > 0)
879 align *= pow;
880 }
881
882 pos = compute_related_constant (curpos,
883 round_up (last_pos, align));
884 }
885 else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
886 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
887 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
888 && host_integerp (TREE_OPERAND
889 (TREE_OPERAND (curpos, 0), 1),
890 1))
891 {
892 align
893 = tree_low_cst
894 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
895 pos = compute_related_constant (curpos,
896 round_up (last_pos, align));
897 }
898 else if (potential_alignment_gap (prev_old_field, old_field,
899 pos))
900 {
901 align = TYPE_ALIGN (field_type);
902 pos = compute_related_constant (curpos,
903 round_up (last_pos, align));
904 }
905
906 /* If we can't compute a position, set it to zero.
907
908 ??? We really should abort here, but it's too much work
909 to get this correct for all cases. */
910
911 if (!pos)
912 pos = bitsize_zero_node;
913
914 /* See if this type is variable-sized and make a pointer type
915 and indicate the indirection if so. Beware that the debug
916 back-end may adjust the position computed above according
917 to the alignment of the field type, i.e. the pointer type
918 in this case, if we don't preventively counter that. */
919 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
920 {
921 field_type = build_pointer_type (field_type);
922 if (align != 0 && TYPE_ALIGN (field_type) > align)
923 {
924 field_type = copy_node (field_type);
925 TYPE_ALIGN (field_type) = align;
926 }
927 var = true;
928 }
929
930 /* Make a new field name, if necessary. */
931 if (var || align != 0)
932 {
933 char suffix[16];
934
935 if (align != 0)
936 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
937 align / BITS_PER_UNIT);
938 else
939 strcpy (suffix, "XVL");
940
941 field_name = concat_id_with_name (field_name, suffix);
942 }
943
944 new_field = create_field_decl (field_name, field_type,
945 new_record_type, 0,
946 DECL_SIZE (old_field), pos, 0);
947 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
948 TYPE_FIELDS (new_record_type) = new_field;
949
950 /* If old_field is a QUAL_UNION_TYPE, take its size as being
951 zero. The only time it's not the last field of the record
952 is when there are other components at fixed positions after
953 it (meaning there was a rep clause for every field) and we
954 want to be able to encode them. */
955 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
956 (TREE_CODE (TREE_TYPE (old_field))
957 == QUAL_UNION_TYPE)
958 ? bitsize_zero_node
959 : DECL_SIZE (old_field));
960 prev_old_field = old_field;
961 }
962
963 TYPE_FIELDS (new_record_type)
964 = nreverse (TYPE_FIELDS (new_record_type));
965
966 rest_of_type_decl_compilation (TYPE_STUB_DECL (new_record_type));
967 }
968
969 rest_of_type_decl_compilation (TYPE_STUB_DECL (record_type));
970}
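
/* Illustrative example of the encoding implemented above: for a
   variable-sized record type R, a parallel type with an "XVE" suffix
   ("XVU" for a variant record) is built whose fields describe how R is
   actually laid out.  Variable-sized fields are represented through a
   pointer and their name gets an "XVL" suffix, while fields whose
   position had to be rounded up to an alignment of NN bytes get an
   "XVANN" suffix.  See exp_dbug.ads for the authoritative description
   of this encoding.  */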
971
 972/* Append PARALLEL_TYPE on the chain of parallel types of DECL. */
973
974void
975add_parallel_type (tree decl, tree parallel_type)
976{
977 tree d = decl;
978
979 while (DECL_PARALLEL_TYPE (d))
980 d = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (d));
981
982 SET_DECL_PARALLEL_TYPE (d, parallel_type);
983}
984
985/* Return the parallel type associated to a type, if any. */
986
987tree
988get_parallel_type (tree type)
989{
990 if (TYPE_STUB_DECL (type))
991 return DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type));
992 else
993 return NULL_TREE;
994}
995
996/* Utility function of above to merge LAST_SIZE, the previous size of a record
997 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
998 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
999 replace a value of zero with the old size. If HAS_REP is true, we take the
1000 MAX of the end position of this field with LAST_SIZE. In all other cases,
1001 we use FIRST_BIT plus SIZE. Return an expression for the size. */
1002
1003static tree
1004merge_sizes (tree last_size, tree first_bit, tree size, bool special,
1005 bool has_rep)
1006{
1007 tree type = TREE_TYPE (last_size);
1008 tree new;
1009
1010 if (!special || TREE_CODE (size) != COND_EXPR)
1011 {
1012 new = size_binop (PLUS_EXPR, first_bit, size);
1013 if (has_rep)
1014 new = size_binop (MAX_EXPR, last_size, new);
1015 }
1016
1017 else
1018 new = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
1019 integer_zerop (TREE_OPERAND (size, 1))
1020 ? last_size : merge_sizes (last_size, first_bit,
1021 TREE_OPERAND (size, 1),
1022 1, has_rep),
1023 integer_zerop (TREE_OPERAND (size, 2))
1024 ? last_size : merge_sizes (last_size, first_bit,
1025 TREE_OPERAND (size, 2),
1026 1, has_rep));
1027
 1028 /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
1029 when fed through substitute_in_expr) into thinking that a constant
1030 size is not constant. */
1031 while (TREE_CODE (new) == NON_LVALUE_EXPR)
1032 new = TREE_OPERAND (new, 0);
1033
1034 return new;
1035}
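
/* Worked example for merge_sizes (purely illustrative, constant operands):
   for a RECORD_TYPE field with FIRST_BIT = 32 and SIZE = 16 and no rep
   clause (HAS_REP false), the new running size is 32 + 16 = 48.  With a
   rep clause (HAS_REP true) and LAST_SIZE = 64, the result is
   MAX (64, 32 + 16) = 64, since an earlier field may already end at a
   higher position.  */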
1036
1037/* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1038 related by the addition of a constant. Return that constant if so. */
1039
1040static tree
1041compute_related_constant (tree op0, tree op1)
1042{
1043 tree op0_var, op1_var;
1044 tree op0_con = split_plus (op0, &op0_var);
1045 tree op1_con = split_plus (op1, &op1_var);
1046 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1047
1048 if (operand_equal_p (op0_var, op1_var, 0))
1049 return result;
1050 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1051 return result;
1052 else
1053 return 0;
1054}
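
/* Worked example for compute_related_constant (purely illustrative): with
   OP0 = N + 24 and OP1 = N + 8, both in bitsizetype and with the same
   variable part N, the function returns 16; if the variable parts cannot
   be matched, it returns 0.  */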
1055
1056/* Utility function of above to split a tree OP which may be a sum, into a
1057 constant part, which is returned, and a variable part, which is stored
1058 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1059 bitsizetype. */
1060
1061static tree
1062split_plus (tree in, tree *pvar)
1063{
1064 /* Strip NOPS in order to ease the tree traversal and maximize the
1065 potential for constant or plus/minus discovery. We need to be careful
1066 to always return and set *pvar to bitsizetype trees, but it's worth
1067 the effort. */
1068 STRIP_NOPS (in);
1069
1070 *pvar = convert (bitsizetype, in);
1071
1072 if (TREE_CODE (in) == INTEGER_CST)
1073 {
1074 *pvar = bitsize_zero_node;
1075 return convert (bitsizetype, in);
1076 }
1077 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1078 {
1079 tree lhs_var, rhs_var;
1080 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1081 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1082
1083 if (lhs_var == TREE_OPERAND (in, 0)
1084 && rhs_var == TREE_OPERAND (in, 1))
1085 return bitsize_zero_node;
1086
1087 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1088 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1089 }
1090 else
1091 return bitsize_zero_node;
1092}
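
/* Worked examples for split_plus (purely illustrative): for IN = N + 8 the
   function returns 8 and sets *PVAR to N; for IN = 16 it returns 16 and
   sets *PVAR to bitsize_zero_node; for anything that is neither a constant
   nor a PLUS_EXPR/MINUS_EXPR, it returns bitsize_zero_node and leaves the
   whole expression in *PVAR.  */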
1093\f
1094/* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1095 subprogram. If it is void_type_node, then we are dealing with a procedure,
1096 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1097 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1098 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1099 RETURNS_UNCONSTRAINED is true if the function returns an unconstrained
1100 object. RETURNS_BY_REF is true if the function returns by reference.
1101 RETURNS_BY_TARGET_PTR is true if the function is to be passed (as its
1102 first parameter) the address of the place to copy its result. */
1103
1104tree
1105create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
1106 bool returns_unconstrained, bool returns_by_ref,
1107 bool returns_by_target_ptr)
1108{
1109 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1110 the subprogram formal parameters. This list is generated by traversing the
1111 input list of PARM_DECL nodes. */
1112 tree param_type_list = NULL;
1113 tree param_decl;
1114 tree type;
1115
1116 for (param_decl = param_decl_list; param_decl;
1117 param_decl = TREE_CHAIN (param_decl))
1118 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1119 param_type_list);
1120
1121 /* The list of the function parameter types has to be terminated by the void
1122 type to signal to the back-end that we are not dealing with a variable
1123 parameter subprogram, but that the subprogram has a fixed number of
1124 parameters. */
1125 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1126
1127 /* The list of argument types has been created in reverse
1128 so nreverse it. */
1129 param_type_list = nreverse (param_type_list);
1130
1131 type = build_function_type (return_type, param_type_list);
1132
 1133 /* TYPE may have been shared since GCC hashes types. If it already has a
 1134 CICO_LIST, or the new type should have one, make a copy of TYPE. Likewise
 1135 for RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1136 if (TYPE_CI_CO_LIST (type) || cico_list
1137 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1138 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref
1139 || TYPE_RETURNS_BY_TARGET_PTR_P (type) != returns_by_target_ptr)
1140 type = copy_type (type);
1141
1142 TYPE_CI_CO_LIST (type) = cico_list;
1143 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1144 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1145 TYPE_RETURNS_BY_TARGET_PTR_P (type) = returns_by_target_ptr;
1146 return type;
1147}
1148\f
1149/* Return a copy of TYPE but safe to modify in any way. */
1150
1151tree
1152copy_type (tree type)
1153{
1154 tree new = copy_node (type);
1155
1156 /* copy_node clears this field instead of copying it, because it is
1157 aliased with TREE_CHAIN. */
1158 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
1159
1160 TYPE_POINTER_TO (new) = 0;
1161 TYPE_REFERENCE_TO (new) = 0;
1162 TYPE_MAIN_VARIANT (new) = new;
1163 TYPE_NEXT_VARIANT (new) = 0;
1164
1165 return new;
1166}
1167\f
1168/* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1169 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position of
1170 the decl. */
1171
1172tree
1173create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
1174{
1175 /* First build a type for the desired range. */
1176 tree type = build_index_2_type (min, max);
1177
1178 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1179 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1180 is set, but not to INDEX, make a copy of this type with the requested
1181 index type. Note that we have no way of sharing these types, but that's
1182 only a small hole. */
1183 if (TYPE_INDEX_TYPE (type) == index)
1184 return type;
1185 else if (TYPE_INDEX_TYPE (type))
1186 type = copy_type (type);
1187
1188 SET_TYPE_INDEX_TYPE (type, index);
1189 create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);
1190 return type;
1191}
1192\f
1193/* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
1194 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
1195 its data type. */
1196
1197tree
1198create_type_stub_decl (tree type_name, tree type)
1199{
1200 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
1201 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
1202 emitted in DWARF. */
1203 tree type_decl = build_decl (TYPE_DECL, type_name, type);
1204 DECL_ARTIFICIAL (type_decl) = 1;
1205 return type_decl;
1206}
1207
1208/* Return a TYPE_DECL node. TYPE_NAME gives the name of the type and TYPE
1209 is a ..._TYPE node giving its data type. ARTIFICIAL_P is true if this
1210 is a declaration that was generated by the compiler. DEBUG_INFO_P is
1211 true if we need to write debug information about this type. GNAT_NODE
1212 is used for the position of the decl. */
1213
1214tree
1215create_type_decl (tree type_name, tree type, struct attrib *attr_list,
1216 bool artificial_p, bool debug_info_p, Node_Id gnat_node)
1217{
 1218 enum tree_code code = TREE_CODE (type);
1219 bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
1220 tree type_decl;
 1221
1222 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
1223 gcc_assert (!TYPE_IS_DUMMY_P (type));
 1224
1225 /* If the type hasn't been named yet, we're naming it; preserve an existing
1226 TYPE_STUB_DECL that has been attached to it for some purpose. */
1227 if (!named && TYPE_STUB_DECL (type))
1228 {
1229 type_decl = TYPE_STUB_DECL (type);
1230 DECL_NAME (type_decl) = type_name;
1231 }
1232 else
1233 type_decl = build_decl (TYPE_DECL, type_name, type);
 1234
1235 DECL_ARTIFICIAL (type_decl) = artificial_p;
1236 gnat_pushdecl (type_decl, gnat_node);
1237 process_attributes (type_decl, attr_list);
1238
1239 /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
1240 This causes the name to be also viewed as a "tag" by the debug
1241 back-end, with the advantage that no DW_TAG_typedef is emitted
1242 for artificial "tagged" types in DWARF. */
1243 if (!named)
1244 TYPE_STUB_DECL (type) = type_decl;
1245
1246 /* Pass the type declaration to the debug back-end unless this is an
1247 UNCONSTRAINED_ARRAY_TYPE that the back-end does not support, an
1248 ENUMERAL_TYPE or RECORD_TYPE which are handled separately, or a
1249 type for which debugging information was not requested. */
1250 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
1251 DECL_IGNORED_P (type_decl) = 1;
1252 else if (code != ENUMERAL_TYPE
1253 && (code != RECORD_TYPE || TYPE_IS_FAT_POINTER_P (type))
1254 && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
1255 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
1256 rest_of_type_decl_compilation (type_decl);
1257
1258 return type_decl;
1259}
 1260\f
1261/* Return a VAR_DECL or CONST_DECL node.
1262
1263 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
1264 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
1265 the GCC tree for an optional initial expression; NULL_TREE if none.
1266
1267 CONST_FLAG is true if this variable is constant, in which case we might
1268 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
1269
1270 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
1271 definition to be made visible outside of the current compilation unit, for
1272 instance variable definitions in a package specification.
1273
 1274 EXTERN_FLAG is true when processing an external variable declaration (as
1275 opposed to a definition: no storage is to be allocated for the variable).
1276
1277 STATIC_FLAG is only relevant when not at top level. In that case
1278 it indicates whether to always allocate storage to the variable.
1279
1280 GNAT_NODE is used for the position of the decl. */
1281
1282tree
1283create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
1284 bool const_flag, bool public_flag, bool extern_flag,
1285 bool static_flag, bool const_decl_allowed_p,
1286 struct attrib *attr_list, Node_Id gnat_node)
1287{
1288 bool init_const
1289 = (var_init != 0
1290 && gnat_types_compatible_p (type, TREE_TYPE (var_init))
1291 && (global_bindings_p () || static_flag
1292 ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
1293 : TREE_CONSTANT (var_init)));
1294
1295 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
1296 case the initializer may be used in-lieu of the DECL node (as done in
1297 Identifier_to_gnu). This is useful to prevent the need of elaboration
1298 code when an identifier for which such a decl is made is in turn used as
1299 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
1300 but extra constraints apply to this choice (see below) and are not
1301 relevant to the distinction we wish to make. */
1302 bool constant_p = const_flag && init_const;
1303
1304 /* The actual DECL node. CONST_DECL was initially intended for enumerals
1305 and may be used for scalars in general but not for aggregates. */
1306 tree var_decl
1307 = build_decl ((constant_p && const_decl_allowed_p
1308 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
1309 var_name, type);
1310
1311 /* If this is external, throw away any initializations (they will be done
1312 elsewhere) unless this is a constant for which we would like to remain
1313 able to get the initializer. If we are defining a global here, leave a
1314 constant initialization and save any variable elaborations for the
1315 elaboration routine. If we are just annotating types, throw away the
1316 initialization if it isn't a constant. */
1317 if ((extern_flag && !constant_p)
1318 || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
1319 var_init = NULL_TREE;
1320
1321 /* At the global level, an initializer requiring code to be generated
1322 produces elaboration statements. Check that such statements are allowed,
1323 that is, not violating a No_Elaboration_Code restriction. */
1324 if (global_bindings_p () && var_init != 0 && ! init_const)
1325 Check_Elaboration_Code_Allowed (gnat_node);
1326
1327 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
1328 try to fiddle with DECL_COMMON. However, on platforms that don't
1329 support global BSS sections, uninitialized global variables would
1330 go in DATA instead, thus increasing the size of the executable. */
1331 if (!flag_no_common
1332 && TREE_CODE (var_decl) == VAR_DECL
1333 && !have_global_bss_p ())
1334 DECL_COMMON (var_decl) = 1;
1335 DECL_INITIAL (var_decl) = var_init;
1336 TREE_READONLY (var_decl) = const_flag;
1337 DECL_EXTERNAL (var_decl) = extern_flag;
1338 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
1339 TREE_CONSTANT (var_decl) = constant_p;
1340 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1341 = TYPE_VOLATILE (type);
1342
1343 /* If it's public and not external, always allocate storage for it.
1344 At the global binding level we need to allocate static storage for the
1345 variable if and only if it's not external. If we are not at the top level
1346 we allocate automatic storage unless requested not to. */
1347 TREE_STATIC (var_decl)
1348 = !extern_flag && (public_flag || static_flag || global_bindings_p ());
1349
1350 if (asm_name && VAR_OR_FUNCTION_DECL_P (var_decl))
1351 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1352
1353 process_attributes (var_decl, attr_list);
1354
1355 /* Add this decl to the current binding level. */
1356 gnat_pushdecl (var_decl, gnat_node);
1357
1358 if (TREE_SIDE_EFFECTS (var_decl))
1359 TREE_ADDRESSABLE (var_decl) = 1;
1360
1361 if (TREE_CODE (var_decl) != CONST_DECL)
1362 {
1363 if (global_bindings_p ())
1364 rest_of_decl_compilation (var_decl, true, 0);
1365 }
1366 else
1367 expand_decl (var_decl);
1368
1369 return var_decl;
1370}
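
/* Illustrative examples of the CONST_DECL/VAR_DECL choice made above: a
   constant scalar whose initializer is compile-time constant yields a
   CONST_DECL when CONST_DECL_ALLOWED_P is true, whereas a constant
   aggregate, or a constant whose initializer requires elaboration code,
   yields a VAR_DECL with TREE_READONLY set.  */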
1371\f
1372/* Return true if TYPE, an aggregate type, contains (or is) an array. */
1373
1374static bool
1375aggregate_type_contains_array_p (tree type)
1376{
1377 switch (TREE_CODE (type))
1378 {
1379 case RECORD_TYPE:
1380 case UNION_TYPE:
1381 case QUAL_UNION_TYPE:
1382 {
1383 tree field;
1384 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1385 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
1386 && aggregate_type_contains_array_p (TREE_TYPE (field)))
1387 return true;
1388 return false;
1389 }
1390
1391 case ARRAY_TYPE:
1392 return true;
1393
1394 default:
1395 gcc_unreachable ();
1396 }
1397}
1398
1399/* Returns a FIELD_DECL node. FIELD_NAME the field name, FIELD_TYPE is its
1400 type, and RECORD_TYPE is the type of the parent. PACKED is nonzero if
1401 this field is in a record type with a "pragma pack". If SIZE is nonzero
1402 it is the specified size for this field. If POS is nonzero, it is the bit
1403 position. If ADDRESSABLE is nonzero, it means we are allowed to take
1404 the address of this field for aliasing purposes. If it is negative, we
1405 should not make a bitfield, which is used by make_aligning_type. */
1406
1407tree
1408create_field_decl (tree field_name, tree field_type, tree record_type,
1409 int packed, tree size, tree pos, int addressable)
1410{
1411 tree field_decl = build_decl (FIELD_DECL, field_name, field_type);
1412
1413 DECL_CONTEXT (field_decl) = record_type;
1414 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
1415
1416 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1417 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
1418 Likewise for an aggregate without specified position that contains an
1419 array, because in this case slices of variable length of this array
1420 must be handled by GCC and variable-sized objects need to be aligned
1421 to at least a byte boundary. */
1422 if (packed && (TYPE_MODE (field_type) == BLKmode
1423 || (!pos
1424 && AGGREGATE_TYPE_P (field_type)
1425 && aggregate_type_contains_array_p (field_type))))
1426 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
1427
1428 /* If a size is specified, use it. Otherwise, if the record type is packed
1429 compute a size to use, which may differ from the object's natural size.
1430 We always set a size in this case to trigger the checks for bitfield
1431 creation below, which is typically required when no position has been
1432 specified. */
1433 if (size)
1434 size = convert (bitsizetype, size);
1435 else if (packed == 1)
1436 {
1437 size = rm_size (field_type);
1438
1439 /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
1440 byte. */
1441 if (TREE_CODE (size) == INTEGER_CST
1442 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
1443 size = round_up (size, BITS_PER_UNIT);
1444 }
1445
1446 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
1447 specified for two reasons: first if the size differs from the natural
1448 size. Second, if the alignment is insufficient. There are a number of
1449 ways the latter can be true.
1450
1451 We never make a bitfield if the type of the field has a nonconstant size,
1452 because no such entity requiring bitfield operations should reach here.
1453
1454 We do *preventively* make a bitfield when there might be the need for it
1455 but we don't have all the necessary information to decide, as is the case
1456 of a field with no specified position in a packed record.
1457
1458 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
1459 in layout_decl or finish_record_type to clear the bit_field indication if
1460 it is in fact not needed. */
1461 if (addressable >= 0
1462 && size
1463 && TREE_CODE (size) == INTEGER_CST
1464 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1465 && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
1466 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
1467 || packed
1468 || (TYPE_ALIGN (record_type) != 0
1469 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1470 {
1471 DECL_BIT_FIELD (field_decl) = 1;
1472 DECL_SIZE (field_decl) = size;
1473 if (!packed && !pos)
1474 DECL_ALIGN (field_decl)
1475 = (TYPE_ALIGN (record_type) != 0
1476 ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
1477 : TYPE_ALIGN (field_type));
1478 }
1479
1480 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
1481
1482 /* Bump the alignment if need be, either for bitfield/packing purposes or
1483 to satisfy the type requirements if no such consideration applies. When
1484 we get the alignment from the type, indicate if this is from an explicit
1485 user request, which prevents stor-layout from lowering it later on. */
1486 {
 1487 unsigned int bit_align
1488 = (DECL_BIT_FIELD (field_decl) ? 1
1489 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);
1490
1491 if (bit_align > DECL_ALIGN (field_decl))
1492 DECL_ALIGN (field_decl) = bit_align;
1493 else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
1494 {
1495 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
1496 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
1497 }
1498 }
1499
1500 if (pos)
1501 {
1502 /* We need to pass in the alignment the DECL is known to have.
1503 This is the lowest-order bit set in POS, but no more than
1504 the alignment of the record, if one is specified. Note
1505 that an alignment of 0 is taken as infinite. */
1506 unsigned int known_align;
1507
1508 if (host_integerp (pos, 1))
1509 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1510 else
1511 known_align = BITS_PER_UNIT;
1512
1513 if (TYPE_ALIGN (record_type)
1514 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1515 known_align = TYPE_ALIGN (record_type);
1516
1517 layout_decl (field_decl, known_align);
1518 SET_DECL_OFFSET_ALIGN (field_decl,
1519 host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
1520 : BITS_PER_UNIT);
1521 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1522 &DECL_FIELD_BIT_OFFSET (field_decl),
1523 DECL_OFFSET_ALIGN (field_decl), pos);
1524 }
1525
1526 /* In addition to what our caller says, claim the field is addressable if we
1527 know that its type is not suitable.
1528
1529 The field may also be "technically" nonaddressable, meaning that even if
1530 we attempt to take the field's address we will actually get the address
1531 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
1532 value we have at this point is not accurate enough, so we don't account
1533 for this here and let finish_record_type decide. */
4c5a0615 1534 if (!addressable && !type_for_nonaliased_component_p (field_type))
1535 addressable = 1;
1536
1537 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
1538
1539 return field_decl;
1540}
1541\f
1542/* Returns a PARM_DECL node. PARAM_NAME is the name of the parameter,
1543 PARAM_TYPE is its type. READONLY is true if the parameter is
1544 readonly (either an In parameter or an address of a pass-by-ref
1545 parameter). */
1546
1547tree
1548create_param_decl (tree param_name, tree param_type, bool readonly)
1549{
1550 tree param_decl = build_decl (PARM_DECL, param_name, param_type);
1551
1552 /* Honor targetm.calls.promote_prototypes(), as not doing so can
1553 lead to various ABI violations. */
1554 if (targetm.calls.promote_prototypes (param_type)
1555 && (TREE_CODE (param_type) == INTEGER_TYPE
1556 || TREE_CODE (param_type) == ENUMERAL_TYPE
1557 || TREE_CODE (param_type) == BOOLEAN_TYPE)
1558 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1559 {
1560 /* We have to be careful about biased types here. Make a subtype
1561 of integer_type_node with the proper biasing. */
1562 if (TREE_CODE (param_type) == INTEGER_TYPE
1563 && TYPE_BIASED_REPRESENTATION_P (param_type))
1564 {
1565 param_type
1566 = copy_type (build_range_type (integer_type_node,
1567 TYPE_MIN_VALUE (param_type),
1568 TYPE_MAX_VALUE (param_type)));
1569
1570 TYPE_BIASED_REPRESENTATION_P (param_type) = 1;
1571 }
1572 else
1573 param_type = integer_type_node;
1574 }
1575
1576 DECL_ARG_TYPE (param_decl) = param_type;
1577 TREE_READONLY (param_decl) = readonly;
1578 return param_decl;
1579}
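/* A minimal illustrative sketch of the promotion decision above, reduced to
   plain integers: a parameter narrower than int is widened when the target
   asks for promoted prototypes, mirroring C's default argument promotions.
   INT_PREC is an assumed stand-in for TYPE_PRECISION (integer_type_node).  */

#define INT_PREC 32

static unsigned
promoted_precision (unsigned param_prec, int target_promotes)
{
  if (target_promotes && param_prec < INT_PREC)
    return INT_PREC;  /* widen to full int width */
  return param_prec;  /* already wide enough: leave it alone */
}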
1580\f
1581/* Given a DECL and ATTR_LIST, process the listed attributes. */
1582
1583void
1584process_attributes (tree decl, struct attrib *attr_list)
1585{
1586 for (; attr_list; attr_list = attr_list->next)
1587 switch (attr_list->type)
1588 {
1589 case ATTR_MACHINE_ATTRIBUTE:
1590 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
1591 NULL_TREE),
1592 ATTR_FLAG_TYPE_IN_PLACE);
1593 break;
1594
1595 case ATTR_LINK_ALIAS:
1596 if (! DECL_EXTERNAL (decl))
1597 {
1598 TREE_STATIC (decl) = 1;
1599 assemble_alias (decl, attr_list->name);
1600 }
1601 break;
1602
1603 case ATTR_WEAK_EXTERNAL:
1604 if (SUPPORTS_WEAK)
1605 declare_weak (decl);
1606 else
1607 post_error ("?weak declarations not supported on this target",
1608 attr_list->error_point);
1609 break;
1610
1611 case ATTR_LINK_SECTION:
1612 if (targetm.have_named_sections)
1613 {
1614 DECL_SECTION_NAME (decl)
1615 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1616 IDENTIFIER_POINTER (attr_list->name));
1617 DECL_COMMON (decl) = 0;
1618 }
1619 else
1620 post_error ("?section attributes are not supported for this target",
1621 attr_list->error_point);
1622 break;
1623
1624 case ATTR_LINK_CONSTRUCTOR:
1625 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1626 TREE_USED (decl) = 1;
1627 break;
1628
1629 case ATTR_LINK_DESTRUCTOR:
1630 DECL_STATIC_DESTRUCTOR (decl) = 1;
1631 TREE_USED (decl) = 1;
1632 break;
1633
1634 case ATTR_THREAD_LOCAL_STORAGE:
1635 DECL_TLS_MODEL (decl) = decl_default_tls_model (decl);
1636 DECL_COMMON (decl) = 0;
40a14772 1637 break;
1638 }
1639}
1640\f
1641/* Record a global renaming pointer. */
1642
1643void
1644record_global_renaming_pointer (tree decl)
1645{
1646 gcc_assert (DECL_RENAMED_OBJECT (decl));
1647 VEC_safe_push (tree, gc, global_renaming_pointers, decl);
1648}
1649
1650/* Invalidate the global renaming pointers. */
1651
1652void
1653invalidate_global_renaming_pointers (void)
1654{
1655 unsigned int i;
1656 tree iter;
1657
1658 for (i = 0; VEC_iterate(tree, global_renaming_pointers, i, iter); i++)
1659 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
1660
1661 VEC_free (tree, gc, global_renaming_pointers);
1662}
1663
 1664 /* Return true if VALUE is known to be a multiple of FACTOR, which must be
1665 a power of 2. */
1666
1667bool
1668value_factor_p (tree value, HOST_WIDE_INT factor)
1669{
1670 if (host_integerp (value, 1))
1671 return tree_low_cst (value, 1) % factor == 0;
1672
1673 if (TREE_CODE (value) == MULT_EXPR)
1674 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1675 || value_factor_p (TREE_OPERAND (value, 1), factor));
1676
1677 return false;
1678}
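/* A minimal standalone sketch of the same conservative test on plain host
   integers: a value represented as either a constant or a product is a
   multiple of FACTOR if the constant is, or if either operand of the
   product is.  The struct below is an assumed stand-in for tree nodes.  */

#include <stdbool.h>

struct val { bool is_const; long cst; struct val *mul0, *mul1; };

static bool
sketch_value_factor_p (const struct val *v, long factor)
{
  if (v->is_const)
    return v->cst % factor == 0;          /* constant case */
  if (v->mul0 && v->mul1)                 /* analogue of a MULT_EXPR */
    return sketch_value_factor_p (v->mul0, factor)
           || sketch_value_factor_p (v->mul1, factor);
  return false;                           /* unknown: be conservative */
}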
1679
1680/* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
1681 unless we can prove these 2 fields are laid out in such a way that no gap
 1682 exists between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
1683 is the distance in bits between the end of PREV_FIELD and the starting
1684 position of CURR_FIELD. It is ignored if null. */
1685
1686static bool
1687potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
1688{
1689 /* If this is the first field of the record, there cannot be any gap */
1690 if (!prev_field)
1691 return false;
1692
1693 /* If the previous field is a union type, then return False: The only
1694 time when such a field is not the last field of the record is when
1695 there are other components at fixed positions after it (meaning there
1696 was a rep clause for every field), in which case we don't want the
1697 alignment constraint to override them. */
1698 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
1699 return false;
1700
1701 /* If the distance between the end of prev_field and the beginning of
1702 curr_field is constant, then there is a gap if the value of this
 1703 constant is not zero. */
1704 if (offset && host_integerp (offset, 1))
1705 return !integer_zerop (offset);
1706
1707 /* If the size and position of the previous field are constant,
1708 then check the sum of this size and position. There will be a gap
 1709 iff it is not a multiple of the current field alignment. */
1710 if (host_integerp (DECL_SIZE (prev_field), 1)
1711 && host_integerp (bit_position (prev_field), 1))
1712 return ((tree_low_cst (bit_position (prev_field), 1)
1713 + tree_low_cst (DECL_SIZE (prev_field), 1))
1714 % DECL_ALIGN (curr_field) != 0);
1715
1716 /* If both the position and size of the previous field are multiples
1717 of the current field alignment, there cannot be any gap. */
1718 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
1719 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
1720 return false;
1721
1722 /* Fallback, return that there may be a potential gap */
1723 return true;
1724}
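/* A standalone sketch of the constant-size/position check above, on plain
   integers: with the previous field ending at PREV_POS_BITS + PREV_SIZE_BITS,
   a gap can appear before the current field iff that end is not a multiple
   of the current field's alignment.  The parameter names are assumptions.  */

#include <stdbool.h>

static bool
sketch_gap_before_curr (unsigned long prev_pos_bits,
                        unsigned long prev_size_bits,
                        unsigned long curr_align_bits)
{
  return (prev_pos_bits + prev_size_bits) % curr_align_bits != 0;
}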
1725
1726/* Returns a LABEL_DECL node for LABEL_NAME. */
1727
1728tree
1729create_label_decl (tree label_name)
1730{
1731 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
1732
1733 DECL_CONTEXT (label_decl) = current_function_decl;
1734 DECL_MODE (label_decl) = VOIDmode;
1735 DECL_SOURCE_LOCATION (label_decl) = input_location;
1736
1737 return label_decl;
1738}
1739\f
1740/* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1741 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1742 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1743 PARM_DECL nodes chained through the TREE_CHAIN field).
1744
1745 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1746 appropriate fields in the FUNCTION_DECL. GNAT_NODE gives the location. */
1747
1748tree
1749create_subprog_decl (tree subprog_name, tree asm_name,
1750 tree subprog_type, tree param_decl_list, bool inline_flag,
1751 bool public_flag, bool extern_flag,
1752 struct attrib *attr_list, Node_Id gnat_node)
1753{
1754 tree return_type = TREE_TYPE (subprog_type);
1755 tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);
1756
1757 /* If this is a non-inline function nested inside an inlined external
1758 function, we cannot honor both requests without cloning the nested
1759 function in the current unit since it is private to the other unit.
1760 We could inline the nested function as well but it's probably better
1761 to err on the side of too little inlining. */
1762 if (!inline_flag
1763 && current_function_decl
1764 && DECL_DECLARED_INLINE_P (current_function_decl)
a1ab4c31 1765 && DECL_EXTERNAL (current_function_decl))
d84b344a 1766 DECL_DECLARED_INLINE_P (current_function_decl) = 0;
1767
1768 DECL_EXTERNAL (subprog_decl) = extern_flag;
1769 TREE_PUBLIC (subprog_decl) = public_flag;
1770 TREE_STATIC (subprog_decl) = 1;
1771 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1772 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1773 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
d84b344a 1774 DECL_DECLARED_INLINE_P (subprog_decl) = inline_flag;
1775 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1776 DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);
1777 DECL_ARTIFICIAL (DECL_RESULT (subprog_decl)) = 1;
1778 DECL_IGNORED_P (DECL_RESULT (subprog_decl)) = 1;
1779
1780 /* TREE_ADDRESSABLE is set on the result type to request the use of the
1781 target by-reference return mechanism. This is not supported all the
1782 way down to RTL expansion with GCC 4, which ICEs on temporary creation
1783 attempts with such a type and expects DECL_BY_REFERENCE to be set on
1784 the RESULT_DECL instead - see gnat_genericize for more details. */
1785 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (subprog_decl))))
1786 {
1787 tree result_decl = DECL_RESULT (subprog_decl);
1788
1789 TREE_ADDRESSABLE (TREE_TYPE (result_decl)) = 0;
1790 DECL_BY_REFERENCE (result_decl) = 1;
1791 }
1792
1793 if (asm_name)
1794 {
1795 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1796
1797 /* The expand_main_function circuitry expects "main_identifier_node" to
1798 designate the DECL_NAME of the 'main' entry point, in turn expected
1799 to be declared as the "main" function literally by default. Ada
1800 program entry points are typically declared with a different name
1801 within the binder generated file, exported as 'main' to satisfy the
1802 system expectations. Redirect main_identifier_node in this case. */
1803 if (asm_name == main_identifier_node)
1804 main_identifier_node = DECL_NAME (subprog_decl);
1805 }
1806
1807 process_attributes (subprog_decl, attr_list);
1808
1809 /* Add this decl to the current binding level. */
1810 gnat_pushdecl (subprog_decl, gnat_node);
1811
1812 /* Output the assembler code and/or RTL for the declaration. */
1813 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
1814
1815 return subprog_decl;
1816}
1817\f
1818/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1819 body. This routine needs to be invoked before processing the declarations
1820 appearing in the subprogram. */
1821
1822void
1823begin_subprog_body (tree subprog_decl)
1824{
1825 tree param_decl;
1826
1827 current_function_decl = subprog_decl;
1828 announce_function (subprog_decl);
1829
1830 /* Enter a new binding level and show that all the parameters belong to
1831 this function. */
1832 gnat_pushlevel ();
1833 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
1834 param_decl = TREE_CHAIN (param_decl))
1835 DECL_CONTEXT (param_decl) = subprog_decl;
1836
1837 make_decl_rtl (subprog_decl);
1838
1839 /* We handle pending sizes via the elaboration of types, so we don't need to
1840 save them. This causes them to be marked as part of the outer function
1841 and then discarded. */
1842 get_pending_sizes ();
1843}
1844
1845
1846/* Helper for the genericization callback. Return a dereference of VAL
1847 if it is of a reference type. */
1848
1849static tree
1850convert_from_reference (tree val)
1851{
1852 tree value_type, ref;
1853
1854 if (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE)
1855 return val;
1856
1857 value_type = TREE_TYPE (TREE_TYPE (val));
1858 ref = build1 (INDIRECT_REF, value_type, val);
1859
1860 /* See if what we reference is CONST or VOLATILE, which requires
1861 looking into array types to get to the component type. */
1862
1863 while (TREE_CODE (value_type) == ARRAY_TYPE)
1864 value_type = TREE_TYPE (value_type);
1865
1866 TREE_READONLY (ref)
1867 = (TYPE_QUALS (value_type) & TYPE_QUAL_CONST);
1868 TREE_THIS_VOLATILE (ref)
1869 = (TYPE_QUALS (value_type) & TYPE_QUAL_VOLATILE);
1870
1871 TREE_SIDE_EFFECTS (ref)
1872 = (TREE_THIS_VOLATILE (ref) || TREE_SIDE_EFFECTS (val));
1873
1874 return ref;
1875}
1876
1877/* Helper for the genericization callback. Returns true if T denotes
1878 a RESULT_DECL with DECL_BY_REFERENCE set. */
1879
1880static inline bool
1881is_byref_result (tree t)
1882{
1883 return (TREE_CODE (t) == RESULT_DECL && DECL_BY_REFERENCE (t));
1884}
1885
1886
1887/* Tree walking callback for gnat_genericize. Currently ...
1888
1889 o Adjust references to the function's DECL_RESULT if it is marked
1890 DECL_BY_REFERENCE and so has had its type turned into a reference
1891 type at the end of the function compilation. */
1892
1893static tree
1894gnat_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
1895{
 1896 /* This implementation is modeled after what the C++ front-end does,
 1897 which is the basis of the downstream passes' behavior. */
1898
1899 tree stmt = *stmt_p;
1900 struct pointer_set_t *p_set = (struct pointer_set_t*) data;
1901
1902 /* If we have a direct mention of the result decl, dereference. */
1903 if (is_byref_result (stmt))
1904 {
1905 *stmt_p = convert_from_reference (stmt);
1906 *walk_subtrees = 0;
1907 return NULL;
1908 }
1909
1910 /* Otherwise, no need to walk the same tree twice. */
1911 if (pointer_set_contains (p_set, stmt))
1912 {
1913 *walk_subtrees = 0;
1914 return NULL_TREE;
1915 }
1916
1917 /* If we are taking the address of what now is a reference, just get the
1918 reference value. */
1919 if (TREE_CODE (stmt) == ADDR_EXPR
1920 && is_byref_result (TREE_OPERAND (stmt, 0)))
1921 {
1922 *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
1923 *walk_subtrees = 0;
1924 }
1925
 1926 /* Don't dereference a by-reference RESULT_DECL inside a RETURN_EXPR. */
1927 else if (TREE_CODE (stmt) == RETURN_EXPR
1928 && TREE_OPERAND (stmt, 0)
1929 && is_byref_result (TREE_OPERAND (stmt, 0)))
1930 *walk_subtrees = 0;
1931
1932 /* Don't look inside trees that cannot embed references of interest. */
1933 else if (IS_TYPE_OR_DECL_P (stmt))
1934 *walk_subtrees = 0;
1935
1936 pointer_set_insert (p_set, *stmt_p);
1937
1938 return NULL;
1939}
1940
1941/* Perform lowering of Ada trees to GENERIC. In particular:
1942
1943 o Turn a DECL_BY_REFERENCE RESULT_DECL into a real by-reference decl
1944 and adjust all the references to this decl accordingly. */
1945
1946static void
1947gnat_genericize (tree fndecl)
1948{
1949 /* Prior to GCC 4, an explicit By_Reference result mechanism for a function
1950 was handled by simply setting TREE_ADDRESSABLE on the result type.
1951 Everything required to actually pass by invisible ref using the target
1952 mechanism (e.g. extra parameter) was handled at RTL expansion time.
1953
1954 This doesn't work with GCC 4 any more for several reasons. First, the
1955 gimplification process might need the creation of temporaries of this
1956 type, and the gimplifier ICEs on such attempts. Second, the middle-end
1957 now relies on a different attribute for such cases (DECL_BY_REFERENCE on
1958 RESULT/PARM_DECLs), and expects the user invisible by-reference-ness to
1959 be explicitly accounted for by the front-end in the function body.
1960
1961 We achieve the complete transformation in two steps:
1962
1963 1/ create_subprog_decl performs early attribute tweaks: it clears
1964 TREE_ADDRESSABLE from the result type and sets DECL_BY_REFERENCE on
1965 the result decl. The former ensures that the bit isn't set in the GCC
1966 tree saved for the function, so prevents ICEs on temporary creation.
1967 The latter we use here to trigger the rest of the processing.
1968
1969 2/ This function performs the type transformation on the result decl
1970 and adjusts all the references to this decl from the function body
1971 accordingly.
1972
1973 Clearing TREE_ADDRESSABLE from the type differs from the C++ front-end
1974 strategy, which escapes the gimplifier temporary creation issues by
 1975 creating its own temporaries using TARGET_EXPR nodes. Our way relies
1976 on simple specific support code in aggregate_value_p to look at the
1977 target function result decl explicitly. */
1978
1979 struct pointer_set_t *p_set;
1980 tree decl_result = DECL_RESULT (fndecl);
1981
1982 if (!DECL_BY_REFERENCE (decl_result))
1983 return;
1984
1985 /* Make the DECL_RESULT explicitly by-reference and adjust all the
1986 occurrences in the function body using the common tree-walking facility.
1987 We want to see every occurrence of the result decl to adjust the
1988 referencing tree, so need to use our own pointer set to control which
1989 trees should be visited again or not. */
1990
1991 p_set = pointer_set_create ();
1992
1993 TREE_TYPE (decl_result) = build_reference_type (TREE_TYPE (decl_result));
1994 TREE_ADDRESSABLE (decl_result) = 0;
1995 relayout_decl (decl_result);
1996
1997 walk_tree (&DECL_SAVED_TREE (fndecl), gnat_genericize_r, p_set, NULL);
1998
1999 pointer_set_destroy (p_set);
2000}
2001
2002/* Finish the definition of the current subprogram BODY and compile it all the
2003 way to assembler language output. ELAB_P tells if this is called for an
2004 elaboration routine, to be entirely discarded if empty. */
2005
2006void
2007end_subprog_body (tree body, bool elab_p)
2008{
2009 tree fndecl = current_function_decl;
2010
2011 /* Mark the BLOCK for this level as being for this function and pop the
2012 level. Since the vars in it are the parameters, clear them. */
2013 BLOCK_VARS (current_binding_level->block) = 0;
2014 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
2015 DECL_INITIAL (fndecl) = current_binding_level->block;
2016 gnat_poplevel ();
2017
2018 /* We handle pending sizes via the elaboration of types, so we don't
2019 need to save them. */
2020 get_pending_sizes ();
2021
2022 /* Mark the RESULT_DECL as being in this subprogram. */
2023 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
2024
2025 DECL_SAVED_TREE (fndecl) = body;
2026
2027 current_function_decl = DECL_CONTEXT (fndecl);
2028 set_cfun (NULL);
2029
2030 /* We cannot track the location of errors past this point. */
2031 error_gnat_node = Empty;
2032
2033 /* If we're only annotating types, don't actually compile this function. */
2034 if (type_annotate_only)
2035 return;
2036
2037 /* Perform the required pre-gimplification transformations on the tree. */
2038 gnat_genericize (fndecl);
2039
2040 /* We do different things for nested and non-nested functions.
2041 ??? This should be in cgraph. */
2042 if (!DECL_CONTEXT (fndecl))
2043 {
2044 gnat_gimplify_function (fndecl);
2045
2046 /* If this is an empty elaboration proc, just discard the node.
2047 Otherwise, compile further. */
2048 if (elab_p && empty_body_p (gimple_body (fndecl)))
2049 cgraph_remove_node (cgraph_node (fndecl));
2050 else
2051 cgraph_finalize_function (fndecl, false);
2052 }
2053 else
2054 /* Register this function with cgraph just far enough to get it
2055 added to our parent's nested function list. */
2056 (void) cgraph_node (fndecl);
2057}
2058
2059/* Convert FNDECL's code to GIMPLE and handle any nested functions. */
2060
2061static void
2062gnat_gimplify_function (tree fndecl)
2063{
2064 struct cgraph_node *cgn;
2065
2066 dump_function (TDI_original, fndecl);
2067 gimplify_function_tree (fndecl);
2068 dump_function (TDI_generic, fndecl);
2069
2070 /* Convert all nested functions to GIMPLE now. We do things in this order
2071 so that items like VLA sizes are expanded properly in the context of the
2072 correct function. */
2073 cgn = cgraph_node (fndecl);
2074 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
2075 gnat_gimplify_function (cgn->decl);
2076}
2077
2078tree
2079gnat_builtin_function (tree decl)
2080{
2081 gnat_pushdecl (decl, Empty);
2082 return decl;
2083}
2084
2085/* Return an integer type with the number of bits of precision given by
2086 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2087 it is a signed type. */
2088
2089tree
2090gnat_type_for_size (unsigned precision, int unsignedp)
2091{
2092 tree t;
2093 char type_name[20];
2094
2095 if (precision <= 2 * MAX_BITS_PER_WORD
2096 && signed_and_unsigned_types[precision][unsignedp])
2097 return signed_and_unsigned_types[precision][unsignedp];
2098
2099 if (unsignedp)
2100 t = make_unsigned_type (precision);
2101 else
2102 t = make_signed_type (precision);
2103
2104 if (precision <= 2 * MAX_BITS_PER_WORD)
2105 signed_and_unsigned_types[precision][unsignedp] = t;
2106
2107 if (!TYPE_NAME (t))
2108 {
2109 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
2110 TYPE_NAME (t) = get_identifier (type_name);
2111 }
2112
2113 return t;
2114}
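/* A minimal sketch of the caching pattern used by gnat_type_for_size, with
   opaque pointers standing in for tree nodes.  MAX_BITS plays the role of
   2 * MAX_BITS_PER_WORD and its value is an assumption; UNSIGNEDP is
   expected to be 0 or 1, as in the function above.  */

#define MAX_BITS 128

static void *type_cache[MAX_BITS + 1][2];

static void *
cached_type_for_size (unsigned precision, int unsignedp,
                      void *(*make_type) (unsigned, int))
{
  void *t;

  if (precision <= MAX_BITS && type_cache[precision][unsignedp])
    return type_cache[precision][unsignedp];  /* already built: reuse it */

  t = make_type (precision, unsignedp);       /* build it once */

  if (precision <= MAX_BITS)
    type_cache[precision][unsignedp] = t;     /* remember it for next time */

  return t;
}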
2115
2116/* Likewise for floating-point types. */
2117
2118static tree
2119float_type_for_precision (int precision, enum machine_mode mode)
2120{
2121 tree t;
2122 char type_name[20];
2123
2124 if (float_types[(int) mode])
2125 return float_types[(int) mode];
2126
2127 float_types[(int) mode] = t = make_node (REAL_TYPE);
2128 TYPE_PRECISION (t) = precision;
2129 layout_type (t);
2130
2131 gcc_assert (TYPE_MODE (t) == mode);
2132 if (!TYPE_NAME (t))
2133 {
2134 sprintf (type_name, "FLOAT_%d", precision);
2135 TYPE_NAME (t) = get_identifier (type_name);
2136 }
2137
2138 return t;
2139}
2140
2141/* Return a data type that has machine mode MODE. UNSIGNEDP selects
2142 an unsigned type; otherwise a signed type is returned. */
2143
2144tree
2145gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2146{
2147 if (mode == BLKmode)
2148 return NULL_TREE;
2149 else if (mode == VOIDmode)
2150 return void_type_node;
2151 else if (COMPLEX_MODE_P (mode))
2152 return NULL_TREE;
2153 else if (SCALAR_FLOAT_MODE_P (mode))
2154 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2155 else if (SCALAR_INT_MODE_P (mode))
2156 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2157 else
2158 return NULL_TREE;
2159}
2160
2161/* Return the unsigned version of a TYPE_NODE, a scalar type. */
2162
2163tree
2164gnat_unsigned_type (tree type_node)
2165{
2166 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2167
2168 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2169 {
2170 type = copy_node (type);
2171 TREE_TYPE (type) = type_node;
2172 }
2173 else if (TREE_TYPE (type_node)
2174 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2175 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2176 {
2177 type = copy_node (type);
2178 TREE_TYPE (type) = TREE_TYPE (type_node);
2179 }
2180
2181 return type;
2182}
2183
2184/* Return the signed version of a TYPE_NODE, a scalar type. */
2185
2186tree
2187gnat_signed_type (tree type_node)
2188{
2189 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2190
2191 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2192 {
2193 type = copy_node (type);
2194 TREE_TYPE (type) = type_node;
2195 }
2196 else if (TREE_TYPE (type_node)
2197 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2198 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2199 {
2200 type = copy_node (type);
2201 TREE_TYPE (type) = TREE_TYPE (type_node);
2202 }
2203
2204 return type;
2205}
2206
2207/* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2208 transparently converted to each other. */
2209
2210int
2211gnat_types_compatible_p (tree t1, tree t2)
2212{
2213 enum tree_code code;
2214
2215 /* This is the default criterion. */
2216 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
2217 return 1;
2218
2219 /* We only check structural equivalence here. */
2220 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
2221 return 0;
2222
2223 /* Array types are also compatible if they are constrained and have
2224 the same component type and the same domain. */
2225 if (code == ARRAY_TYPE
2226 && TREE_TYPE (t1) == TREE_TYPE (t2)
2227 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
2228 || (TYPE_DOMAIN (t1)
2229 && TYPE_DOMAIN (t2)
2230 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
2231 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
2232 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
2233 TYPE_MAX_VALUE (TYPE_DOMAIN (t2))))))
2234 return 1;
2235
2236 /* Padding record types are also compatible if they pad the same
2237 type and have the same constant size. */
2238 if (code == RECORD_TYPE
2239 && TYPE_IS_PADDING_P (t1) && TYPE_IS_PADDING_P (t2)
2240 && TREE_TYPE (TYPE_FIELDS (t1)) == TREE_TYPE (TYPE_FIELDS (t2))
2241 && tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2)))
2242 return 1;
2243
2244 return 0;
2245}
2246\f
2247/* EXP is an expression for the size of an object. If this size contains
2248 discriminant references, replace them with the maximum (if MAX_P) or
2249 minimum (if !MAX_P) possible value of the discriminant. */
2250
2251tree
2252max_size (tree exp, bool max_p)
2253{
2254 enum tree_code code = TREE_CODE (exp);
2255 tree type = TREE_TYPE (exp);
2256
2257 switch (TREE_CODE_CLASS (code))
2258 {
2259 case tcc_declaration:
2260 case tcc_constant:
2261 return exp;
2262
2263 case tcc_vl_exp:
2264 if (code == CALL_EXPR)
2265 {
2266 tree *argarray;
2267 int i, n = call_expr_nargs (exp);
2268 gcc_assert (n > 0);
2269
2270 argarray = (tree *) alloca (n * sizeof (tree));
2271 for (i = 0; i < n; i++)
2272 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
2273 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
2274 }
2275 break;
2276
2277 case tcc_reference:
2278 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2279 modify. Otherwise, we treat it like a variable. */
2280 if (!CONTAINS_PLACEHOLDER_P (exp))
2281 return exp;
2282
2283 type = TREE_TYPE (TREE_OPERAND (exp, 1));
2284 return
2285 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);
2286
2287 case tcc_comparison:
2288 return max_p ? size_one_node : size_zero_node;
2289
2290 case tcc_unary:
2291 case tcc_binary:
2292 case tcc_expression:
2293 switch (TREE_CODE_LENGTH (code))
2294 {
2295 case 1:
2296 if (code == NON_LVALUE_EXPR)
2297 return max_size (TREE_OPERAND (exp, 0), max_p);
2298 else
2299 return
2300 fold_build1 (code, type,
2301 max_size (TREE_OPERAND (exp, 0),
2302 code == NEGATE_EXPR ? !max_p : max_p));
2303
2304 case 2:
2305 if (code == COMPOUND_EXPR)
2306 return max_size (TREE_OPERAND (exp, 1), max_p);
2307
2308 /* Calculate "(A ? B : C) - D" as "A ? B - D : C - D" which
2309 may provide a tighter bound on max_size. */
2310 if (code == MINUS_EXPR
2311 && TREE_CODE (TREE_OPERAND (exp, 0)) == COND_EXPR)
2312 {
2313 tree lhs = fold_build2 (MINUS_EXPR, type,
2314 TREE_OPERAND (TREE_OPERAND (exp, 0), 1),
2315 TREE_OPERAND (exp, 1));
2316 tree rhs = fold_build2 (MINUS_EXPR, type,
2317 TREE_OPERAND (TREE_OPERAND (exp, 0), 2),
2318 TREE_OPERAND (exp, 1));
2319 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2320 max_size (lhs, max_p),
2321 max_size (rhs, max_p));
2322 }
2323
2324 {
2325 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2326 tree rhs = max_size (TREE_OPERAND (exp, 1),
2327 code == MINUS_EXPR ? !max_p : max_p);
2328
2329 /* Special-case wanting the maximum value of a MIN_EXPR.
2330 In that case, if one side overflows, return the other.
2331 sizetype is signed, but we know sizes are non-negative.
2332 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2333 overflowing or the maximum possible value and the RHS
2334 a variable. */
2335 if (max_p
2336 && code == MIN_EXPR
2337 && TREE_CODE (rhs) == INTEGER_CST
2338 && TREE_OVERFLOW (rhs))
2339 return lhs;
2340 else if (max_p
2341 && code == MIN_EXPR
2342 && TREE_CODE (lhs) == INTEGER_CST
2343 && TREE_OVERFLOW (lhs))
2344 return rhs;
2345 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2346 && ((TREE_CODE (lhs) == INTEGER_CST
2347 && TREE_OVERFLOW (lhs))
2348 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
2349 && !TREE_CONSTANT (rhs))
2350 return lhs;
2351 else
2352 return fold_build2 (code, type, lhs, rhs);
2353 }
2354
2355 case 3:
2356 if (code == SAVE_EXPR)
2357 return exp;
2358 else if (code == COND_EXPR)
2359 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2360 max_size (TREE_OPERAND (exp, 1), max_p),
2361 max_size (TREE_OPERAND (exp, 2), max_p));
2362 }
2363
2364 /* Other tree classes cannot happen. */
2365 default:
2366 break;
2367 }
2368
2369 gcc_unreachable ();
2370}
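/* A standalone sketch of the bound-flipping rule max_size applies to a binary
   MINUS_EXPR: the maximum of A - B uses the maximum of A and the *minimum*
   of B, and conversely for the minimum.  Plain longs stand in for size
   trees; the lo/hi arguments are assumed per-operand bounds.  */

static long
sketch_bound_of_difference (long a_lo, long a_hi,
                            long b_lo, long b_hi, int max_p)
{
  long a = max_p ? a_hi : a_lo;
  long b = max_p ? b_lo : b_hi;  /* note the flipped bound for B */
  return a - b;
}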
2371\f
2372/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2373 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2374 Return a constructor for the template. */
2375
2376tree
2377build_template (tree template_type, tree array_type, tree expr)
2378{
2379 tree template_elts = NULL_TREE;
2380 tree bound_list = NULL_TREE;
2381 tree field;
2382
2383 while (TREE_CODE (array_type) == RECORD_TYPE
2384 && (TYPE_IS_PADDING_P (array_type)
2385 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
2386 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
2387
2388 if (TREE_CODE (array_type) == ARRAY_TYPE
2389 || (TREE_CODE (array_type) == INTEGER_TYPE
2390 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2391 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2392
2393 /* First make the list for a CONSTRUCTOR for the template. Go down the
2394 field list of the template instead of the type chain because this
2395 array might be an Ada array of arrays and we can't tell where the
2396 nested arrays stop being the underlying object. */
2397
2398 for (field = TYPE_FIELDS (template_type); field;
2399 (bound_list
2400 ? (bound_list = TREE_CHAIN (bound_list))
2401 : (array_type = TREE_TYPE (array_type))),
2402 field = TREE_CHAIN (TREE_CHAIN (field)))
2403 {
2404 tree bounds, min, max;
2405
2406 /* If we have a bound list, get the bounds from there. Likewise
2407 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2408 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2409 This will give us a maximum range. */
2410 if (bound_list)
2411 bounds = TREE_VALUE (bound_list);
2412 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2413 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2414 else if (expr && TREE_CODE (expr) == PARM_DECL
2415 && DECL_BY_COMPONENT_PTR_P (expr))
2416 bounds = TREE_TYPE (field);
2417 else
2418 gcc_unreachable ();
2419
2420 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
2421 max = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MAX_VALUE (bounds));
2422
2423 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2424 substitute it from OBJECT. */
2425 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
2426 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
2427
2428 template_elts = tree_cons (TREE_CHAIN (field), max,
2429 tree_cons (field, min, template_elts));
2430 }
2431
2432 return gnat_build_constructor (template_type, nreverse (template_elts));
2433}
2434\f
6ca2b0a0 2435/* Build a 32bit VMS descriptor from a Mechanism_Type, which must specify
2436 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2437 in the type contains in its DECL_INITIAL the expression to use when
2438 a constructor is made for the type. GNAT_ENTITY is an entity used
2439 to print out an error message if the mechanism cannot be applied to
2440 an object of that type and also for the name. */
2441
2442tree
d628c015 2443build_vms_descriptor32 (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2444{
2445 tree record_type = make_node (RECORD_TYPE);
2446 tree pointer32_type;
2447 tree field_list = 0;
2448 int class;
2449 int dtype = 0;
2450 tree inner_type;
2451 int ndim;
2452 int i;
2453 tree *idx_arr;
2454 tree tem;
2455
2456 /* If TYPE is an unconstrained array, use the underlying array type. */
2457 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2458 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2459
2460 /* If this is an array, compute the number of dimensions in the array,
2461 get the index types, and point to the inner type. */
2462 if (TREE_CODE (type) != ARRAY_TYPE)
2463 ndim = 0;
2464 else
2465 for (ndim = 1, inner_type = type;
2466 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2467 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2468 ndim++, inner_type = TREE_TYPE (inner_type))
2469 ;
2470
2471 idx_arr = (tree *) alloca (ndim * sizeof (tree));
2472
d628c015 2473 if (mech != By_Descriptor_NCA && mech != By_Short_Descriptor_NCA
2474 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2475 for (i = ndim - 1, inner_type = type;
2476 i >= 0;
2477 i--, inner_type = TREE_TYPE (inner_type))
2478 idx_arr[i] = TYPE_DOMAIN (inner_type);
2479 else
2480 for (i = 0, inner_type = type;
2481 i < ndim;
2482 i++, inner_type = TREE_TYPE (inner_type))
2483 idx_arr[i] = TYPE_DOMAIN (inner_type);
2484
2485 /* Now get the DTYPE value. */
2486 switch (TREE_CODE (type))
2487 {
2488 case INTEGER_TYPE:
2489 case ENUMERAL_TYPE:
01ddebf2 2490 case BOOLEAN_TYPE:
2491 if (TYPE_VAX_FLOATING_POINT_P (type))
2492 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2493 {
2494 case 6:
2495 dtype = 10;
2496 break;
2497 case 9:
2498 dtype = 11;
2499 break;
2500 case 15:
2501 dtype = 27;
2502 break;
2503 }
2504 else
2505 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2506 {
2507 case 8:
2508 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2509 break;
2510 case 16:
2511 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2512 break;
2513 case 32:
2514 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2515 break;
2516 case 64:
2517 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2518 break;
2519 case 128:
2520 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2521 break;
2522 }
2523 break;
2524
2525 case REAL_TYPE:
2526 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2527 break;
2528
2529 case COMPLEX_TYPE:
2530 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2531 && TYPE_VAX_FLOATING_POINT_P (type))
2532 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2533 {
2534 case 6:
2535 dtype = 12;
2536 break;
2537 case 9:
2538 dtype = 13;
2539 break;
2540 case 15:
2541 dtype = 29;
2542 }
2543 else
2544 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2545 break;
2546
2547 case ARRAY_TYPE:
2548 dtype = 14;
2549 break;
2550
2551 default:
2552 break;
2553 }
2554
2555 /* Get the CLASS value. */
2556 switch (mech)
2557 {
2558 case By_Descriptor_A:
d628c015 2559 case By_Short_Descriptor_A:
2560 class = 4;
2561 break;
2562 case By_Descriptor_NCA:
d628c015 2563 case By_Short_Descriptor_NCA:
2564 class = 10;
2565 break;
2566 case By_Descriptor_SB:
d628c015 2567 case By_Short_Descriptor_SB:
2568 class = 15;
2569 break;
2570 case By_Descriptor:
d628c015 2571 case By_Short_Descriptor:
a1ab4c31 2572 case By_Descriptor_S:
d628c015 2573 case By_Short_Descriptor_S:
2574 default:
2575 class = 1;
2576 break;
2577 }
2578
2579 /* Make the type for a descriptor for VMS. The first four fields
2580 are the same for all types. */
2581
2582 field_list
2583 = chainon (field_list,
2584 make_descriptor_field
2585 ("LENGTH", gnat_type_for_size (16, 1), record_type,
2586 size_in_bytes ((mech == By_Descriptor_A ||
2587 mech == By_Short_Descriptor_A)
2588 ? inner_type : type)));
2589
2590 field_list = chainon (field_list,
2591 make_descriptor_field ("DTYPE",
2592 gnat_type_for_size (8, 1),
2593 record_type, size_int (dtype)));
2594 field_list = chainon (field_list,
2595 make_descriptor_field ("CLASS",
2596 gnat_type_for_size (8, 1),
2597 record_type, size_int (class)));
2598
2599 /* Of course this will crash at run-time if the address space is not
2600 within the low 32 bits, but there is nothing else we can do. */
2601 pointer32_type = build_pointer_type_for_mode (type, SImode, false);
2602
2603 field_list
2604 = chainon (field_list,
2605 make_descriptor_field
2606 ("POINTER", pointer32_type, record_type,
2607 build_unary_op (ADDR_EXPR,
2608 pointer32_type,
2609 build0 (PLACEHOLDER_EXPR, type))));
2610
2611 switch (mech)
2612 {
2613 case By_Descriptor:
d628c015 2614 case By_Short_Descriptor:
a1ab4c31 2615 case By_Descriptor_S:
d628c015 2616 case By_Short_Descriptor_S:
2617 break;
2618
2619 case By_Descriptor_SB:
d628c015 2620 case By_Short_Descriptor_SB:
2621 field_list
2622 = chainon (field_list,
2623 make_descriptor_field
2624 ("SB_L1", gnat_type_for_size (32, 1), record_type,
2625 TREE_CODE (type) == ARRAY_TYPE
2626 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2627 field_list
2628 = chainon (field_list,
2629 make_descriptor_field
2630 ("SB_U1", gnat_type_for_size (32, 1), record_type,
2631 TREE_CODE (type) == ARRAY_TYPE
2632 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2633 break;
2634
2635 case By_Descriptor_A:
d628c015 2636 case By_Short_Descriptor_A:
a1ab4c31 2637 case By_Descriptor_NCA:
d628c015 2638 case By_Short_Descriptor_NCA:
2639 field_list = chainon (field_list,
2640 make_descriptor_field ("SCALE",
2641 gnat_type_for_size (8, 1),
2642 record_type,
2643 size_zero_node));
2644
2645 field_list = chainon (field_list,
2646 make_descriptor_field ("DIGITS",
2647 gnat_type_for_size (8, 1),
2648 record_type,
2649 size_zero_node));
2650
2651 field_list
2652 = chainon (field_list,
2653 make_descriptor_field
2654 ("AFLAGS", gnat_type_for_size (8, 1), record_type,
2655 size_int ((mech == By_Descriptor_NCA ||
2656 mech == By_Short_Descriptor_NCA)
2657 ? 0
2658 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2659 : (TREE_CODE (type) == ARRAY_TYPE
2660 && TYPE_CONVENTION_FORTRAN_P (type)
2661 ? 224 : 192))));
2662
2663 field_list = chainon (field_list,
2664 make_descriptor_field ("DIMCT",
2665 gnat_type_for_size (8, 1),
2666 record_type,
2667 size_int (ndim)));
2668
2669 field_list = chainon (field_list,
2670 make_descriptor_field ("ARSIZE",
2671 gnat_type_for_size (32, 1),
2672 record_type,
2673 size_in_bytes (type)));
2674
2675 /* Now build a pointer to the 0,0,0... element. */
2676 tem = build0 (PLACEHOLDER_EXPR, type);
2677 for (i = 0, inner_type = type; i < ndim;
2678 i++, inner_type = TREE_TYPE (inner_type))
2679 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2680 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2681 NULL_TREE, NULL_TREE);
2682
2683 field_list
2684 = chainon (field_list,
2685 make_descriptor_field
2686 ("A0",
2687 build_pointer_type_for_mode (inner_type, SImode, false),
2688 record_type,
2689 build1 (ADDR_EXPR,
2690 build_pointer_type_for_mode (inner_type, SImode,
2691 false),
2692 tem)));
2693
2694 /* Next come the addressing coefficients. */
2695 tem = size_one_node;
2696 for (i = 0; i < ndim; i++)
2697 {
2698 char fname[3];
2699 tree idx_length
2700 = size_binop (MULT_EXPR, tem,
2701 size_binop (PLUS_EXPR,
2702 size_binop (MINUS_EXPR,
2703 TYPE_MAX_VALUE (idx_arr[i]),
2704 TYPE_MIN_VALUE (idx_arr[i])),
2705 size_int (1)));
2706
2707 fname[0] = ((mech == By_Descriptor_NCA ||
2708 mech == By_Short_Descriptor_NCA) ? 'S' : 'M');
2709 fname[1] = '0' + i, fname[2] = 0;
2710 field_list
2711 = chainon (field_list,
2712 make_descriptor_field (fname,
2713 gnat_type_for_size (32, 1),
2714 record_type, idx_length));
2715
d628c015 2716 if (mech == By_Descriptor_NCA || mech == By_Short_Descriptor_NCA)
2717 tem = idx_length;
2718 }
2719
2720 /* Finally here are the bounds. */
2721 for (i = 0; i < ndim; i++)
2722 {
2723 char fname[3];
2724
2725 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2726 field_list
2727 = chainon (field_list,
2728 make_descriptor_field
2729 (fname, gnat_type_for_size (32, 1), record_type,
2730 TYPE_MIN_VALUE (idx_arr[i])));
2731
2732 fname[0] = 'U';
2733 field_list
2734 = chainon (field_list,
2735 make_descriptor_field
2736 (fname, gnat_type_for_size (32, 1), record_type,
2737 TYPE_MAX_VALUE (idx_arr[i])));
2738 }
2739 break;
2740
2741 default:
2742 post_error ("unsupported descriptor type for &", gnat_entity);
2743 }
2744
10069d53 2745 TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC");
a1ab4c31 2746 finish_record_type (record_type, field_list, 0, true);
2747 return record_type;
2748}
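/* A rough standalone picture of the common 32-bit descriptor header built
   above, written as a C struct purely for illustration; field widths follow
   the gnat_type_for_size calls, while packing and byte order are
   assumptions left to the reader.  */

#include <stdint.h>

struct sketch_vms_desc32_header
{
  uint16_t length;   /* LENGTH: size in bytes of the object (or element) */
  uint8_t  dtype;    /* DTYPE: data type code computed above */
  uint8_t  class_;   /* CLASS: 1 = S, 4 = A, 10 = NCA, 15 = SB */
  uint32_t pointer;  /* POINTER: 32-bit address of the data */
};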
2749
2750/* Build a 64bit VMS descriptor from a Mechanism_Type, which must specify
2751 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2752 in the type contains in its DECL_INITIAL the expression to use when
2753 a constructor is made for the type. GNAT_ENTITY is an entity used
2754 to print out an error message if the mechanism cannot be applied to
2755 an object of that type and also for the name. */
2756
2757tree
d628c015 2758build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2759{
2760 tree record64_type = make_node (RECORD_TYPE);
2761 tree pointer64_type;
2762 tree field_list64 = 0;
2763 int class;
2764 int dtype = 0;
2765 tree inner_type;
2766 int ndim;
2767 int i;
2768 tree *idx_arr;
2769 tree tem;
2770
2771 /* If TYPE is an unconstrained array, use the underlying array type. */
2772 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2773 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2774
2775 /* If this is an array, compute the number of dimensions in the array,
2776 get the index types, and point to the inner type. */
2777 if (TREE_CODE (type) != ARRAY_TYPE)
2778 ndim = 0;
2779 else
2780 for (ndim = 1, inner_type = type;
2781 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2782 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2783 ndim++, inner_type = TREE_TYPE (inner_type))
2784 ;
2785
2786 idx_arr = (tree *) alloca (ndim * sizeof (tree));
2787
2788 if (mech != By_Descriptor_NCA
2789 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2790 for (i = ndim - 1, inner_type = type;
2791 i >= 0;
2792 i--, inner_type = TREE_TYPE (inner_type))
2793 idx_arr[i] = TYPE_DOMAIN (inner_type);
2794 else
2795 for (i = 0, inner_type = type;
2796 i < ndim;
2797 i++, inner_type = TREE_TYPE (inner_type))
2798 idx_arr[i] = TYPE_DOMAIN (inner_type);
2799
2800 /* Now get the DTYPE value. */
2801 switch (TREE_CODE (type))
2802 {
2803 case INTEGER_TYPE:
2804 case ENUMERAL_TYPE:
01ddebf2 2805 case BOOLEAN_TYPE:
2806 if (TYPE_VAX_FLOATING_POINT_P (type))
2807 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2808 {
2809 case 6:
2810 dtype = 10;
2811 break;
2812 case 9:
2813 dtype = 11;
2814 break;
2815 case 15:
2816 dtype = 27;
2817 break;
2818 }
2819 else
2820 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2821 {
2822 case 8:
2823 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2824 break;
2825 case 16:
2826 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2827 break;
2828 case 32:
2829 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2830 break;
2831 case 64:
2832 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2833 break;
2834 case 128:
2835 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2836 break;
2837 }
2838 break;
2839
2840 case REAL_TYPE:
2841 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2842 break;
2843
2844 case COMPLEX_TYPE:
2845 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2846 && TYPE_VAX_FLOATING_POINT_P (type))
2847 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2848 {
2849 case 6:
2850 dtype = 12;
2851 break;
2852 case 9:
2853 dtype = 13;
2854 break;
2855 case 15:
2856 dtype = 29;
2857 }
2858 else
2859 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2860 break;
2861
2862 case ARRAY_TYPE:
2863 dtype = 14;
2864 break;
2865
2866 default:
2867 break;
2868 }
2869
2870 /* Get the CLASS value. */
2871 switch (mech)
2872 {
2873 case By_Descriptor_A:
2874 class = 4;
2875 break;
2876 case By_Descriptor_NCA:
2877 class = 10;
2878 break;
2879 case By_Descriptor_SB:
2880 class = 15;
2881 break;
2882 case By_Descriptor:
2883 case By_Descriptor_S:
2884 default:
2885 class = 1;
2886 break;
2887 }
2888
2889 /* Make the type for a 64bit descriptor for VMS. The first six fields
2890 are the same for all types. */
2891
2892 field_list64 = chainon (field_list64,
2893 make_descriptor_field ("MBO",
2894 gnat_type_for_size (16, 1),
2895 record64_type, size_int (1)));
2896
2897 field_list64 = chainon (field_list64,
2898 make_descriptor_field ("DTYPE",
2899 gnat_type_for_size (8, 1),
2900 record64_type, size_int (dtype)));
2901 field_list64 = chainon (field_list64,
2902 make_descriptor_field ("CLASS",
2903 gnat_type_for_size (8, 1),
2904 record64_type, size_int (class)));
2905
2906 field_list64 = chainon (field_list64,
2907 make_descriptor_field ("MBMO",
2908 gnat_type_for_size (32, 1),
2909 record64_type, ssize_int (-1)));
2910
2911 field_list64
2912 = chainon (field_list64,
2913 make_descriptor_field
2914 ("LENGTH", gnat_type_for_size (64, 1), record64_type,
2915 size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
2916
2917 pointer64_type = build_pointer_type_for_mode (type, DImode, false);
2918
2919 field_list64
2920 = chainon (field_list64,
2921 make_descriptor_field
2922 ("POINTER", pointer64_type, record64_type,
2923 build_unary_op (ADDR_EXPR,
2924 pointer64_type,
2925 build0 (PLACEHOLDER_EXPR, type))));
2926
2927 switch (mech)
2928 {
2929 case By_Descriptor:
2930 case By_Descriptor_S:
2931 break;
2932
2933 case By_Descriptor_SB:
2934 field_list64
2935 = chainon (field_list64,
2936 make_descriptor_field
2937 ("SB_L1", gnat_type_for_size (64, 1), record64_type,
2938 TREE_CODE (type) == ARRAY_TYPE
2939 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2940 field_list64
2941 = chainon (field_list64,
2942 make_descriptor_field
2943 ("SB_U1", gnat_type_for_size (64, 1), record64_type,
2944 TREE_CODE (type) == ARRAY_TYPE
2945 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2946 break;
2947
2948 case By_Descriptor_A:
2949 case By_Descriptor_NCA:
2950 field_list64 = chainon (field_list64,
2951 make_descriptor_field ("SCALE",
2952 gnat_type_for_size (8, 1),
2953 record64_type,
2954 size_zero_node));
2955
2956 field_list64 = chainon (field_list64,
2957 make_descriptor_field ("DIGITS",
2958 gnat_type_for_size (8, 1),
2959 record64_type,
2960 size_zero_node));
2961
2962 field_list64
2963 = chainon (field_list64,
2964 make_descriptor_field
2965 ("AFLAGS", gnat_type_for_size (8, 1), record64_type,
2966 size_int (mech == By_Descriptor_NCA
2967 ? 0
2968 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2969 : (TREE_CODE (type) == ARRAY_TYPE
2970 && TYPE_CONVENTION_FORTRAN_P (type)
2971 ? 224 : 192))));
2972
2973 field_list64 = chainon (field_list64,
2974 make_descriptor_field ("DIMCT",
2975 gnat_type_for_size (8, 1),
2976 record64_type,
2977 size_int (ndim)));
2978
2979 field_list64 = chainon (field_list64,
2980 make_descriptor_field ("MBZ",
2981 gnat_type_for_size (32, 1),
2982 record64_type,
2983 size_int (0)));
2984 field_list64 = chainon (field_list64,
2985 make_descriptor_field ("ARSIZE",
2986 gnat_type_for_size (64, 1),
2987 record64_type,
2988 size_in_bytes (type)));
2989
2990 /* Now build a pointer to the 0,0,0... element. */
2991 tem = build0 (PLACEHOLDER_EXPR, type);
2992 for (i = 0, inner_type = type; i < ndim;
2993 i++, inner_type = TREE_TYPE (inner_type))
2994 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2995 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2996 NULL_TREE, NULL_TREE);
2997
2998 field_list64
2999 = chainon (field_list64,
3000 make_descriptor_field
3001 ("A0",
3002 build_pointer_type_for_mode (inner_type, DImode, false),
3003 record64_type,
3004 build1 (ADDR_EXPR,
3005 build_pointer_type_for_mode (inner_type, DImode,
3006 false),
3007 tem)));
3008
3009 /* Next come the addressing coefficients. */
3010 tem = size_one_node;
3011 for (i = 0; i < ndim; i++)
3012 {
3013 char fname[3];
3014 tree idx_length
3015 = size_binop (MULT_EXPR, tem,
3016 size_binop (PLUS_EXPR,
3017 size_binop (MINUS_EXPR,
3018 TYPE_MAX_VALUE (idx_arr[i]),
3019 TYPE_MIN_VALUE (idx_arr[i])),
3020 size_int (1)));
3021
3022 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
3023 fname[1] = '0' + i, fname[2] = 0;
3024 field_list64
3025 = chainon (field_list64,
3026 make_descriptor_field (fname,
3027 gnat_type_for_size (64, 1),
3028 record64_type, idx_length));
3029
3030 if (mech == By_Descriptor_NCA)
3031 tem = idx_length;
3032 }
3033
3034 /* Finally here are the bounds. */
3035 for (i = 0; i < ndim; i++)
3036 {
3037 char fname[3];
3038
3039 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
3040 field_list64
3041 = chainon (field_list64,
3042 make_descriptor_field
3043 (fname, gnat_type_for_size (64, 1), record64_type,
3044 TYPE_MIN_VALUE (idx_arr[i])));
3045
3046 fname[0] = 'U';
3047 field_list64
3048 = chainon (field_list64,
3049 make_descriptor_field
3050 (fname, gnat_type_for_size (64, 1), record64_type,
3051 TYPE_MAX_VALUE (idx_arr[i])));
3052 }
3053 break;
3054
3055 default:
3056 post_error ("unsupported descriptor type for &", gnat_entity);
3057 }
3058
10069d53 3059 TYPE_NAME (record64_type) = create_concat_name (gnat_entity, "DESC64");
6ca2b0a0 3060 finish_record_type (record64_type, field_list64, 0, true);
3061 return record64_type;
3062}
3063
3064/* Utility routine for above code to make a field. */
3065
3066static tree
3067make_descriptor_field (const char *name, tree type,
3068 tree rec_type, tree initial)
3069{
3070 tree field
3071 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
3072
3073 DECL_INITIAL (field) = initial;
3074 return field;
3075}
3076
3077/* Convert GNU_EXPR, a pointer to a 64bit VMS descriptor, to GNU_TYPE, a
3078 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3079 which the VMS descriptor is passed. */
3080
3081static tree
3082convert_vms_descriptor64 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3083{
3084 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3085 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3086 /* The CLASS field is the 3rd field in the descriptor. */
3087 tree class = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
3088 /* The POINTER field is the 6th field in the descriptor. */
3089 tree pointer64 = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (class)));
3090
3091 /* Retrieve the value of the POINTER field. */
3092 tree gnu_expr64
3093 = build3 (COMPONENT_REF, TREE_TYPE (pointer64), desc, pointer64, NULL_TREE);
3094
3095 if (POINTER_TYPE_P (gnu_type))
3096 return convert (gnu_type, gnu_expr64);
3097
3098 else if (TYPE_FAT_POINTER_P (gnu_type))
3099 {
3100 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3101 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
3102 tree template_type = TREE_TYPE (p_bounds_type);
3103 tree min_field = TYPE_FIELDS (template_type);
3104 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
3105 tree template, template_addr, aflags, dimct, t, u;
3106 /* See the head comment of build_vms_descriptor. */
3107 int iclass = TREE_INT_CST_LOW (DECL_INITIAL (class));
3108 tree lfield, ufield;
3109
3110 /* Convert POINTER to the type of the P_ARRAY field. */
3111 gnu_expr64 = convert (p_array_type, gnu_expr64);
3112
3113 switch (iclass)
3114 {
3115 case 1: /* Class S */
3116 case 15: /* Class SB */
3117 /* Build {1, LENGTH} template; LENGTH64 is the 5th field. */
3118 t = TREE_CHAIN (TREE_CHAIN (class));
3119 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3120 t = tree_cons (min_field,
3121 convert (TREE_TYPE (min_field), integer_one_node),
3122 tree_cons (max_field,
3123 convert (TREE_TYPE (max_field), t),
3124 NULL_TREE));
3125 template = gnat_build_constructor (template_type, t);
3126 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
3127
3128 /* For class S, we are done. */
3129 if (iclass == 1)
3130 break;
3131
3132 /* Test that we really have a SB descriptor, like DEC Ada. */
3133 t = build3 (COMPONENT_REF, TREE_TYPE (class), desc, class, NULL);
3134 u = convert (TREE_TYPE (class), DECL_INITIAL (class));
3135 u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
3136 /* If so, there is already a template in the descriptor and
3137 it is located right after the POINTER field. The fields are
3138 64bits so they must be repacked. */
3139 t = TREE_CHAIN (pointer64);
3140 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3141 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3142
3143 t = TREE_CHAIN (t);
3144 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3145 ufield = convert
3146 (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);
3147
3148 /* Build the template in the form of a constructor. */
3149 t = tree_cons (TYPE_FIELDS (template_type), lfield,
3150 tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
3151 ufield, NULL_TREE));
3152 template = gnat_build_constructor (template_type, t);
3153
3154 /* Otherwise use the {1, LENGTH} template we build above. */
3155 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3156 build_unary_op (ADDR_EXPR, p_bounds_type,
3157 template),
3158 template_addr);
3159 break;
3160
3161 case 4: /* Class A */
3162 /* The AFLAGS field is the 3rd field after the pointer in the
3163 descriptor. */
3164 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer64)));
3165 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3166 /* The DIMCT field is the next field in the descriptor after
3167 aflags. */
3168 t = TREE_CHAIN (t);
3169 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
 3170 /* Raise CONSTRAINT_ERROR if there is more than 1 dimension,
 3171 or if FL_COEFF or FL_BOUNDS is not set. */
3172 u = build_int_cst (TREE_TYPE (aflags), 192);
3173 u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
3174 build_binary_op (NE_EXPR, integer_type_node,
3175 dimct,
3176 convert (TREE_TYPE (dimct),
3177 size_one_node)),
3178 build_binary_op (NE_EXPR, integer_type_node,
3179 build2 (BIT_AND_EXPR,
3180 TREE_TYPE (aflags),
3181 aflags, u),
3182 u));
3183 /* There is already a template in the descriptor and it is located
3184 in block 3. The fields are 64bits so they must be repacked. */
3185 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN
3186 (t)))));
3187 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3188 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3189
3190 t = TREE_CHAIN (t);
3191 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3192 ufield = convert
3193 (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);
3194
3195 /* Build the template in the form of a constructor. */
3196 t = tree_cons (TYPE_FIELDS (template_type), lfield,
3197 tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
3198 ufield, NULL_TREE));
3199 template = gnat_build_constructor (template_type, t);
3200 template = build3 (COND_EXPR, p_bounds_type, u,
3201 build_call_raise (CE_Length_Check_Failed, Empty,
3202 N_Raise_Constraint_Error),
3203 template);
3204 template_addr = build_unary_op (ADDR_EXPR, p_bounds_type, template);
3205 break;
3206
3207 case 10: /* Class NCA */
3208 default:
3209 post_error ("unsupported descriptor type for &", gnat_subprog);
3210 template_addr = integer_zero_node;
3211 break;
3212 }
3213
3214 /* Build the fat pointer in the form of a constructor. */
3215 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr64,
3216 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
3217 template_addr, NULL_TREE));
3218 return gnat_build_constructor (gnu_type, t);
3219 }
3220
3221 else
3222 gcc_unreachable ();
3223}
3224
3225/* Convert GNU_EXPR, a pointer to a 32bit VMS descriptor, to GNU_TYPE, a
3226 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3227 which the VMS descriptor is passed. */
3228
3229static tree
3230convert_vms_descriptor32 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
a1ab4c31
AC
3231{
3232 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3233 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3234 /* The CLASS field is the 3rd field in the descriptor. */
3235 tree class = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
3236 /* The POINTER field is the 4th field in the descriptor. */
3237 tree pointer = TREE_CHAIN (class);
3238
3239 /* Retrieve the value of the POINTER field. */
d628c015 3240 tree gnu_expr32
a1ab4c31
AC
3241 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
3242
3243 if (POINTER_TYPE_P (gnu_type))
d628c015 3244 return convert (gnu_type, gnu_expr32);
a1ab4c31
AC
3245
3246 else if (TYPE_FAT_POINTER_P (gnu_type))
3247 {
3248 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3249 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
3250 tree template_type = TREE_TYPE (p_bounds_type);
3251 tree min_field = TYPE_FIELDS (template_type);
3252 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
3253 tree template, template_addr, aflags, dimct, t, u;
3254 /* See the head comment of build_vms_descriptor. */
3255 int iclass = TREE_INT_CST_LOW (DECL_INITIAL (class));
3256
3257 /* Convert POINTER to the type of the P_ARRAY field. */
d628c015 3258 gnu_expr32 = convert (p_array_type, gnu_expr32);
a1ab4c31
AC
3259
3260 switch (iclass)
3261 {
3262 case 1: /* Class S */
3263 case 15: /* Class SB */
3264 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
3265 t = TYPE_FIELDS (desc_type);
3266 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3267 t = tree_cons (min_field,
3268 convert (TREE_TYPE (min_field), integer_one_node),
3269 tree_cons (max_field,
3270 convert (TREE_TYPE (max_field), t),
3271 NULL_TREE));
3272 template = gnat_build_constructor (template_type, t);
3273 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
3274
3275 /* For class S, we are done. */
3276 if (iclass == 1)
3277 break;
3278
3279 /* Test that we really have an SB descriptor, like DEC Ada. */
3280 t = build3 (COMPONENT_REF, TREE_TYPE (class), desc, class, NULL);
3281 u = convert (TREE_TYPE (class), DECL_INITIAL (class));
3282 u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
3283 /* If so, there is already a template in the descriptor and
3284 it is located right after the POINTER field. */
3285 t = TREE_CHAIN (pointer);
3286 template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3287 /* Otherwise use the {1, LENGTH} template we built above. */
3288 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3289 build_unary_op (ADDR_EXPR, p_bounds_type,
3290 template),
3291 template_addr);
3292 break;
3293
3294 case 4: /* Class A */
3295 /* The AFLAGS field is the 7th field in the descriptor. */
3296 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer)));
3297 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3298 /* The DIMCT field is the 8th field in the descriptor. */
3299 t = TREE_CHAIN (t);
3300 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3301 /* Raise CONSTRAINT_ERROR if there is more than one dimension,
3302 or if FL_COEFF or FL_BOUNDS is not set. */
3303 u = build_int_cst (TREE_TYPE (aflags), 192);
3304 u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
3305 build_binary_op (NE_EXPR, integer_type_node,
3306 dimct,
3307 convert (TREE_TYPE (dimct),
3308 size_one_node)),
3309 build_binary_op (NE_EXPR, integer_type_node,
3310 build2 (BIT_AND_EXPR,
3311 TREE_TYPE (aflags),
3312 aflags, u),
3313 u));
a1ab4c31
AC
3314 /* There is already a template in the descriptor and it is
3315 located at the start of block 3 (12th field). */
3316 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (t))));
3317 template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
d628c015
DR
3318 template = build3 (COND_EXPR, p_bounds_type, u,
3319 build_call_raise (CE_Length_Check_Failed, Empty,
3320 N_Raise_Constraint_Error),
3321 template);
a1ab4c31
AC
3322 template_addr = build_unary_op (ADDR_EXPR, p_bounds_type, template);
3323 break;
3324
3325 case 10: /* Class NCA */
3326 default:
3327 post_error ("unsupported descriptor type for &", gnat_subprog);
3328 template_addr = integer_zero_node;
3329 break;
3330 }
3331
3332 /* Build the fat pointer in the form of a constructor. */
d628c015 3333 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr32,
a1ab4c31
AC
3334 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
3335 template_addr, NULL_TREE));
d628c015 3336
a1ab4c31
AC
3337 return gnat_build_constructor (gnu_type, t);
3338 }
3339
3340 else
3341 gcc_unreachable ();
3342}
3343
a981c964
EB
3344/* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
3345 pointer or fat pointer type. GNU_EXPR_ALT_TYPE is the alternate (32-bit)
3346 pointer type of GNU_EXPR. GNAT_SUBPROG is the subprogram to which the
3347 VMS descriptor is passed. */
d628c015
DR
3348
3349static tree
a981c964
EB
3350convert_vms_descriptor (tree gnu_type, tree gnu_expr, tree gnu_expr_alt_type,
3351 Entity_Id gnat_subprog)
d628c015
DR
3352{
3353 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3354 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3355 tree mbo = TYPE_FIELDS (desc_type);
3356 const char *mbostr = IDENTIFIER_POINTER (DECL_NAME (mbo));
3357 tree mbmo = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (mbo)));
a981c964 3358 tree is64bit, gnu_expr32, gnu_expr64;
d628c015 3359
a981c964
EB
3360 /* If the field name is not MBO, the descriptor must be 32-bit and there
3361 is no alternate. Otherwise the primary must be 64-bit and the alternate 32-bit. */
d628c015 3362 if (strcmp (mbostr, "MBO") != 0)
d628c015
DR
3363 return convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
3364
a981c964 3365 /* Build the test for 64-bit descriptor. */
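 /* In a 64-bit descriptor the MBO ("must be one") field reads 1 and the
 MBMO ("must be minus one") field reads -1; in a 32-bit descriptor these
 slots presumably overlay other fields, so the test built below
 discriminates the two layouts at run time. */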
d628c015
DR
3366 mbo = build3 (COMPONENT_REF, TREE_TYPE (mbo), desc, mbo, NULL_TREE);
3367 mbmo = build3 (COMPONENT_REF, TREE_TYPE (mbmo), desc, mbmo, NULL_TREE);
a981c964
EB
3368 is64bit
3369 = build_binary_op (TRUTH_ANDIF_EXPR, integer_type_node,
3370 build_binary_op (EQ_EXPR, integer_type_node,
3371 convert (integer_type_node, mbo),
3372 integer_one_node),
3373 build_binary_op (EQ_EXPR, integer_type_node,
3374 convert (integer_type_node, mbmo),
3375 integer_minus_one_node));
3376
3377 /* Build the 2 possible end results. */
3378 gnu_expr64 = convert_vms_descriptor64 (gnu_type, gnu_expr, gnat_subprog);
3379 gnu_expr = fold_convert (gnu_expr_alt_type, gnu_expr);
3380 gnu_expr32 = convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
3381
3382 return build3 (COND_EXPR, gnu_type, is64bit, gnu_expr64, gnu_expr32);
d628c015
DR
3383}
3384
a1ab4c31
AC
3385/* Build a stub for the subprogram specified by the GCC tree GNU_SUBPROG
3386 and the GNAT node GNAT_SUBPROG. */
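/* Conceptually, for a parameter passed by VMS descriptor, the stub amounts to

     F_stub (desc)  { return F (convert_vms_descriptor (desc)); }

   i.e. each by-descriptor formal is converted to the by-reference form
   expected by the internal subprogram before the call is forwarded. This is
   only a sketch; see the loop below for the actual processing. */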
3387
3388void
3389build_function_stub (tree gnu_subprog, Entity_Id gnat_subprog)
3390{
3391 tree gnu_subprog_type, gnu_subprog_addr, gnu_subprog_call;
3392 tree gnu_stub_param, gnu_param_list, gnu_arg_types, gnu_param;
3393 tree gnu_stub_decl = DECL_FUNCTION_STUB (gnu_subprog);
3394 tree gnu_body;
3395
3396 gnu_subprog_type = TREE_TYPE (gnu_subprog);
3397 gnu_param_list = NULL_TREE;
3398
3399 begin_subprog_body (gnu_stub_decl);
3400 gnat_pushlevel ();
3401
3402 start_stmt_group ();
3403
3404 /* Loop over the parameters of the stub and translate any of them
3405 passed by descriptor into a by-reference one. */
3406 for (gnu_stub_param = DECL_ARGUMENTS (gnu_stub_decl),
3407 gnu_arg_types = TYPE_ARG_TYPES (gnu_subprog_type);
3408 gnu_stub_param;
3409 gnu_stub_param = TREE_CHAIN (gnu_stub_param),
3410 gnu_arg_types = TREE_CHAIN (gnu_arg_types))
3411 {
3412 if (DECL_BY_DESCRIPTOR_P (gnu_stub_param))
a981c964
EB
3413 gnu_param
3414 = convert_vms_descriptor (TREE_VALUE (gnu_arg_types),
3415 gnu_stub_param,
3416 DECL_PARM_ALT_TYPE (gnu_stub_param),
3417 gnat_subprog);
a1ab4c31
AC
3418 else
3419 gnu_param = gnu_stub_param;
3420
3421 gnu_param_list = tree_cons (NULL_TREE, gnu_param, gnu_param_list);
3422 }
3423
3424 gnu_body = end_stmt_group ();
3425
3426 /* Invoke the internal subprogram. */
3427 gnu_subprog_addr = build1 (ADDR_EXPR, build_pointer_type (gnu_subprog_type),
3428 gnu_subprog);
3429 gnu_subprog_call = build_call_list (TREE_TYPE (gnu_subprog_type),
3430 gnu_subprog_addr,
3431 nreverse (gnu_param_list));
3432
3433 /* Propagate the return value, if any. */
3434 if (VOID_TYPE_P (TREE_TYPE (gnu_subprog_type)))
3435 append_to_statement_list (gnu_subprog_call, &gnu_body);
3436 else
3437 append_to_statement_list (build_return_expr (DECL_RESULT (gnu_stub_decl),
3438 gnu_subprog_call),
3439 &gnu_body);
3440
3441 gnat_poplevel ();
3442
3443 allocate_struct_function (gnu_stub_decl, false);
3444 end_subprog_body (gnu_body, false);
3445}
3446\f
3447/* Build a type to be used to represent an aliased object whose nominal
3448 type is an unconstrained array. This consists of a RECORD_TYPE containing
3449 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
3450 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
3451 is used to represent an arbitrary unconstrained object. Use NAME
3452 as the name of the record. */
3453
3454tree
3455build_unc_object_type (tree template_type, tree object_type, tree name)
3456{
3457 tree type = make_node (RECORD_TYPE);
3458 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
3459 template_type, type, 0, 0, 0, 1);
3460 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
3461 type, 0, 0, 0, 1);
3462
3463 TYPE_NAME (type) = name;
3464 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3465 finish_record_type (type,
3466 chainon (chainon (NULL_TREE, template_field),
3467 array_field),
3468 0, false);
3469
3470 return type;
3471}
3472
3473/* Same, taking a thin or fat pointer type instead of a template type. */
3474
3475tree
3476build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3477 tree name)
3478{
3479 tree template_type;
3480
3481 gcc_assert (TYPE_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
3482
3483 template_type
3484 = (TYPE_FAT_POINTER_P (thin_fat_ptr_type)
3485 ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3486 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3487 return build_unc_object_type (template_type, object_type, name);
3488}
3489
3490/* Shift the component offsets within an unconstrained object TYPE to make it
3491 suitable for use as a designated type for thin pointers. */
3492
3493void
3494shift_unc_components_for_thin_pointers (tree type)
3495{
3496 /* Thin pointer values designate the ARRAY data of an unconstrained object,
3497 allocated past the BOUNDS template. The designated type is adjusted to
3498 have ARRAY at position zero and the template at a negative offset, so
3499 that COMPONENT_REFs on (*thin_ptr) designate the proper location. */
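 /* For instance, assuming an 8-byte BOUNDS template and no padding, the
 adjusted layout has BOUNDS at offset -8 and ARRAY at offset 0, so the
 thin pointer value is directly the address of the array data. */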
3500
3501 tree bounds_field = TYPE_FIELDS (type);
3502 tree array_field = TREE_CHAIN (TYPE_FIELDS (type));
3503
3504 DECL_FIELD_OFFSET (bounds_field)
3505 = size_binop (MINUS_EXPR, size_zero_node, byte_position (array_field));
3506
3507 DECL_FIELD_OFFSET (array_field) = size_zero_node;
3508 DECL_FIELD_BIT_OFFSET (array_field) = bitsize_zero_node;
3509}
3510\f
229077b0
EB
3511/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
3512 In the normal case this is just two adjustments, but we have more to
3513 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
a1ab4c31
AC
3514
3515void
3516update_pointer_to (tree old_type, tree new_type)
3517{
3518 tree ptr = TYPE_POINTER_TO (old_type);
3519 tree ref = TYPE_REFERENCE_TO (old_type);
3520 tree ptr1, ref1;
3521 tree type;
3522
3523 /* If this is the main variant, process all the other variants first. */
3524 if (TYPE_MAIN_VARIANT (old_type) == old_type)
3525 for (type = TYPE_NEXT_VARIANT (old_type); type;
3526 type = TYPE_NEXT_VARIANT (type))
3527 update_pointer_to (type, new_type);
3528
229077b0 3529 /* If no pointers and no references, we are done. */
a1ab4c31
AC
3530 if (!ptr && !ref)
3531 return;
3532
3533 /* Merge the old type qualifiers in the new type.
3534
3535 Each old variant has qualifiers for specific reasons, and the new
229077b0 3536 designated type as well. Each set of qualifiers represents useful
a1ab4c31
AC
3537 information grabbed at some point, and merging the two simply unifies
3538 these inputs into the final type description.
3539
3540 Consider for instance a volatile type frozen after an access to constant
229077b0
EB
3541 type designating it; after the designated type's freeze, we get here with
3542 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
3543 when the access type was processed. We will make a volatile and readonly
a1ab4c31
AC
3544 designated type, because that's what it really is.
3545
229077b0
EB
3546 We might also get here for a non-dummy OLD_TYPE variant with different
3547 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
a1ab4c31 3548 to private record type elaboration (see the comments around the call to
229077b0
EB
3549 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
3550 the qualifiers in those cases too, to avoid accidentally discarding the
3551 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
3552 new_type
3553 = build_qualified_type (new_type,
3554 TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
3555
3556 /* If old type and new type are identical, there is nothing to do. */
a1ab4c31
AC
3557 if (old_type == new_type)
3558 return;
3559
3560 /* Otherwise, first handle the simple case. */
3561 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
3562 {
3563 TYPE_POINTER_TO (new_type) = ptr;
3564 TYPE_REFERENCE_TO (new_type) = ref;
3565
3566 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
3567 for (ptr1 = TYPE_MAIN_VARIANT (ptr); ptr1;
3568 ptr1 = TYPE_NEXT_VARIANT (ptr1))
3569 TREE_TYPE (ptr1) = new_type;
3570
3571 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
3572 for (ref1 = TYPE_MAIN_VARIANT (ref); ref1;
3573 ref1 = TYPE_NEXT_VARIANT (ref1))
3574 TREE_TYPE (ref1) = new_type;
3575 }
3576
229077b0 3577 /* Now deal with the unconstrained array case. In this case the "pointer"
a1ab4c31
AC
3578 is actually a RECORD_TYPE where both fields are pointers to dummy nodes.
3579 Turn them into pointers to the correct types using update_pointer_to. */
229077b0 3580 else if (!TYPE_FAT_POINTER_P (ptr))
a1ab4c31
AC
3581 gcc_unreachable ();
3582
3583 else
3584 {
3585 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
3586 tree array_field = TYPE_FIELDS (ptr);
3587 tree bounds_field = TREE_CHAIN (TYPE_FIELDS (ptr));
3588 tree new_ptr = TYPE_POINTER_TO (new_type);
3589 tree new_ref;
3590 tree var;
3591
3592 /* Make pointers to the dummy template point to the real template. */
3593 update_pointer_to
3594 (TREE_TYPE (TREE_TYPE (bounds_field)),
3595 TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_ptr)))));
3596
3597 /* The references to the template bounds present in the array type
229077b0
EB
3598 are made through a PLACEHOLDER_EXPR of type NEW_PTR. Since we
3599 are updating PTR to make it a full replacement for NEW_PTR as
3600 pointer to NEW_TYPE, we must rework the PLACEHOLDER_EXPR so as
3601 to make it of type PTR. */
a1ab4c31
AC
3602 new_ref = build3 (COMPONENT_REF, TREE_TYPE (bounds_field),
3603 build0 (PLACEHOLDER_EXPR, ptr),
3604 bounds_field, NULL_TREE);
3605
229077b0 3606 /* Create the new array for the new PLACEHOLDER_EXPR and make pointers
77022fa8 3607 to the dummy array point to it. */
a1ab4c31
AC
3608 update_pointer_to
3609 (TREE_TYPE (TREE_TYPE (array_field)),
3610 substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))),
3611 TREE_CHAIN (TYPE_FIELDS (new_ptr)), new_ref));
3612
229077b0 3613 /* Make PTR the pointer to NEW_TYPE. */
a1ab4c31
AC
3614 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
3615 = TREE_TYPE (new_type) = ptr;
3616
3617 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
3618 SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);
3619
3620 /* Now handle updating the allocation record, what the thin pointer
3621 points to. Update all pointers from the old record into the new
3622 one, update the type of the array field, and recompute the size. */
3623 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
3624
3625 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
3626 = TREE_TYPE (TREE_TYPE (array_field));
3627
3628 /* The size recomputation needs to account for alignment constraints, so
3629 we let layout_type work it out. This will reset the field offsets to
3630 what they would be in a regular record, so we shift them back to what
3631 we want them to be for a thin pointer designated type afterwards. */
3632 DECL_SIZE (TYPE_FIELDS (new_obj_rec)) = 0;
3633 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))) = 0;
3634 TYPE_SIZE (new_obj_rec) = 0;
3635 layout_type (new_obj_rec);
3636
3637 shift_unc_components_for_thin_pointers (new_obj_rec);
3638
3639 /* We are done, at last. */
3640 rest_of_record_type_compilation (ptr);
3641 }
3642}
3643\f
8df2e902
EB
3644/* Convert EXPR, a pointer to a constrained array, into a pointer to an
3645 unconstrained one. This involves making or finding a template. */
a1ab4c31
AC
3646
3647static tree
3648convert_to_fat_pointer (tree type, tree expr)
3649{
3650 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
8df2e902 3651 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
a1ab4c31 3652 tree etype = TREE_TYPE (expr);
8df2e902 3653 tree template;
a1ab4c31 3654
8df2e902
EB
3655 /* If EXPR is null, make a fat pointer that contains null pointers to the
3656 template and array. */
a1ab4c31
AC
3657 if (integer_zerop (expr))
3658 return
3659 gnat_build_constructor
3660 (type,
3661 tree_cons (TYPE_FIELDS (type),
8df2e902 3662 convert (p_array_type, expr),
a1ab4c31
AC
3663 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3664 convert (build_pointer_type (template_type),
3665 expr),
3666 NULL_TREE)));
3667
8df2e902 3668 /* If EXPR is a thin pointer, make template and data from the record. */
a1ab4c31
AC
3669 else if (TYPE_THIN_POINTER_P (etype))
3670 {
3671 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
3672
3673 expr = save_expr (expr);
3674 if (TREE_CODE (expr) == ADDR_EXPR)
3675 expr = TREE_OPERAND (expr, 0);
3676 else
3677 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
3678
3679 template = build_component_ref (expr, NULL_TREE, fields, false);
3680 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
3681 build_component_ref (expr, NULL_TREE,
3682 TREE_CHAIN (fields), false));
3683 }
8df2e902
EB
3684
3685 /* Otherwise, build the constructor for the template. */
a1ab4c31 3686 else
a1ab4c31
AC
3687 template = build_template (template_type, TREE_TYPE (etype), expr);
3688
8df2e902 3689 /* The final result is a constructor for the fat pointer.
a1ab4c31 3690
8df2e902
EB
3691 If EXPR is an argument of a foreign convention subprogram, the type it
3692 points to is directly the component type. In this case, the expression
a1ab4c31 3693 type may not match the corresponding FIELD_DECL type at this point, so we
8df2e902 3694 call "convert" here to fix that up if necessary. This type consistency is
a1ab4c31 3695 required, for instance because it ensures that possible later folding of
8df2e902 3696 COMPONENT_REFs against this constructor always yields something of the
a1ab4c31
AC
3697 same type as the initial reference.
3698
8df2e902
EB
3699 Note that the call to "build_template" above is still fine because it
3700 will only refer to the provided TEMPLATE_TYPE in this case. */
3701 return
3702 gnat_build_constructor
3703 (type,
3704 tree_cons (TYPE_FIELDS (type),
3705 convert (p_array_type, expr),
3706 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3707 build_unary_op (ADDR_EXPR, NULL_TREE, template),
3708 NULL_TREE)));
a1ab4c31
AC
3709}
3710\f
3711/* Convert to a thin pointer type, TYPE. The only thing we know how to convert
3712 is something that is a fat pointer, so convert to it first if EXPR
3713 is not already a fat pointer. */
3714
3715static tree
3716convert_to_thin_pointer (tree type, tree expr)
3717{
3718 if (!TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
3719 expr
3720 = convert_to_fat_pointer
3721 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
3722
3723 /* We get the pointer to the data and use a NOP_EXPR to make it the
3724 proper GCC type. */
3725 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
3726 false);
3727 expr = build1 (NOP_EXPR, type, expr);
3728
3729 return expr;
3730}
3731\f
3732/* Create an expression whose value is that of EXPR,
3733 converted to type TYPE. The TREE_TYPE of the value
3734 is always TYPE. This function implements all reasonable
3735 conversions; callers should filter out those that are
3736 not permitted by the language being compiled. */
3737
3738tree
3739convert (tree type, tree expr)
3740{
3741 enum tree_code code = TREE_CODE (type);
3742 tree etype = TREE_TYPE (expr);
3743 enum tree_code ecode = TREE_CODE (etype);
3744
3745 /* If EXPR is already the right type, we are done. */
3746 if (type == etype)
3747 return expr;
3748
3749 /* If both input and output have padding and are of variable size, do this
3750 as an unchecked conversion. Likewise if one is a mere variant of the
3751 other, so we avoid a pointless unpad/repad sequence. */
3752 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
3753 && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
3754 && (!TREE_CONSTANT (TYPE_SIZE (type))
3755 || !TREE_CONSTANT (TYPE_SIZE (etype))
3756 || gnat_types_compatible_p (type, etype)
3757 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
3758 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
3759 ;
3760
3761 /* If the output type has padding, convert to the inner type and
3762 make a constructor to build the record. */
3763 else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
3764 {
3765 /* If we previously converted from another type and our type is
3766 of variable size, remove the conversion to avoid the need for
3767 variable-size temporaries. Likewise for a conversion between
3768 original and packable version. */
3769 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3770 && (!TREE_CONSTANT (TYPE_SIZE (type))
3771 || (ecode == RECORD_TYPE
3772 && TYPE_NAME (etype)
3773 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
3774 expr = TREE_OPERAND (expr, 0);
3775
3776 /* If we are just removing the padding from expr, convert the original
3777 object if we have variable size in order to avoid the need for some
3778 variable-size temporaries. Likewise if the padding is a mere variant
3779 of the other, so we avoid a pointless unpad/repad sequence. */
3780 if (TREE_CODE (expr) == COMPONENT_REF
3781 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
3782 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
3783 && (!TREE_CONSTANT (TYPE_SIZE (type))
3784 || gnat_types_compatible_p (type,
3785 TREE_TYPE (TREE_OPERAND (expr, 0)))
3786 || (ecode == RECORD_TYPE
3787 && TYPE_NAME (etype)
3788 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
3789 return convert (type, TREE_OPERAND (expr, 0));
3790
3791 /* If the result type is a padded type with a self-referentially-sized
3792 field and the expression type is a record, do this as an
3793 unchecked conversion. */
3794 else if (TREE_CODE (etype) == RECORD_TYPE
3795 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
3796 return unchecked_convert (type, expr, false);
3797
3798 else
3799 return
3800 gnat_build_constructor (type,
3801 tree_cons (TYPE_FIELDS (type),
3802 convert (TREE_TYPE
3803 (TYPE_FIELDS (type)),
3804 expr),
3805 NULL_TREE));
3806 }
3807
3808 /* If the input type has padding, remove it and convert to the output type.
3809 The ordering of the conditions is arranged to ensure that the output type
3810 is not a padding type here, as it is not clear whether the conversion would
3811 always be correct if this were to happen. */
3812 else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
3813 {
3814 tree unpadded;
3815
3816 /* If we have just converted to this padded type, just get the
3817 inner expression. */
3818 if (TREE_CODE (expr) == CONSTRUCTOR
3819 && !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (expr))
3820 && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->index
3821 == TYPE_FIELDS (etype))
3822 unpadded
3823 = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->value;
3824
3825 /* Otherwise, build an explicit component reference. */
3826 else
3827 unpadded
3828 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
3829
3830 return convert (type, unpadded);
3831 }
3832
3833 /* If the input is a biased type, adjust first. */
3834 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
3835 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
3836 fold_convert (TREE_TYPE (etype),
3837 expr),
3838 TYPE_MIN_VALUE (etype)));
3839
3840 /* If the input is a justified modular type, we need to extract the actual
3841 object before converting it to any other type with the exceptions of an
3842 unconstrained array or of a mere type variant. It is useful to avoid the
3843 extraction and conversion in the type variant case because it could end
3844 up replacing a VAR_DECL expr by a constructor and we might be about to
3845 take the address of the result. */
3846 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
3847 && code != UNCONSTRAINED_ARRAY_TYPE
3848 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
3849 return convert (type, build_component_ref (expr, NULL_TREE,
3850 TYPE_FIELDS (etype), false));
3851
3852 /* If converting to a type that contains a template, convert to the data
3853 type and then build the template. */
3854 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
3855 {
3856 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
3857
3858 /* If the source already has a template, get a reference to the
3859 associated array only, as we are going to rebuild a template
3860 for the target type anyway. */
3861 expr = maybe_unconstrained_array (expr);
3862
3863 return
3864 gnat_build_constructor
3865 (type,
3866 tree_cons (TYPE_FIELDS (type),
3867 build_template (TREE_TYPE (TYPE_FIELDS (type)),
3868 obj_type, NULL_TREE),
3869 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3870 convert (obj_type, expr), NULL_TREE)));
3871 }
3872
3873 /* There are some special cases of expressions that we process
3874 specially. */
3875 switch (TREE_CODE (expr))
3876 {
3877 case ERROR_MARK:
3878 return expr;
3879
3880 case NULL_EXPR:
3881 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
3882 conversion in gnat_expand_expr. NULL_EXPR does not represent
3883 an actual value, so no conversion is needed. */
3884 expr = copy_node (expr);
3885 TREE_TYPE (expr) = type;
3886 return expr;
3887
3888 case STRING_CST:
3889 /* If we are converting a STRING_CST to another constrained array type,
3890 just make a new one in the proper type. */
3891 if (code == ecode && AGGREGATE_TYPE_P (etype)
3892 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
3893 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
3894 {
3895 expr = copy_node (expr);
3896 TREE_TYPE (expr) = type;
3897 return expr;
3898 }
3899 break;
3900
3901 case CONSTRUCTOR:
3902 /* If we are converting a CONSTRUCTOR to a mere variant type, just make
3903 a new one in the proper type. */
3904 if (code == ecode && gnat_types_compatible_p (type, etype))
3905 {
3906 expr = copy_node (expr);
3907 TREE_TYPE (expr) = type;
3908 return expr;
3909 }
3910
3911 /* Likewise for a conversion between original and packable version, but
3912 we have to work harder in order to preserve type consistency. */
3913 if (code == ecode
3914 && code == RECORD_TYPE
3915 && TYPE_NAME (type) == TYPE_NAME (etype))
3916 {
3917 VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
3918 unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
3919 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, len);
3920 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
3921 unsigned HOST_WIDE_INT idx;
3922 tree index, value;
3923
3924 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
3925 {
3926 constructor_elt *elt = VEC_quick_push (constructor_elt, v, NULL);
3927 /* We expect only simple constructors. Otherwise, punt. */
3928 if (!(index == efield || index == DECL_ORIGINAL_FIELD (efield)))
3929 break;
3930 elt->index = field;
3931 elt->value = convert (TREE_TYPE (field), value);
3932 efield = TREE_CHAIN (efield);
3933 field = TREE_CHAIN (field);
3934 }
3935
3936 if (idx == len)
3937 {
3938 expr = copy_node (expr);
3939 TREE_TYPE (expr) = type;
3940 CONSTRUCTOR_ELTS (expr) = v;
3941 return expr;
3942 }
3943 }
3944 break;
3945
3946 case UNCONSTRAINED_ARRAY_REF:
3947 /* Convert this to the type of the inner array by getting the address of
3948 the array from the template. */
3949 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3950 build_component_ref (TREE_OPERAND (expr, 0),
3951 get_identifier ("P_ARRAY"),
3952 NULL_TREE, false));
3953 etype = TREE_TYPE (expr);
3954 ecode = TREE_CODE (etype);
3955 break;
3956
3957 case VIEW_CONVERT_EXPR:
3958 {
3959 /* GCC 4.x is very sensitive to type consistency overall, and view
3960 conversions thus are very frequent. Even though just "convert"ing
3961 the inner operand to the output type is fine in most cases, it
3962 might expose unexpected input/output type mismatches in special
3963 circumstances so we avoid such recursive calls when we can. */
3964 tree op0 = TREE_OPERAND (expr, 0);
3965
3966 /* If we are converting back to the original type, we can just
3967 lift the input conversion. This is a common occurrence with
3968 switches back-and-forth amongst type variants. */
3969 if (type == TREE_TYPE (op0))
3970 return op0;
3971
3972 /* Otherwise, if we're converting between two aggregate types, we
3973 might be allowed to substitute the VIEW_CONVERT_EXPR target type
3974 in place or to just convert the inner expression. */
3975 if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
3976 {
3977 /* If we are converting between mere variants, we can just
3978 substitute the VIEW_CONVERT_EXPR in place. */
3979 if (gnat_types_compatible_p (type, etype))
3980 return build1 (VIEW_CONVERT_EXPR, type, op0);
3981
3982 /* Otherwise, we may just bypass the input view conversion unless
3983 one of the types is a fat pointer, which is handled by
3984 specialized code below which relies on exact type matching. */
3985 else if (!TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
3986 return convert (type, op0);
3987 }
3988 }
3989 break;
3990
3991 case INDIRECT_REF:
3992 /* If both types are record types, just convert the pointer and
3993 make a new INDIRECT_REF.
3994
3995 ??? Disable this for now since it causes problems with the
3996 code in build_binary_op for MODIFY_EXPR which wants to
3997 strip off conversions. But that code really is a mess and
3998 we need to do this in a much better way some time. */
3999 if (0
4000 && (TREE_CODE (type) == RECORD_TYPE
4001 || TREE_CODE (type) == UNION_TYPE)
4002 && (TREE_CODE (etype) == RECORD_TYPE
4003 || TREE_CODE (etype) == UNION_TYPE)
4004 && !TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
4005 return build_unary_op (INDIRECT_REF, NULL_TREE,
4006 convert (build_pointer_type (type),
4007 TREE_OPERAND (expr, 0)));
4008 break;
4009
4010 default:
4011 break;
4012 }
4013
4014 /* Check for converting to a pointer to an unconstrained array. */
4015 if (TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
4016 return convert_to_fat_pointer (type, expr);
4017
4018 /* If we are converting between two aggregate types that are mere
4019 variants, just make a VIEW_CONVERT_EXPR. */
4020 else if (code == ecode
4021 && AGGREGATE_TYPE_P (type)
4022 && gnat_types_compatible_p (type, etype))
4023 return build1 (VIEW_CONVERT_EXPR, type, expr);
4024
4025 /* In all other cases of related types, make a NOP_EXPR. */
4026 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
4027 || (code == INTEGER_CST && ecode == INTEGER_CST
4028 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
4029 return fold_convert (type, expr);
4030
4031 switch (code)
4032 {
4033 case VOID_TYPE:
4034 return fold_build1 (CONVERT_EXPR, type, expr);
4035
a1ab4c31
AC
4036 case INTEGER_TYPE:
4037 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4038 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4039 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4040 return unchecked_convert (type, expr, false);
4041 else if (TYPE_BIASED_REPRESENTATION_P (type))
4042 return fold_convert (type,
4043 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4044 convert (TREE_TYPE (type), expr),
4045 TYPE_MIN_VALUE (type)));
4046
4047 /* ... fall through ... */
4048
4049 case ENUMERAL_TYPE:
01ddebf2 4050 case BOOLEAN_TYPE:
a1ab4c31
AC
4051 /* If we are converting an additive expression to an integer type
4052 with lower precision, be wary of the optimization that can be
4053 applied by convert_to_integer. There are 2 problematic cases:
4054 - if the first operand was originally of a biased type,
4055 because we could be recursively called to convert it
4056 to an intermediate type and thus rematerialize the
4057 additive operator endlessly,
4058 - if the expression contains a placeholder, because an
4059 intermediate conversion that changes the sign could
4060 be inserted and thus introduce an artificial overflow
4061 at compile time when the placeholder is substituted. */
4062 if (code == INTEGER_TYPE
4063 && ecode == INTEGER_TYPE
4064 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4065 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4066 {
4067 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4068
4069 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4070 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4071 || CONTAINS_PLACEHOLDER_P (expr))
4072 return build1 (NOP_EXPR, type, expr);
4073 }
4074
4075 return fold (convert_to_integer (type, expr));
4076
4077 case POINTER_TYPE:
4078 case REFERENCE_TYPE:
4079 /* If converting between two pointers to records denoting
4080 both a template and type, adjust if needed to account
4081 for any differing offsets, since one might be negative. */
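 /* For instance, assuming an 8-byte template, converting from the regular
 layout (template at byte 0, array at byte 8) to the thin-pointer
 layout (template at byte -8, array at byte 0) gives a byte_diff of 8,
 so the pointer is advanced by 8 bytes. */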
4082 if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
4083 {
4084 tree bit_diff
4085 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
4086 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
4087 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
4088 sbitsize_int (BITS_PER_UNIT));
4089
4090 expr = build1 (NOP_EXPR, type, expr);
4091 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
4092 if (integer_zerop (byte_diff))
4093 return expr;
4094
4095 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4096 fold (convert (sizetype, byte_diff)));
4097 }
4098
4099 /* If converting to a thin pointer, handle specially. */
4100 if (TYPE_THIN_POINTER_P (type)
4101 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
4102 return convert_to_thin_pointer (type, expr);
4103
4104 /* If converting fat pointer to normal pointer, get the pointer to the
4105 array and then convert it. */
4106 else if (TYPE_FAT_POINTER_P (etype))
4107 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
4108 NULL_TREE, false);
4109
4110 return fold (convert_to_pointer (type, expr));
4111
4112 case REAL_TYPE:
4113 return fold (convert_to_real (type, expr));
4114
4115 case RECORD_TYPE:
4116 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4117 return
4118 gnat_build_constructor
4119 (type, tree_cons (TYPE_FIELDS (type),
4120 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
4121 NULL_TREE));
4122
4123 /* ... fall through ... */
4124
4125 case ARRAY_TYPE:
4126 /* In these cases, assume the front-end has validated the conversion.
4127 If the conversion is valid, it will be a bit-wise conversion, so
4128 it can be viewed as an unchecked conversion. */
4129 return unchecked_convert (type, expr, false);
4130
4131 case UNION_TYPE:
4132 /* This is either a conversion between a tagged type and some
4133 subtype, which we have to mark as a UNION_TYPE because of
4134 overlapping fields or a conversion of an Unchecked_Union. */
4135 return unchecked_convert (type, expr, false);
4136
4137 case UNCONSTRAINED_ARRAY_TYPE:
4138 /* If EXPR is a constrained array, take its address, convert it to a
4139 fat pointer, and then dereference it. Likewise if EXPR is a
4140 record containing both a template and a constrained array.
4141 Note that a record representing a justified modular type
4142 always represents a packed constrained array. */
4143 if (ecode == ARRAY_TYPE
4144 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4145 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4146 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4147 return
4148 build_unary_op
4149 (INDIRECT_REF, NULL_TREE,
4150 convert_to_fat_pointer (TREE_TYPE (type),
4151 build_unary_op (ADDR_EXPR,
4152 NULL_TREE, expr)));
4153
4154 /* Do something very similar for converting one unconstrained
4155 array to another. */
4156 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4157 return
4158 build_unary_op (INDIRECT_REF, NULL_TREE,
4159 convert (TREE_TYPE (type),
4160 build_unary_op (ADDR_EXPR,
4161 NULL_TREE, expr)));
4162 else
4163 gcc_unreachable ();
4164
4165 case COMPLEX_TYPE:
4166 return fold (convert_to_complex (type, expr));
4167
4168 default:
4169 gcc_unreachable ();
4170 }
4171}
4172\f
4173/* Remove all conversions that are done in EXP. This includes converting
4174 from a padded type or to a justified modular type. If TRUE_ADDRESS
4175 is true, always return the address of the containing object even if
4176 the address is not bit-aligned. */
4177
4178tree
4179remove_conversions (tree exp, bool true_address)
4180{
4181 switch (TREE_CODE (exp))
4182 {
4183 case CONSTRUCTOR:
4184 if (true_address
4185 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4186 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4187 return
4188 remove_conversions (VEC_index (constructor_elt,
4189 CONSTRUCTOR_ELTS (exp), 0)->value,
4190 true);
4191 break;
4192
4193 case COMPONENT_REF:
4194 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
4195 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
4196 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4197 break;
4198
4199 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
4200 CASE_CONVERT:
4201 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4202
4203 default:
4204 break;
4205 }
4206
4207 return exp;
4208}
4209\f
4210/* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
4211 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
4212 likewise return an expression pointing to the underlying array. */
4213
4214tree
4215maybe_unconstrained_array (tree exp)
4216{
4217 enum tree_code code = TREE_CODE (exp);
4218 tree new;
4219
4220 switch (TREE_CODE (TREE_TYPE (exp)))
4221 {
4222 case UNCONSTRAINED_ARRAY_TYPE:
4223 if (code == UNCONSTRAINED_ARRAY_REF)
4224 {
4225 new
4226 = build_unary_op (INDIRECT_REF, NULL_TREE,
4227 build_component_ref (TREE_OPERAND (exp, 0),
4228 get_identifier ("P_ARRAY"),
4229 NULL_TREE, false));
4230 TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
4231 return new;
4232 }
4233
4234 else if (code == NULL_EXPR)
4235 return build1 (NULL_EXPR,
4236 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
4237 (TREE_TYPE (TREE_TYPE (exp))))),
4238 TREE_OPERAND (exp, 0));
4239
4240 case RECORD_TYPE:
4241 /* If this is a padded type, convert to the unpadded type and see if
4242 it contains a template. */
4243 if (TYPE_IS_PADDING_P (TREE_TYPE (exp)))
4244 {
4245 new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
4246 if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
4247 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new)))
4248 return
4249 build_component_ref (new, NULL_TREE,
4250 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (new))),
4251 0);
4252 }
4253 else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
4254 return
4255 build_component_ref (exp, NULL_TREE,
4256 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
4257 break;
4258
4259 default:
4260 break;
4261 }
4262
4263 return exp;
4264}
4265\f
afcea859
EB
4266/* Return true if EXPR is an expression that can be folded as an operand
4267 of a VIEW_CONVERT_EXPR. See the head comment of unchecked_convert for
4268 the rationale. */
4269
4270static bool
4271can_fold_for_view_convert_p (tree expr)
4272{
4273 tree t1, t2;
4274
4275 /* The folder will fold NOP_EXPRs between integral types with the same
4276 precision (in the middle-end's sense). We cannot allow it if the
4277 types don't have the same precision in the Ada sense as well. */
4278 if (TREE_CODE (expr) != NOP_EXPR)
4279 return true;
4280
4281 t1 = TREE_TYPE (expr);
4282 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
4283
4284 /* Defer to the folder for non-integral conversions. */
4285 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
4286 return true;
4287
4288 /* Only fold conversions that preserve both precisions. */
4289 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
4290 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
4291 return true;
4292
4293 return false;
4294}
4295
a1ab4c31 4296/* Return an expression that does an unchecked conversion of EXPR to TYPE.
afcea859
EB
4297 If NOTRUNC_P is true, truncation operations should be suppressed.
4298
4299 Special care is required with (source or target) integral types whose
4300 precision is not equal to their size, to make sure we fetch or assign
4301 the value bits whose location might depend on the endianness, e.g.
4302
4303 Rmsize : constant := 8;
4304 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
4305
4306 type Bit_Array is array (1 .. Rmsize) of Boolean;
4307 pragma Pack (Bit_Array);
4308
4309 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
4310
4311 Value : Int := 2#1000_0001#;
4312 Vbits : Bit_Array := To_Bit_Array (Value);
4313
4314 we expect the 8 bits at Vbits'Address to always contain Value, while
4315 their original location depends on the endianness, at Value'Address
4316 on a little-endian architecture but not on a big-endian one.
4317
4318 ??? There is a problematic discrepancy between what is called precision
4319 here (and more generally throughout gigi) for integral types and what is
4320 called precision in the middle-end. In the former case it's the RM size
4321 as given by TYPE_RM_SIZE (or rm_size) whereas it's TYPE_PRECISION in the
4322 latter case, the hitch being that they are not equal when they matter,
4323 that is when the number of value bits is not equal to the type's size:
4324 TYPE_RM_SIZE does give the number of value bits but TYPE_PRECISION is set
4325 to the size. The sole exceptions are BOOLEAN_TYPEs, for which both are 1.
4326
4327 The consequence is that gigi must duplicate code bridging the gap between
4328 the type's size and its precision that exists for TYPE_PRECISION in the
4329 middle-end, because the latter knows nothing about TYPE_RM_SIZE, and be
4330 wary of transformations applied in the middle-end based on TYPE_PRECISION
4331 because this value doesn't reflect the actual precision for Ada. */
a1ab4c31
AC
4332
4333tree
4334unchecked_convert (tree type, tree expr, bool notrunc_p)
4335{
4336 tree etype = TREE_TYPE (expr);
4337
4338 /* If the expression is already the right type, we are done. */
4339 if (etype == type)
4340 return expr;
4341
4342 /* If both types are integral, just do a normal conversion.
4343 Likewise for a conversion to an unconstrained array. */
4344 if ((((INTEGRAL_TYPE_P (type)
4345 && !(TREE_CODE (type) == INTEGER_TYPE
4346 && TYPE_VAX_FLOATING_POINT_P (type)))
4347 || (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
4348 || (TREE_CODE (type) == RECORD_TYPE
4349 && TYPE_JUSTIFIED_MODULAR_P (type)))
4350 && ((INTEGRAL_TYPE_P (etype)
4351 && !(TREE_CODE (etype) == INTEGER_TYPE
4352 && TYPE_VAX_FLOATING_POINT_P (etype)))
4353 || (POINTER_TYPE_P (etype) && !TYPE_THIN_POINTER_P (etype))
4354 || (TREE_CODE (etype) == RECORD_TYPE
4355 && TYPE_JUSTIFIED_MODULAR_P (etype))))
4356 || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
4357 {
a1ab4c31
AC
4358 if (TREE_CODE (etype) == INTEGER_TYPE
4359 && TYPE_BIASED_REPRESENTATION_P (etype))
4360 {
4361 tree ntype = copy_type (etype);
a1ab4c31
AC
4362 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
4363 TYPE_MAIN_VARIANT (ntype) = ntype;
4364 expr = build1 (NOP_EXPR, ntype, expr);
4365 }
4366
4367 if (TREE_CODE (type) == INTEGER_TYPE
4368 && TYPE_BIASED_REPRESENTATION_P (type))
4369 {
afcea859 4370 tree rtype = copy_type (type);
a1ab4c31
AC
4371 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
4372 TYPE_MAIN_VARIANT (rtype) = rtype;
afcea859
EB
4373 expr = convert (rtype, expr);
4374 expr = build1 (NOP_EXPR, type, expr);
a1ab4c31
AC
4375 }
4376
afcea859
EB
4377 /* We have another special case: if we are unchecked converting either
4378 a subtype or a type with limited range into a base type, we need to
4379 ensure that VRP doesn't propagate range information because this
4380 conversion may be done precisely to validate that the object is
4381 within the range it is supposed to have. */
a1ab4c31
AC
4382 else if (TREE_CODE (expr) != INTEGER_CST
4383 && TREE_CODE (type) == INTEGER_TYPE && !TREE_TYPE (type)
4384 && ((TREE_CODE (etype) == INTEGER_TYPE && TREE_TYPE (etype))
4385 || TREE_CODE (etype) == ENUMERAL_TYPE
4386 || TREE_CODE (etype) == BOOLEAN_TYPE))
4387 {
4388 /* The optimization barrier is a VIEW_CONVERT_EXPR node; moreover,
4389 in order not to be deemed a useless type conversion, it must
4390 be from subtype to base type.
4391
afcea859
EB
4392 Therefore we first do the bulk of the conversion to a subtype of
4393 the final type. And this conversion must itself not be deemed
4394 useless if the source type is not a subtype because, otherwise,
4395 the final VIEW_CONVERT_EXPR will be deemed so as well. That's
4396 why we toggle the unsigned flag in this conversion, which is
4397 harmless since the final conversion is only a reinterpretation
4398 of the bit pattern.
4399
a1ab4c31
AC
4400 ??? This may raise addressability and/or aliasing issues because
4401 VIEW_CONVERT_EXPR gets gimplified as an lvalue, thus causing the
4402 address of its operand to be taken if it is deemed addressable
4403 and not already in GIMPLE form. */
afcea859
EB
4404 tree rtype
4405 = gnat_type_for_mode (TYPE_MODE (type), !TYPE_UNSIGNED (etype));
a1ab4c31
AC
4406 rtype = copy_type (rtype);
4407 TYPE_MAIN_VARIANT (rtype) = rtype;
4408 TREE_TYPE (rtype) = type;
afcea859
EB
4409 expr = convert (rtype, expr);
4410 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
a1ab4c31
AC
4411 }
4412
afcea859
EB
4413 else
4414 expr = convert (type, expr);
a1ab4c31
AC
4415 }
4416
afcea859
EB
4417 /* If we are converting to an integral type whose precision is not equal
4418 to its size, first unchecked convert to a record that contains an
4419 object of the output type. Then extract the field. */
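 /* Going through a field of exactly the output type presumably guarantees
 that only the value bits are fetched at their proper location, as per
 the endianness discussion in the head comment above. */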
a1ab4c31
AC
4420 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4421 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4422 GET_MODE_BITSIZE (TYPE_MODE (type))))
4423 {
4424 tree rec_type = make_node (RECORD_TYPE);
4425 tree field = create_field_decl (get_identifier ("OBJ"), type,
4426 rec_type, 1, 0, 0, 0);
4427
4428 TYPE_FIELDS (rec_type) = field;
4429 layout_type (rec_type);
4430
4431 expr = unchecked_convert (rec_type, expr, notrunc_p);
4432 expr = build_component_ref (expr, NULL_TREE, field, 0);
4433 }
4434
afcea859
EB
4435 /* Similarly if we are converting from an integral type whose precision
4436 is not equal to its size. */
a1ab4c31
AC
4437 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype)
4438 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
4439 GET_MODE_BITSIZE (TYPE_MODE (etype))))
4440 {
4441 tree rec_type = make_node (RECORD_TYPE);
4442 tree field
4443 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
4444 1, 0, 0, 0);
4445
4446 TYPE_FIELDS (rec_type) = field;
4447 layout_type (rec_type);
4448
4449 expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
4450 expr = unchecked_convert (type, expr, notrunc_p);
4451 }
4452
4453 /* We have a special case when we are converting between two
4454 unconstrained array types. In that case, take the address,
4455 convert the fat pointer types, and dereference. */
4456 else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
4457 && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
4458 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
4459 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
4460 build_unary_op (ADDR_EXPR, NULL_TREE,
4461 expr)));
4462 else
4463 {
4464 expr = maybe_unconstrained_array (expr);
4465 etype = TREE_TYPE (expr);
afcea859
EB
4466 if (can_fold_for_view_convert_p (expr))
4467 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
4468 else
4469 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
a1ab4c31
AC
4470 }
4471
afcea859
EB
4472 /* If the result is an integral type whose precision is not equal to its
4473 size, sign- or zero-extend the result. We need not do this if the input
4474 is an integral type of the same precision and signedness or if the output
a1ab4c31
AC
4475 is a biased type or if both the input and output are unsigned. */
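 /* For instance, with an 8-bit RM size in a 32-bit mode, the shift count
 below is 24 and the value is rewritten as (expr << 24) >> 24 in the base
 type, whose signedness selects sign- or zero-extension of the low bits. */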
4476 if (!notrunc_p
4477 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4478 && !(TREE_CODE (type) == INTEGER_TYPE
4479 && TYPE_BIASED_REPRESENTATION_P (type))
4480 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4481 GET_MODE_BITSIZE (TYPE_MODE (type)))
4482 && !(INTEGRAL_TYPE_P (etype)
4483 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
4484 && operand_equal_p (TYPE_RM_SIZE (type),
4485 (TYPE_RM_SIZE (etype) != 0
4486 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
4487 0))
4488 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
4489 {
4490 tree base_type = gnat_type_for_mode (TYPE_MODE (type),
4491 TYPE_UNSIGNED (type));
4492 tree shift_expr
4493 = convert (base_type,
4494 size_binop (MINUS_EXPR,
4495 bitsize_int
4496 (GET_MODE_BITSIZE (TYPE_MODE (type))),
4497 TYPE_RM_SIZE (type)));
4498 expr
4499 = convert (type,
4500 build_binary_op (RSHIFT_EXPR, base_type,
4501 build_binary_op (LSHIFT_EXPR, base_type,
4502 convert (base_type, expr),
4503 shift_expr),
4504 shift_expr));
4505 }
4506
4507 /* An unchecked conversion should never raise Constraint_Error. The code
4508 below assumes that GCC's conversion routines overflow the same way that
4509 the underlying hardware does. This is probably true. In the rare case
4510 when it is false, we can rely on the fact that such conversions are
4511 erroneous anyway. */
4512 if (TREE_CODE (expr) == INTEGER_CST)
4513 TREE_OVERFLOW (expr) = 0;
4514
4515 /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
4516 mark it as no longer constant. */
4517 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4518 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
4519 OEP_ONLY_CONST))
4520 TREE_CONSTANT (expr) = 0;
4521
4522 return expr;
4523}
4524\f
4525/* Return the appropriate GCC tree code for the specified GNAT type,
4526 the latter being a record type as predicated by Is_Record_Type. */
4527
4528enum tree_code
4529tree_code_for_record_type (Entity_Id gnat_type)
4530{
4531 Node_Id component_list
4532 = Component_List (Type_Definition
4533 (Declaration_Node
4534 (Implementation_Base_Type (gnat_type))));
4535 Node_Id component;
4536
4537 /* Make this a UNION_TYPE unless it's either not an Unchecked_Union or
4538 we have a non-discriminant field outside a variant. In either case,
4539 it's a RECORD_TYPE. */
4540
4541 if (!Is_Unchecked_Union (gnat_type))
4542 return RECORD_TYPE;
4543
4544 for (component = First_Non_Pragma (Component_Items (component_list));
4545 Present (component);
4546 component = Next_Non_Pragma (component))
4547 if (Ekind (Defining_Entity (component)) == E_Component)
4548 return RECORD_TYPE;
4549
4550 return UNION_TYPE;
4551}
4552
4553/* Return true if GNU_TYPE is suitable as the type of a non-aliased
4554 component of an aggregate type. */
4555
4556bool
4557type_for_nonaliased_component_p (tree gnu_type)
4558{
4559 /* If the type is passed by reference, we may have pointers to the
4560 component so it cannot be made non-aliased. */
4561 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
4562 return false;
4563
4564 /* We used to say that any component of aggregate type is aliased
4565 because the front-end may take 'Reference of it. The front-end
4566 has been enhanced in the meantime so as to use a renaming instead
4567 in most cases, but the back-end can probably take the address of
4568 such a component too so we go for the conservative stance.
4569
4570 For instance, we might need the address of any array type, even
4571 if normally passed by copy, to construct a fat pointer if the
4572 component is used as an actual for an unconstrained formal.
4573
4574 Likewise for record types: even if a specific record subtype is
4575 passed by copy, the parent type might be passed by ref (e.g. if
4576 it's of variable size) and we might take the address of a child
4577 component to pass to a parent formal. We have no way to check
4578 for such conditions here. */
4579 if (AGGREGATE_TYPE_P (gnu_type))
4580 return false;
4581
4582 return true;
4583}
4584
4585/* Perform final processing on global variables. */
4586
4587void
4588gnat_write_global_declarations (void)
4589{
4590 /* Proceed to optimize and emit assembly.
4591 FIXME: shouldn't be the front end's responsibility to call this. */
4592 cgraph_optimize ();
4593
4594 /* Emit debug info for all global declarations. */
4595 emit_debug_global_declarations (VEC_address (tree, global_decls),
4596 VEC_length (tree, global_decls));
4597}
4598
4599/* ************************************************************************
4600 * * GCC builtins support *
4601 * ************************************************************************ */
4602
4603/* The general scheme is fairly simple:
4604
4605 For each builtin function/type to be declared, gnat_install_builtins calls
4606 internal facilities which eventually get to gnat_push_decl, which in turn
4607 tracks the so-declared builtin function decls in the 'builtin_decls' global
4608 data structure. When an Intrinsic subprogram declaration is processed, we
4609 search this global datastructure to retrieve the associated BUILT_IN DECL
4610 node. */
4611
4612/* Search the chain of currently available builtin declarations for a node
4613 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
4614 found, if any, or NULL_TREE otherwise. */
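/* For instance, the Intrinsic binding code described above might end up doing
   something like builtin_decl_for (get_identifier ("__builtin_memcpy")) to
   retrieve the corresponding BUILT_IN decl (illustrative sketch only). */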
4615tree
4616builtin_decl_for (tree name)
4617{
4618 unsigned i;
4619 tree decl;
4620
4621 for (i = 0; VEC_iterate(tree, builtin_decls, i, decl); i++)
4622 if (DECL_NAME (decl) == name)
4623 return decl;
4624
4625 return NULL_TREE;
4626}
4627
4628/* The code below eventually exposes gnat_install_builtins, which declares
4629 the builtin types and functions we might need, either internally or as
4630 user accessible facilities.
4631
4632 ??? This is a first implementation shot, still in rough shape. It is
4633 heavily inspired by the "C" family implementation, with chunks copied
4634 verbatim from there.
4635
4636 Two obvious TODO candidates are
4637 o Use a more efficient name/decl mapping scheme
4638 o Devise a middle-end infrastructure to avoid having to copy
4639 pieces between front-ends. */
4640
4641/* ----------------------------------------------------------------------- *
4642 * BUILTIN ELEMENTARY TYPES *
4643 * ----------------------------------------------------------------------- */
4644
4645/* Standard data types to be used in builtin argument declarations. */
4646
4647enum c_tree_index
4648{
4649 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
4650 CTI_STRING_TYPE,
4651 CTI_CONST_STRING_TYPE,
4652
4653 CTI_MAX
4654};
4655
4656static tree c_global_trees[CTI_MAX];
4657
4658#define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
4659#define string_type_node c_global_trees[CTI_STRING_TYPE]
4660#define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
4661
4662/* ??? In addition to some attribute handlers, we currently don't support a
4663 (small) number of builtin-types, which in turn inhibits support for a
4664 number of builtin functions. */
4665#define wint_type_node void_type_node
4666#define intmax_type_node void_type_node
4667#define uintmax_type_node void_type_node
4668
4669/* Build the void_list_node (void_type_node having been created). */
4670
4671static tree
4672build_void_list_node (void)
4673{
4674 tree t = build_tree_list (NULL_TREE, void_type_node);
4675 return t;
4676}
4677
4678/* Used to help initialize the builtin-types.def table. When a type of
4679 the correct size doesn't exist, use error_mark_node instead of NULL.
4680 The latter results in segfaults even when a decl using the type doesn't
4681 get invoked. */
4682
4683static tree
4684builtin_type_for_size (int size, bool unsignedp)
4685{
4686 tree type = lang_hooks.types.type_for_size (size, unsignedp);
4687 return type ? type : error_mark_node;
4688}
4689
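/* For illustration only: builtin-types.def relies on this helper for its
   fixed-width primitive entries; a representative entry is expected to
   look roughly like

     DEF_PRIMITIVE_TYPE (BT_I4, builtin_type_for_size (32, 1))

   so that a target lacking a 32-bit integer type yields error_mark_node
   for BT_I4 instead of a null tree.  */
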
4690/* Build/push the elementary type decls that builtin functions/types
4691 will need. */
4692
4693static void
4694install_builtin_elementary_types (void)
4695{
4696 signed_size_type_node = size_type_node;
4697 pid_type_node = integer_type_node;
4698 void_list_node = build_void_list_node ();
4699
4700 string_type_node = build_pointer_type (char_type_node);
4701 const_string_type_node
4702 = build_pointer_type (build_qualified_type
4703 (char_type_node, TYPE_QUAL_CONST));
4704}
4705
4706/* ----------------------------------------------------------------------- *
4707 * BUILTIN FUNCTION TYPES *
4708 * ----------------------------------------------------------------------- */
4709
4710/* Now, builtin function types per se. */
4711
4712enum c_builtin_type
4713{
4714#define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
4715#define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
4716#define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
4717#define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
4718#define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4719#define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4720#define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
4721#define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
4722#define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
4723#define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
4724#define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
4725#define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
4726#define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4727#define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4728#define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
4729 NAME,
4730#define DEF_POINTER_TYPE(NAME, TYPE) NAME,
4731#include "builtin-types.def"
4732#undef DEF_PRIMITIVE_TYPE
4733#undef DEF_FUNCTION_TYPE_0
4734#undef DEF_FUNCTION_TYPE_1
4735#undef DEF_FUNCTION_TYPE_2
4736#undef DEF_FUNCTION_TYPE_3
4737#undef DEF_FUNCTION_TYPE_4
4738#undef DEF_FUNCTION_TYPE_5
4739#undef DEF_FUNCTION_TYPE_6
4740#undef DEF_FUNCTION_TYPE_7
4741#undef DEF_FUNCTION_TYPE_VAR_0
4742#undef DEF_FUNCTION_TYPE_VAR_1
4743#undef DEF_FUNCTION_TYPE_VAR_2
4744#undef DEF_FUNCTION_TYPE_VAR_3
4745#undef DEF_FUNCTION_TYPE_VAR_4
4746#undef DEF_FUNCTION_TYPE_VAR_5
4747#undef DEF_POINTER_TYPE
4748 BT_LAST
4749};
4750
4751typedef enum c_builtin_type builtin_type;
4752
4753/* A temporary array used in communication with def_fn_type. */
4754static GTY(()) tree builtin_types[(int) BT_LAST + 1];
4755
4756/* A helper function for install_builtin_function_types. Build function type
4757 for DEF with return type RET and N arguments. If VAR is true, then the
4758 function should be variadic after those N arguments.
4759
4760 Takes special care not to ICE if any of the types involved are
4761 error_mark_node, which indicates that said type is not in fact available
4762 (see builtin_type_for_size), in which case the function type as a whole
4763 is set to error_mark_node. */
4764
4765static void
4766def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
4767{
4768 tree args = NULL, t;
4769 va_list list;
4770 int i;
4771
4772 va_start (list, n);
4773 for (i = 0; i < n; ++i)
4774 {
4775 builtin_type a = va_arg (list, builtin_type);
4776 t = builtin_types[a];
4777 if (t == error_mark_node)
4778 goto egress;
4779 args = tree_cons (NULL_TREE, t, args);
4780 }
4781 va_end (list);
4782
4783 args = nreverse (args);
4784 if (!var)
4785 args = chainon (args, void_list_node);
4786
4787 t = builtin_types[ret];
4788 if (t == error_mark_node)
4789 goto egress;
4790 t = build_function_type (t, args);
4791
4792 egress:
4793 builtin_types[def] = t;
4794}
4795
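/* For illustration only: for a builtin-types.def entry such as

     DEF_FUNCTION_TYPE_2 (BT_FN_PTR_PTR_SIZE, BT_PTR, BT_PTR, BT_SIZE)

   the macro glue below invokes

     def_fn_type (BT_FN_PTR_PTR_SIZE, BT_PTR, 0, 2, BT_PTR, BT_SIZE);

   which builds the equivalent of the C function type void * (void *, size_t)
   terminated by void_list_node, or records error_mark_node if any component
   type is unavailable.  */
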
4796/* Build the builtin function types and install them in the builtin_types
4797 array for later use in builtin function decls. */
4798
4799static void
4800install_builtin_function_types (void)
4801{
4802 tree va_list_ref_type_node;
4803 tree va_list_arg_type_node;
4804
4805 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4806 {
4807 va_list_arg_type_node = va_list_ref_type_node =
4808 build_pointer_type (TREE_TYPE (va_list_type_node));
4809 }
4810 else
4811 {
4812 va_list_arg_type_node = va_list_type_node;
4813 va_list_ref_type_node = build_reference_type (va_list_type_node);
4814 }
4815
4816#define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
4817 builtin_types[ENUM] = VALUE;
4818#define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
4819 def_fn_type (ENUM, RETURN, 0, 0);
4820#define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
4821 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
4822#define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
4823 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
4824#define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
4825 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
4826#define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
4827 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
4828#define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
4829 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
4830#define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4831 ARG6) \
4832 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
4833#define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4834 ARG6, ARG7) \
4835 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
4836#define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
4837 def_fn_type (ENUM, RETURN, 1, 0);
4838#define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
4839 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
4840#define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
4841 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
4842#define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
4843 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
4844#define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
4845 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
4846#define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
4847 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
4848#define DEF_POINTER_TYPE(ENUM, TYPE) \
4849 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
4850
4851#include "builtin-types.def"
4852
4853#undef DEF_PRIMITIVE_TYPE
#undef DEF_FUNCTION_TYPE_0
4854#undef DEF_FUNCTION_TYPE_1
4855#undef DEF_FUNCTION_TYPE_2
4856#undef DEF_FUNCTION_TYPE_3
4857#undef DEF_FUNCTION_TYPE_4
4858#undef DEF_FUNCTION_TYPE_5
4859#undef DEF_FUNCTION_TYPE_6
#undef DEF_FUNCTION_TYPE_7
4860#undef DEF_FUNCTION_TYPE_VAR_0
4861#undef DEF_FUNCTION_TYPE_VAR_1
4862#undef DEF_FUNCTION_TYPE_VAR_2
4863#undef DEF_FUNCTION_TYPE_VAR_3
4864#undef DEF_FUNCTION_TYPE_VAR_4
4865#undef DEF_FUNCTION_TYPE_VAR_5
4866#undef DEF_POINTER_TYPE
4867 builtin_types[(int) BT_LAST] = NULL_TREE;
4868}
4869
4870/* ----------------------------------------------------------------------- *
4871 * BUILTIN ATTRIBUTES *
4872 * ----------------------------------------------------------------------- */
4873
4874enum built_in_attribute
4875{
4876#define DEF_ATTR_NULL_TREE(ENUM) ENUM,
4877#define DEF_ATTR_INT(ENUM, VALUE) ENUM,
4878#define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
4879#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
4880#include "builtin-attrs.def"
4881#undef DEF_ATTR_NULL_TREE
4882#undef DEF_ATTR_INT
4883#undef DEF_ATTR_IDENT
4884#undef DEF_ATTR_TREE_LIST
4885 ATTR_LAST
4886};
4887
4888static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
4889
4890static void
4891install_builtin_attributes (void)
4892{
4893 /* Fill in the built_in_attributes array. */
4894#define DEF_ATTR_NULL_TREE(ENUM) \
4895 built_in_attributes[(int) ENUM] = NULL_TREE;
4896#define DEF_ATTR_INT(ENUM, VALUE) \
4897 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
4898#define DEF_ATTR_IDENT(ENUM, STRING) \
4899 built_in_attributes[(int) ENUM] = get_identifier (STRING);
4900#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
4901 built_in_attributes[(int) ENUM] \
4902 = tree_cons (built_in_attributes[(int) PURPOSE], \
4903 built_in_attributes[(int) VALUE], \
4904 built_in_attributes[(int) CHAIN]);
4905#include "builtin-attrs.def"
4906#undef DEF_ATTR_NULL_TREE
4907#undef DEF_ATTR_INT
4908#undef DEF_ATTR_IDENT
4909#undef DEF_ATTR_TREE_LIST
4910}
4911
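/* For illustration only: a typical pair of builtin-attrs.def entries such as

     DEF_ATTR_IDENT (ATTR_NOTHROW, "nothrow")
     DEF_ATTR_TREE_LIST (ATTR_NOTHROW_LIST, ATTR_NOTHROW, ATTR_NULL, ATTR_NULL)

   expands through the macros above into roughly

     built_in_attributes[ATTR_NOTHROW] = get_identifier ("nothrow");
     built_in_attributes[ATTR_NOTHROW_LIST]
       = tree_cons (built_in_attributes[ATTR_NOTHROW], NULL_TREE, NULL_TREE);

   i.e. an attribute list suitable as the FNATTRS argument of def_builtin_1
   below.  */
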
4912/* Handle a "const" attribute; arguments as in
4913 struct attribute_spec.handler. */
4914
4915static tree
4916handle_const_attribute (tree *node, tree ARG_UNUSED (name),
4917 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
4918 bool *no_add_attrs)
4919{
4920 if (TREE_CODE (*node) == FUNCTION_DECL)
4921 TREE_READONLY (*node) = 1;
4922 else
4923 *no_add_attrs = true;
4924
4925 return NULL_TREE;
4926}
4927
4928/* Handle a "nothrow" attribute; arguments as in
4929 struct attribute_spec.handler. */
4930
4931static tree
4932handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
4933 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
4934 bool *no_add_attrs)
4935{
4936 if (TREE_CODE (*node) == FUNCTION_DECL)
4937 TREE_NOTHROW (*node) = 1;
4938 else
4939 *no_add_attrs = true;
4940
4941 return NULL_TREE;
4942}
4943
4944/* Handle a "pure" attribute; arguments as in
4945 struct attribute_spec.handler. */
4946
4947static tree
4948handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
4949 int ARG_UNUSED (flags), bool *no_add_attrs)
4950{
4951 if (TREE_CODE (*node) == FUNCTION_DECL)
4952 DECL_PURE_P (*node) = 1;
4953 /* ??? TODO: Support types. */
4954 else
4955 {
4956 warning (OPT_Wattributes, "%qE attribute ignored", name);
4957 *no_add_attrs = true;
4958 }
4959
4960 return NULL_TREE;
4961}
4962
4963/* Handle a "no vops" attribute; arguments as in
4964 struct attribute_spec.handler. */
4965
4966static tree
4967handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
4968 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
4969 bool *ARG_UNUSED (no_add_attrs))
4970{
4971 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
4972 DECL_IS_NOVOPS (*node) = 1;
4973 return NULL_TREE;
4974}
4975
4976/* Helper for nonnull attribute handling; fetch the operand number
4977 from the attribute argument list. */
4978
4979static bool
4980get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
4981{
4982 /* Verify the arg number is a constant. */
4983 if (TREE_CODE (arg_num_expr) != INTEGER_CST
4984 || TREE_INT_CST_HIGH (arg_num_expr) != 0)
4985 return false;
4986
4987 *valp = TREE_INT_CST_LOW (arg_num_expr);
4988 return true;
4989}
4990
4991/* Handle the "nonnull" attribute. */
4992static tree
4993handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
4994 tree args, int ARG_UNUSED (flags),
4995 bool *no_add_attrs)
4996{
4997 tree type = *node;
4998 unsigned HOST_WIDE_INT attr_arg_num;
4999
5000 /* If no arguments are specified, all pointer arguments should be
5001 non-null. Verify a full prototype is given so that the arguments
5002 will have the correct types when we actually check them later. */
5003 if (!args)
5004 {
5005 if (!TYPE_ARG_TYPES (type))
5006 {
5007 error ("nonnull attribute without arguments on a non-prototype");
5008 *no_add_attrs = true;
5009 }
5010 return NULL_TREE;
5011 }
5012
5013 /* Argument list specified. Verify that each argument number references
5014 a pointer argument. */
5015 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
5016 {
5017 tree argument;
5018 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
5019
5020 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
5021 {
5022 error ("nonnull argument has invalid operand number (argument %lu)",
5023 (unsigned long) attr_arg_num);
5024 *no_add_attrs = true;
5025 return NULL_TREE;
5026 }
5027
5028 argument = TYPE_ARG_TYPES (type);
5029 if (argument)
5030 {
5031 for (ck_num = 1; ; ck_num++)
5032 {
5033 if (!argument || ck_num == arg_num)
5034 break;
5035 argument = TREE_CHAIN (argument);
5036 }
5037
5038 if (!argument
5039 || TREE_CODE (TREE_VALUE (argument)) == VOID_TYPE)
5040 {
5041 error ("nonnull argument with out-of-range operand number (argument %lu, operand %lu)",
5042 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5043 *no_add_attrs = true;
5044 return NULL_TREE;
5045 }
5046
5047 if (TREE_CODE (TREE_VALUE (argument)) != POINTER_TYPE)
5048 {
5049 error ("nonnull argument references non-pointer operand (argument %lu, operand %lu)",
5050 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5051 *no_add_attrs = true;
5052 return NULL_TREE;
5053 }
5054 }
5055 }
5056
5057 return NULL_TREE;
5058}
5059
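/* For illustration only: the checks above accept the C-level equivalent of

     extern void *my_copy (void *, const void *, int)
       __attribute__ ((nonnull (1, 2)));

   where operands 1 and 2 designate pointer parameters of a full prototype,
   and reject operand numbers that are out of range or that name non-pointer
   parameters.  The function name is made up for the example.  */
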
5060/* Handle a "sentinel" attribute. */
5061
5062static tree
5063handle_sentinel_attribute (tree *node, tree name, tree args,
5064 int ARG_UNUSED (flags), bool *no_add_attrs)
5065{
5066 tree params = TYPE_ARG_TYPES (*node);
5067
5068 if (!params)
5069 {
5070 warning (OPT_Wattributes,
5071 "%qE attribute requires prototypes with named arguments", name);
5072 *no_add_attrs = true;
5073 }
5074 else
5075 {
5076 while (TREE_CHAIN (params))
5077 params = TREE_CHAIN (params);
5078
5079 if (VOID_TYPE_P (TREE_VALUE (params)))
5080 {
5081 warning (OPT_Wattributes,
5082 "%qE attribute only applies to variadic functions", name);
5083 *no_add_attrs = true;
5084 }
5085 }
5086
5087 if (args)
5088 {
5089 tree position = TREE_VALUE (args);
5090
5091 if (TREE_CODE (position) != INTEGER_CST)
5092 {
5093 warning (0, "requested position is not an integer constant");
5094 *no_add_attrs = true;
5095 }
5096 else
5097 {
5098 if (tree_int_cst_lt (position, integer_zero_node))
5099 {
5100 warning (0, "requested position is less than zero");
5101 *no_add_attrs = true;
5102 }
5103 }
5104 }
5105
5106 return NULL_TREE;
5107}
5108
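/* For illustration only: the handler above is satisfied by a variadic
   prototype along the lines of

     extern int my_exec (const char *, ...) __attribute__ ((sentinel));

   and warns when the attribute is applied to a non-variadic function or is
   given a negative position.  The function name is made up for the
   example.  */
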
5109/* Handle a "noreturn" attribute; arguments as in
5110 struct attribute_spec.handler. */
5111
5112static tree
5113handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5114 int ARG_UNUSED (flags), bool *no_add_attrs)
5115{
5116 tree type = TREE_TYPE (*node);
5117
5118 /* See FIXME comment in c_common_attribute_table. */
5119 if (TREE_CODE (*node) == FUNCTION_DECL)
5120 TREE_THIS_VOLATILE (*node) = 1;
5121 else if (TREE_CODE (type) == POINTER_TYPE
5122 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
5123 TREE_TYPE (*node)
5124 = build_pointer_type
5125 (build_type_variant (TREE_TYPE (type),
5126 TYPE_READONLY (TREE_TYPE (type)), 1));
5127 else
5128 {
5129 warning (OPT_Wattributes, "%qE attribute ignored", name);
5130 *no_add_attrs = true;
5131 }
5132
5133 return NULL_TREE;
5134}
5135
5136/* Handle a "malloc" attribute; arguments as in
5137 struct attribute_spec.handler. */
5138
5139static tree
5140handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5141 int ARG_UNUSED (flags), bool *no_add_attrs)
5142{
5143 if (TREE_CODE (*node) == FUNCTION_DECL
5144 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
5145 DECL_IS_MALLOC (*node) = 1;
5146 else
5147 {
5148 warning (OPT_Wattributes, "%qE attribute ignored", name);
5149 *no_add_attrs = true;
5150 }
5151
5152 return NULL_TREE;
5153}
5154
5155/* Fake handler for attributes we don't properly support. */
5156
5157tree
5158fake_attribute_handler (tree * ARG_UNUSED (node),
5159 tree ARG_UNUSED (name),
5160 tree ARG_UNUSED (args),
5161 int ARG_UNUSED (flags),
5162 bool * ARG_UNUSED (no_add_attrs))
5163{
5164 return NULL_TREE;
5165}
5166
5167/* Handle a "type_generic" attribute. */
5168
5169static tree
5170handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
5171 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5172 bool * ARG_UNUSED (no_add_attrs))
5173{
5174 tree params;
5175
5176 /* Ensure we have a function type. */
5177 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
5178
5179 params = TYPE_ARG_TYPES (*node);
5180 while (params && ! VOID_TYPE_P (TREE_VALUE (params)))
5181 params = TREE_CHAIN (params);
5182
5183 /* Ensure we have a variadic function. */
5184 gcc_assert (!params);
5185
5186 return NULL_TREE;
5187}
5188
5189/* ----------------------------------------------------------------------- *
5190 * BUILTIN FUNCTIONS *
5191 * ----------------------------------------------------------------------- */
5192
5193/* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
5194 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
5195 if nonansi_p and flag_no_nonansi_builtin. */
5196
5197static void
5198def_builtin_1 (enum built_in_function fncode,
5199 const char *name,
5200 enum built_in_class fnclass,
5201 tree fntype, tree libtype,
5202 bool both_p, bool fallback_p,
5203 bool nonansi_p ATTRIBUTE_UNUSED,
5204 tree fnattrs, bool implicit_p)
5205{
5206 tree decl;
5207 const char *libname;
5208
5209 /* Preserve an already installed decl. It most likely was set up in advance
5210 (e.g. as part of the internal builtins) for specific reasons. */
5211 if (built_in_decls[(int) fncode] != NULL_TREE)
5212 return;
5213
5214 gcc_assert ((!both_p && !fallback_p)
5215 || !strncmp (name, "__builtin_",
5216 strlen ("__builtin_")));
5217
5218 libname = name + strlen ("__builtin_");
5219 decl = add_builtin_function (name, fntype, fncode, fnclass,
5220 (fallback_p ? libname : NULL),
5221 fnattrs);
5222 if (both_p)
5223 /* ??? This is normally further controlled by command-line options
5224 like -fno-builtin, but we don't have them for Ada. */
5225 add_builtin_function (libname, libtype, fncode, fnclass,
5226 NULL, fnattrs);
5227
5228 built_in_decls[(int) fncode] = decl;
5229 if (implicit_p)
5230 implicit_built_in_decls[(int) fncode] = decl;
5231}
5232
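/* For illustration only: a builtins.def entry of roughly the shape

     DEF_LIB_BUILTIN (BUILT_IN_MEMCPY, "memcpy",
                      BT_FN_PTR_PTR_CONST_PTR_SIZE, ATTR_NOTHROW_NONNULL)

   reaches def_builtin_1 with NAME "__builtin_memcpy" and both_p set, so both
   the __builtin_memcpy and plain memcpy entry points get declared, and
   built_in_decls[BUILT_IN_MEMCPY] records the former; if an earlier pass
   (e.g. build_common_builtin_nodes) already installed a decl, that decl is
   preserved by the check above instead.  */
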
5233static int flag_isoc94 = 0;
5234static int flag_isoc99 = 0;
5235
5236/* Install what the common builtins.def offers. */
5237
5238static void
5239install_builtin_functions (void)
5240{
5241#define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
5242 NONANSI_P, ATTRS, IMPLICIT, COND) \
5243 if (NAME && COND) \
5244 def_builtin_1 (ENUM, NAME, CLASS, \
5245 builtin_types[(int) TYPE], \
5246 builtin_types[(int) LIBTYPE], \
5247 BOTH_P, FALLBACK_P, NONANSI_P, \
5248 built_in_attributes[(int) ATTRS], IMPLICIT);
5249#include "builtins.def"
5250#undef DEF_BUILTIN
5251}
5252
5253/* ----------------------------------------------------------------------- *
5254 * BUILTINS INSTALLATION *
5255 * ----------------------------------------------------------------------- */
5256
5257/* Install the builtin functions we might need. */
5258
5259void
5260gnat_install_builtins (void)
5261{
5262 install_builtin_elementary_types ();
5263 install_builtin_function_types ();
5264 install_builtin_attributes ();
5265
5266 /* Install builtins used by generic middle-end pieces first. Some of these
5267 know about internal specificities and control attributes accordingly, for
5268 instance __builtin_alloca vs no-throw and -fstack-check. We will ignore
5269 the generic definition from builtins.def. */
5270 build_common_builtin_nodes ();
5271
5272 /* Now, install the target specific builtins, such as the AltiVec family on
5273 ppc, and the common set as exposed by builtins.def. */
5274 targetm.init_builtins ();
5275 install_builtin_functions ();
5276}
5277
5278#include "gt-ada-utils.h"
5279#include "gtype-ada.h"